repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
CospanDesign/nysa-gui | NysaGui/host/stepper_controller/controller.py | 1 | 4250 | #! /usr/bin/python
# Copyright (c) 2014 Dave McCoy (dave.mccoy@cospandesign.com)
# This file is part of Nysa (wiki.cospandesign.com/index.php?title=Nysa).
#
# Nysa is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# Nysa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Nysa; If not, see <http://www.gnu.org/licenses/>.
"""
Stepper Motor Controller
"""
__author__ = 'dave.mccoy@cospandesign.com (Dave McCoy)'
import os
import sys
import argparse
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qt import *
from nysa.common import status
from nysa.host import platform_scanner
from nysa.host.driver.stepper import Stepper
sys.path.append(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir,
"common"))
from nysa_base_controller import NysaBaseController
DRIVER = Stepper                       # Nysa driver class this controller manages.
APP_NAME = "Stepper Motor Controller"  # Human readable name shown in the GUI.
n = str(os.path.split(__file__)[1])    # Bare script filename, used in EPILOG below.
from stepper_actions import StepperActions
sys.path.append(os.path.join(os.path.dirname(__file__),
                             os.pardir,
                             "common"))
from view.view import View
from protocol_utils.stepper.stepper_engine import StepperEngine

# Help strings for the stand-alone argparse entry point (see main()).
DESCRIPTION = "\n" \
              "\n" \
              "%s\n" % APP_NAME

EPILOG = "\n" \
         "\n" \
         "Examples:\n" \
         "\tSomething\n" \
         "\t\t%s Something\n" \
         "\n" % n
class Controller(NysaBaseController):
    """Qt tab controller wiring the Stepper driver, its view and the
    stepper protocol engine together for the host application."""

    @staticmethod
    def get_name():
        """Return the human readable application name shown in the GUI."""
        return APP_NAME

    @staticmethod
    def get_driver():
        """Return the Nysa driver class this controller manages."""
        return DRIVER

    def __init__(self):
        super(Controller, self).__init__()
        self.actions = StepperActions()

    def _initialize(self, platform, urn):
        # Build the driver, the Qt view and the engine, then push the view's
        # initial configuration into the engine so both start in agreement.
        self.stepper = Stepper(platform, urn, self.status)
        self.v = View(self.status, self.actions)
        self.engine = StepperEngine(self.stepper, self.status, self.actions)
        self.engine.update_configuration(self.v.get_configuration())

    def start_tab_view(self, platform, urn, status):
        """Host entry point: record the status logger, then build the
        driver/view/engine stack for the given platform and device URN."""
        self.status = status
        # BUGFIX: the message previously said "Template Application", a
        # leftover from the template this file was copied from.
        self.status.Verbose("Starting %s" % APP_NAME)
        self._initialize(platform, urn)

    def get_view(self):
        """Return the Qt widget for this tab (valid after start_tab_view)."""
        return self.v
def main():
    """Stand-alone entry point: locate a platform carrying a Stepper device
    and show the controller's view inside a QMainWindow."""
    # Parse out the commandline arguments
    s = status.Status()
    s.set_level("info")
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=DESCRIPTION,
        epilog=EPILOG
    )
    parser.add_argument("-d", "--debug",
                        action="store_true",
                        help="Enable Debug Messages")
    parser.add_argument("platform",
                        type=str,
                        nargs='?',
                        # BUGFIX: with nargs='?' the default must be a plain
                        # string, not a one-element list, so args.platform has
                        # a consistent type whether or not it is supplied.
                        default="first",
                        help="Specify the platform to use")
    args = parser.parse_args()
    if args.debug:
        s.set_level("verbose")
        s.Debug("Debug Enabled")
    s.Verbose("platform scanner: %s" % str(dir(platform_scanner)))
    platforms = platform_scanner.get_platforms_with_device(DRIVER, s)
    if not platforms:
        sys.exit("Didn't find any platforms with device: %s" % str(DRIVER))
    platform = platforms[0]
    urn = platform.find_device(DRIVER)[0]
    s.Important("Using: %s" % platform.get_board_name())
    # Get a reference to the controller
    c = Controller()
    # Initialize the application
    app = QApplication(sys.argv)
    # Renamed from 'main' so the local no longer shadows this function.
    main_window = QMainWindow()
    # Tell the controller to set things up
    c.start_tab_view(platform, urn, s)
    QThread.currentThread().setObjectName("main")
    s.Verbose("Thread name: %s" % QThread.currentThread().objectName())
    # Pass in the view to the main widget
    main_window.setCentralWidget(c.get_view())
    main_window.show()
    sys.exit(app.exec_())
if __name__ == "__main__":
    # Allow this controller to be launched directly as a stand-alone app.
    main()
| gpl-2.0 |
cgqyh/pyalgotrade-mod | testcases/drawdown_analyzer_test.py | 1 | 10988 | # PyAlgoTrade
#
# Copyright 2011-2015 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import datetime
import common
import strategy_test
from pyalgotrade.barfeed import yahoofeed
from pyalgotrade.barfeed import membf
from pyalgotrade.stratanalyzer import drawdown
from pyalgotrade import broker
from pyalgotrade import bar
def build_bars_from_closing_prices(closingPrices):
    """Build one daily BasicBar per closing price, starting at 'now'.

    Every price field of a bar (open/high/low/close/volume/adj-close) is set
    to the same closing price; consecutive bars are exactly one day apart.
    """
    start = datetime.datetime.now()
    return [
        bar.BasicBar(
            start + datetime.timedelta(days=offset),
            price, price, price, price, price, price,
            bar.Frequency.DAY)
        for offset, price in enumerate(closingPrices)
    ]
class TestBarFeed(membf.BarFeed):
    """Minimal in-memory bar feed for these tests."""
    def barsHaveAdjClose(self):
        # Adjusted close is irrelevant to the drawdown tests, so the query is
        # rejected outright rather than answered.
        raise NotImplementedError()
class DDHelperCase(common.TestCase):
    """Unit tests for drawdown.DrawDownHelper using hand-fed price sequences.

    The original file repeated the same three assertions dozens of times;
    the sequences are now data-driven while every individual assertion
    (including the exact float comparisons) is preserved.
    """

    def __assertDrawDown(self, helper, maxDrawDown, currentDrawDown, duration, roundDigits=None):
        # Checks the three accessors in one shot. When roundDigits is given
        # the current drawdown is rounded first (used for values like -0.01
        # that are not exactly representable in binary floating point).
        self.assertEqual(helper.getMaxDrawDown(), maxDrawDown)
        if roundDigits is None:
            self.assertEqual(helper.getCurrentDrawDown(), currentDrawDown)
        else:
            self.assertEqual(round(helper.getCurrentDrawDown(), roundDigits), currentDrawDown)
        self.assertEqual(helper.getDuration(), duration)

    def __runSequence(self, step, prices, expected):
        # Seeds the helper with prices[0] (which must show no drawdown), then
        # feeds the remaining prices one `step` apart, checking each against
        # its (maxDD, currentDD, periods, roundDigits) tuple where the
        # expected duration is step * periods.
        helper = drawdown.DrawDownHelper()
        dt = datetime.datetime.now()
        helper.update(dt, prices[0], prices[0])
        self.__assertDrawDown(helper, 0, 0, datetime.timedelta())
        for price, (maxDD, currDD, periods, digits) in zip(prices[1:], expected):
            dt += step
            helper.update(dt, price, price)
            self.__assertDrawDown(helper, maxDD, currDD, step * periods, digits)

    def testNoDrawDown1(self):
        helper = drawdown.DrawDownHelper()
        helper.update(datetime.datetime.now(), 10, 10)
        self.__assertDrawDown(helper, 0, 0, datetime.timedelta())

    def testNoDrawDown2(self):
        # Monotonically rising prices never open a drawdown.
        self.__runSequence(
            datetime.timedelta(days=1),
            [10, 10.01, 11],
            [(0, 0, 0, None),
             (0, 0, 0, None)])

    def testDrawDown1(self):
        # Daily bars: fall from 10 to 4 (60%), then a partial recovery to 9.9.
        self.__runSequence(
            datetime.timedelta(days=1),
            [10, 5, 4, 4, 5, 9, 9.9],
            [(-0.5, -0.5, 1, None),
             (-0.6, -0.6, 2, None),
             (-0.6, -0.6, 3, None),
             (-0.6, -0.5, 4, None),
             (-0.6, -0.1, 5, None),
             (-0.6, -0.01, 6, 2)])

    def testDrawDown2(self):
        # Same shape on minute bars, then a new high at 20 resets the helper
        # and a fresh drawdown starts on the drop back to 10.
        self.__runSequence(
            datetime.timedelta(minutes=1),
            [10, 5, 4, 4, 5, 9, 9.9, 20, 10],
            [(-0.5, -0.5, 1, None),
             (-0.6, -0.6, 2, None),
             (-0.6, -0.6, 3, None),
             (-0.6, -0.5, 4, None),
             (-0.6, -0.1, 5, None),
             (-0.6, -0.01, 6, 2),
             (0, 0, 0, None),
             (-0.5, -0.5, 1, None)])
class AnalyzerTestCase(common.TestCase):
    """End-to-end tests for the drawdown.DrawDown strategy analyzer."""

    def testNoTrades(self):
        # No orders are placed, so equity stays flat and there must be no
        # drawdown at all.
        barFeed = yahoofeed.Feed()
        barFeed.addBarsFromCSV("ige", common.get_data_file_path("sharpe-ratio-test-ige.csv"))
        barFeed.addBarsFromCSV("spy", common.get_data_file_path("sharpe-ratio-test-spy.csv"))
        strat = strategy_test.TestStrategy(barFeed, 1000)
        strat.setBrokerOrdersGTC(True)
        strat.setUseAdjustedValues(True)
        stratAnalyzer = drawdown.DrawDown()
        strat.attachAnalyzer(stratAnalyzer)
        strat.run()
        self.assertTrue(strat.getBroker().getCash() == 1000)
        self.assertEqual(strat.orderUpdatedCalls, 0)
        self.assertTrue(stratAnalyzer.getMaxDrawDown() == 0)
        self.assertTrue(stratAnalyzer.getLongestDrawDownDuration() == datetime.timedelta())

    def __testIGE_BrokerImpl(self, quantity):
        initialCash = 42.09*quantity
        # This testcase is based on an example from Ernie Chan's book:
        # 'Quantitative Trading: How to Build Your Own Algorithmic Trading Business'
        barFeed = yahoofeed.Feed()
        barFeed.addBarsFromCSV("ige", common.get_data_file_path("sharpe-ratio-test-ige.csv"))
        strat = strategy_test.TestStrategy(barFeed, initialCash)
        strat.setUseAdjustedValues(True)
        strat.setBrokerOrdersGTC(True)
        stratAnalyzer = drawdown.DrawDown()
        strat.attachAnalyzer(stratAnalyzer)
        # Disable volume checks to match book results.
        strat.getBroker().getFillStrategy().setVolumeLimit(None)
        # Manually place the order to get it filled on the first bar.
        order = strat.getBroker().createMarketOrder(broker.Order.Action.BUY, "ige", quantity, True)  # Adj. Close: 42.09
        order.setGoodTillCanceled(True)
        strat.getBroker().placeOrder(order)
        strat.addOrder(datetime.datetime(2007, 11, 13), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, "ige", quantity, True)  # Adj. Close: 127.64
        strat.run()
        # Final cash reflects the full buy-at-42.09 / sell-at-127.64 round trip.
        self.assertTrue(round(strat.getBroker().getCash(), 2) == initialCash + (127.64 - 42.09) * quantity)
        self.assertEqual(strat.orderUpdatedCalls, 4)
        # Expected figures come straight from the book's worked example.
        self.assertTrue(round(stratAnalyzer.getMaxDrawDown(), 5) == 0.31178)
        self.assertTrue(stratAnalyzer.getLongestDrawDownDuration() == datetime.timedelta(days=623))

    def testIGE_Broker(self):
        self.__testIGE_BrokerImpl(1)

    def testIGE_Broker2(self):
        # Doubling the position size scales cash linearly but must leave the
        # relative drawdown figures unchanged.
        self.__testIGE_BrokerImpl(2)

    def __testManualImpl(self, closingPrices, cash):
        # Drives the analyzer with a hand-built daily price series (one share
        # bought on the first bar) and returns the analyzer for inspection.
        barFeed = TestBarFeed(bar.Frequency.DAY)
        bars = build_bars_from_closing_prices(closingPrices)
        barFeed.addBarsFromSequence("orcl", bars)
        strat = strategy_test.TestStrategy(barFeed, cash)
        stratAnalyzer = drawdown.DrawDown()
        strat.attachAnalyzer(stratAnalyzer)
        # Manually place the order to get it filled on the first bar.
        order = strat.getBroker().createMarketOrder(broker.Order.Action.BUY, "orcl", 1, True)
        order.setGoodTillCanceled(True)
        strat.getBroker().placeOrder(order)
        strat.run()
        return stratAnalyzer

    def testManual_NoDD(self):
        # No drawdown
        stratAnalyzer = self.__testManualImpl([10, 10, 10], 10)
        self.assertEqual(round(stratAnalyzer.getMaxDrawDown(), 2), 0)
        self.assertEqual(stratAnalyzer.getLongestDrawDownDuration(), datetime.timedelta())

    def testManual_1DD(self):
        # 10 -> 8 is a 20% drop spanning two daily bars.
        stratAnalyzer = self.__testManualImpl([10, 9, 8], 10)
        self.assertEqual(round(stratAnalyzer.getMaxDrawDown(), 2), 0.2)
        self.assertEqual(stratAnalyzer.getLongestDrawDownDuration(), datetime.timedelta(days=2))

    def testManual_2DD(self):
        # Two drawdowns: the drop after the new high at 11 sets the maximum
        # (~27%), while the earlier 10 -> 8 stretch sets the longest duration.
        stratAnalyzer = self.__testManualImpl([10, 9.5, 9, 8, 11, 8], 10)
        self.assertEqual(round(stratAnalyzer.getMaxDrawDown(), 2), 0.27)
        self.assertEqual(stratAnalyzer.getLongestDrawDownDuration(), datetime.timedelta(days=3))
| apache-2.0 |
lupyuen/RaspberryPiImage | home/pi/GrovePi/Software/Python/others/temboo/Library/RunKeeper/Users/RetrieveUserInfo.py | 4 | 3043 | # -*- coding: utf-8 -*-
###############################################################################
#
# RetrieveUserInfo
# Returns the available resources for a specific user and the URIs for accessing them.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RetrieveUserInfo(Choreography):
    """Temboo Choreo wrapper for the RunKeeper RetrieveUserInfo endpoint."""

    def __init__(self, temboo_session):
        """
        Create a new instance of the RetrieveUserInfo Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(RetrieveUserInfo, self).__init__(temboo_session, '/Library/RunKeeper/Users/RetrieveUserInfo')

    def new_input_set(self):
        # Factory for the input container used to parameterize an execution.
        return RetrieveUserInfoInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in this Choreo's typed result set.
        return RetrieveUserInfoResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Track an in-flight execution of this Choreo.
        return RetrieveUserInfoChoreographyExecution(session, exec_id, path)
class RetrieveUserInfoInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the RetrieveUserInfo
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved after the final step in the OAuth process.)
        """
        super(RetrieveUserInfoInputSet, self)._set_input('AccessToken', value)
class RetrieveUserInfoResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the RetrieveUserInfo Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # Parse a JSON document into Python objects.
        # NOTE(review): the parameter name shadows the builtin 'str'; left
        # unchanged because this is generated SDK code and renaming would
        # alter the public signature.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from RunKeeper.)
        """
        return self._output.get('Response', None)
class RetrieveUserInfoChoreographyExecution(ChoreographyExecution):
    """Execution handle that binds results back to RetrieveUserInfoResultSet."""
    def _make_result_set(self, response, path):
        return RetrieveUserInfoResultSet(response, path)
| apache-2.0 |
Achint08/open-event-orga-server | migrations/versions/a89a143735e5_.py | 10 | 1209 | """empty message
Revision ID: a89a143735e5
Revises: f1531223b9e5
Create Date: 2016-07-24 15:10:24.786000
"""
# revision identifiers, used by Alembic.
revision = 'a89a143735e5'
down_revision = 'f1531223b9e5'
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
def upgrade():
    """Apply the schema change: add a 'timezone' column to call_for_papers,
    relax events.has_session_speakers to nullable, and drop the now unused
    'timezone' column from the session tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('call_for_papers', sa.Column('timezone', sa.String(), nullable=True))
    op.alter_column('events', 'has_session_speakers',
                    existing_type=sa.BOOLEAN(),
                    nullable=True)
    op.drop_column('session', 'timezone')
    op.drop_column('session_version', 'timezone')
    ### end Alembic commands ###
def downgrade():
    """Revert the upgrade: restore the 'timezone' columns on the session
    tables, make events.has_session_speakers NOT NULL again, and drop the
    'timezone' column from call_for_papers.

    NOTE(review): the restored session columns come back empty — the data
    dropped by upgrade() is not recoverable.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('session_version', sa.Column('timezone', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('session', sa.Column('timezone', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.alter_column('events', 'has_session_speakers',
                    existing_type=sa.BOOLEAN(),
                    nullable=False)
    op.drop_column('call_for_papers', 'timezone')
    ### end Alembic commands ###
| gpl-3.0 |
mbauskar/helpdesk-erpnext | erpnext/patches/v5_0/update_item_description_and_image.py | 102 | 1951 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
from frappe.website.utils import find_first_image
from frappe.utils import cstr
import re
def execute():
    """One-shot patch: backfill the 'image' field on transaction item rows by
    splitting each legacy HTML description into a first-image URL and a
    cleaned text description (see extract_image_and_description)."""
    # Pre-compute the split for every Item so the per-doctype loop below can
    # reuse it whenever a row still carries the unmodified master description.
    item_details = frappe._dict()
    for d in frappe.db.sql("select name, description_html, description from `tabItem`", as_dict=1):
        description = cstr(d.description_html).strip() or cstr(d.description).strip()
        image_url, new_desc = extract_image_and_description(description)
        item_details.setdefault(d.name, frappe._dict({
            "old_description": description,
            "new_description": new_desc,
            "image_url": image_url
        }))

    dt_list= ["Purchase Order Item","Supplier Quotation Item", "BOM", "BOM Explosion Item" , \
        "BOM Item", "Opportunity Item" , "Quotation Item" , "Sales Order Item" , "Delivery Note Item" , \
        "Material Request Item" , "Purchase Receipt Item" , "Stock Entry Detail"]

    for dt in dt_list:
        frappe.reload_doctype(dt)
        # Only rows that embed an <img> tag and have no image extracted yet.
        # ('%%' escapes '%' because the string goes through the driver's
        # parameter formatter.)
        records = frappe.db.sql("""select name, `{0}` as item_code, description from `tab{1}`
            where description is not null and image is null and description like '%%<img%%'"""
            .format("item" if dt=="BOM" else "item_code", dt), as_dict=1)

        count = 1
        for d in records:
            # Reuse the Item master's split when this row's description still
            # matches it; otherwise split the row's own HTML from scratch.
            if d.item_code and item_details.get(d.item_code) \
                and cstr(d.description) == item_details.get(d.item_code).old_description:
                image_url = item_details.get(d.item_code).image_url
                desc = item_details.get(d.item_code).new_description
            else:
                image_url, desc = extract_image_and_description(cstr(d.description))

            if image_url:
                frappe.db.sql("""update `tab{0}` set description = %s, image = %s
                    where name = %s """.format(dt), (desc, image_url, d.name))
                count += 1

            # Commit periodically so large tables do not accumulate one
            # enormous transaction.
            if count % 500 == 0:
                frappe.db.commit()
def extract_image_and_description(data):
    """Split an HTML description into (first image URL, remaining text).

    Returns (image_url, desc) where image_url is whatever find_first_image
    extracts from the first <img> tag (falsy if there is none) and desc is
    the input with every <img ...> tag stripped out.
    """
    image_url = find_first_image(data)
    # Raw string so the backslashes reach the regex engine verbatim instead
    # of triggering Python 3's invalid-escape-sequence deprecation warning;
    # the pattern itself is byte-identical to the original.
    desc = re.sub(r"\<img[^>]+\>", "", data)
    return image_url, desc
| agpl-3.0 |
alfa-jor/addon | plugin.video.alfa/lib/python_libtorrent/ios_arm/1.0.8/__init__.py | 362 | 1240 | #-*- coding: utf-8 -*-
'''
python-libtorrent for Kodi (script.module.libtorrent)
Copyright (C) 2015-2016 DiMartino, srg70, RussakHH, aisman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
| gpl-3.0 |
dietrichc/streamline-ppc-reports | examples/dfp/v201408/line_item_service/get_all_line_items.py | 4 | 1764 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all line items.
To create line items, run create_line_items.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
  """Print every line item on the network, one DFP result page at a time.

  Args:
    client: an initialized dfp.DfpClient used to talk to the API.
  """
  # Initialize appropriate service.
  line_item_service = client.GetService('LineItemService', version='v201408')

  # Create a filter statement.
  statement = dfp.FilterStatement()

  # Get line items by statement.
  while True:
    response = line_item_service.getLineItemsByStatement(
        statement.ToStatement())
    if 'results' in response:
      # Display results.
      for line_item in response['results']:
        print ('Line item with id \'%s\', belonging to order id \'%s\', and '
               'named \'%s\' was found.' %
               (line_item['id'], line_item['orderId'], line_item['name']))
      # Advance the paging offset to fetch the next page.
      statement.offset += dfp.SUGGESTED_PAGE_LIMIT
    else:
      # An empty page means all items have been consumed.
      break

  print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
  # Script entry point: load credentials cached on disk and run the example.
  # Initialize client object.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client)
| apache-2.0 |
lz1988/company-site | django/contrib/localflavor/us/us_states.py | 355 | 7655 | """
A mapping of state misspellings/abbreviations to normalized
abbreviations, and alphabetical lists of US states, territories,
military mail regions and non-US states to which the US provides
postal service.
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
# The 48 contiguous states, plus the District of Columbia.
CONTIGUOUS_STATES = (
('AL', 'Alabama'),
('AZ', 'Arizona'),
('AR', 'Arkansas'),
('CA', 'California'),
('CO', 'Colorado'),
('CT', 'Connecticut'),
('DE', 'Delaware'),
('DC', 'District of Columbia'),
('FL', 'Florida'),
('GA', 'Georgia'),
('ID', 'Idaho'),
('IL', 'Illinois'),
('IN', 'Indiana'),
('IA', 'Iowa'),
('KS', 'Kansas'),
('KY', 'Kentucky'),
('LA', 'Louisiana'),
('ME', 'Maine'),
('MD', 'Maryland'),
('MA', 'Massachusetts'),
('MI', 'Michigan'),
('MN', 'Minnesota'),
('MS', 'Mississippi'),
('MO', 'Missouri'),
('MT', 'Montana'),
('NE', 'Nebraska'),
('NV', 'Nevada'),
('NH', 'New Hampshire'),
('NJ', 'New Jersey'),
('NM', 'New Mexico'),
('NY', 'New York'),
('NC', 'North Carolina'),
('ND', 'North Dakota'),
('OH', 'Ohio'),
('OK', 'Oklahoma'),
('OR', 'Oregon'),
('PA', 'Pennsylvania'),
('RI', 'Rhode Island'),
('SC', 'South Carolina'),
('SD', 'South Dakota'),
('TN', 'Tennessee'),
('TX', 'Texas'),
('UT', 'Utah'),
('VT', 'Vermont'),
('VA', 'Virginia'),
('WA', 'Washington'),
('WV', 'West Virginia'),
('WI', 'Wisconsin'),
('WY', 'Wyoming'),
)
# All 50 states, plus the District of Columbia.
US_STATES = (
('AL', 'Alabama'),
('AK', 'Alaska'),
('AZ', 'Arizona'),
('AR', 'Arkansas'),
('CA', 'California'),
('CO', 'Colorado'),
('CT', 'Connecticut'),
('DE', 'Delaware'),
('DC', 'District of Columbia'),
('FL', 'Florida'),
('GA', 'Georgia'),
('HI', 'Hawaii'),
('ID', 'Idaho'),
('IL', 'Illinois'),
('IN', 'Indiana'),
('IA', 'Iowa'),
('KS', 'Kansas'),
('KY', 'Kentucky'),
('LA', 'Louisiana'),
('ME', 'Maine'),
('MD', 'Maryland'),
('MA', 'Massachusetts'),
('MI', 'Michigan'),
('MN', 'Minnesota'),
('MS', 'Mississippi'),
('MO', 'Missouri'),
('MT', 'Montana'),
('NE', 'Nebraska'),
('NV', 'Nevada'),
('NH', 'New Hampshire'),
('NJ', 'New Jersey'),
('NM', 'New Mexico'),
('NY', 'New York'),
('NC', 'North Carolina'),
('ND', 'North Dakota'),
('OH', 'Ohio'),
('OK', 'Oklahoma'),
('OR', 'Oregon'),
('PA', 'Pennsylvania'),
('RI', 'Rhode Island'),
('SC', 'South Carolina'),
('SD', 'South Dakota'),
('TN', 'Tennessee'),
('TX', 'Texas'),
('UT', 'Utah'),
('VT', 'Vermont'),
('VA', 'Virginia'),
('WA', 'Washington'),
('WV', 'West Virginia'),
('WI', 'Wisconsin'),
('WY', 'Wyoming'),
)
# Non-state territories.
US_TERRITORIES = (
('AS', 'American Samoa'),
('GU', 'Guam'),
('MP', 'Northern Mariana Islands'),
('PR', 'Puerto Rico'),
('VI', 'Virgin Islands'),
)
# Military postal "states". Note that 'AE' actually encompasses
# Europe, Canada, Africa and the Middle East.
ARMED_FORCES_STATES = (
('AA', 'Armed Forces Americas'),
('AE', 'Armed Forces Europe'),
('AP', 'Armed Forces Pacific'),
)
# Non-US locations serviced by USPS (under Compact of Free
# Association).
COFA_STATES = (
('FM', 'Federated States of Micronesia'),
('MH', 'Marshall Islands'),
('PW', 'Palau'),
)
# Obsolete abbreviations (no longer US territories/USPS service, or
# code changed).
OBSOLETE_STATES = (
('CM', 'Commonwealth of the Northern Mariana Islands'), # Is now 'MP'
('CZ', 'Panama Canal Zone'), # Reverted to Panama 1979
('PI', 'Philippine Islands'), # Philippine independence 1946
('TT', 'Trust Territory of the Pacific Islands'), # Became the independent COFA states + Northern Mariana Islands 1979-1994
)
# All US states and territories plus DC and military mail.
# Sorted alphabetically by full name (obj[1]) so the tuples are ready for
# use as Django form-field choices.
STATE_CHOICES = tuple(sorted(US_STATES + US_TERRITORIES + ARMED_FORCES_STATES, key=lambda obj: obj[1]))

# All US Postal Service locations (adds the Compact of Free Association
# states), also sorted by full name.
USPS_CHOICES = tuple(sorted(US_STATES + US_TERRITORIES + ARMED_FORCES_STATES + COFA_STATES, key=lambda obj: obj[1]))
STATES_NORMALIZED = {
'ak': 'AK',
'al': 'AL',
'ala': 'AL',
'alabama': 'AL',
'alaska': 'AK',
'american samao': 'AS',
'american samoa': 'AS',
'ar': 'AR',
'ariz': 'AZ',
'arizona': 'AZ',
'ark': 'AR',
'arkansas': 'AR',
'as': 'AS',
'az': 'AZ',
'ca': 'CA',
'calf': 'CA',
'calif': 'CA',
'california': 'CA',
'co': 'CO',
'colo': 'CO',
'colorado': 'CO',
'conn': 'CT',
'connecticut': 'CT',
'ct': 'CT',
'dc': 'DC',
'de': 'DE',
'del': 'DE',
'delaware': 'DE',
'deleware': 'DE',
'district of columbia': 'DC',
'fl': 'FL',
'fla': 'FL',
'florida': 'FL',
'ga': 'GA',
'georgia': 'GA',
'gu': 'GU',
'guam': 'GU',
'hawaii': 'HI',
'hi': 'HI',
'ia': 'IA',
'id': 'ID',
'idaho': 'ID',
'il': 'IL',
'ill': 'IL',
'illinois': 'IL',
'in': 'IN',
'ind': 'IN',
'indiana': 'IN',
'iowa': 'IA',
'kan': 'KS',
'kans': 'KS',
'kansas': 'KS',
'kentucky': 'KY',
'ks': 'KS',
'ky': 'KY',
'la': 'LA',
'louisiana': 'LA',
'ma': 'MA',
'maine': 'ME',
'marianas islands': 'MP',
'marianas islands of the pacific': 'MP',
'marinas islands of the pacific': 'MP',
'maryland': 'MD',
'mass': 'MA',
'massachusetts': 'MA',
'massachussetts': 'MA',
'md': 'MD',
'me': 'ME',
'mi': 'MI',
'mich': 'MI',
'michigan': 'MI',
'minn': 'MN',
'minnesota': 'MN',
'miss': 'MS',
'mississippi': 'MS',
'missouri': 'MO',
'mn': 'MN',
'mo': 'MO',
'mont': 'MT',
'montana': 'MT',
'mp': 'MP',
'ms': 'MS',
'mt': 'MT',
'n d': 'ND',
'n dak': 'ND',
'n h': 'NH',
'n j': 'NJ',
'n m': 'NM',
'n mex': 'NM',
'nc': 'NC',
'nd': 'ND',
'ne': 'NE',
'neb': 'NE',
'nebr': 'NE',
'nebraska': 'NE',
'nev': 'NV',
'nevada': 'NV',
'new hampshire': 'NH',
'new jersey': 'NJ',
'new mexico': 'NM',
'new york': 'NY',
'nh': 'NH',
'nj': 'NJ',
'nm': 'NM',
'nmex': 'NM',
'north carolina': 'NC',
'north dakota': 'ND',
'northern mariana islands': 'MP',
'nv': 'NV',
'ny': 'NY',
'oh': 'OH',
'ohio': 'OH',
'ok': 'OK',
'okla': 'OK',
'oklahoma': 'OK',
'or': 'OR',
'ore': 'OR',
'oreg': 'OR',
'oregon': 'OR',
'pa': 'PA',
'penn': 'PA',
'pennsylvania': 'PA',
'pr': 'PR',
'puerto rico': 'PR',
'rhode island': 'RI',
'ri': 'RI',
's dak': 'SD',
'sc': 'SC',
'sd': 'SD',
'sdak': 'SD',
'south carolina': 'SC',
'south dakota': 'SD',
'tenn': 'TN',
'tennessee': 'TN',
'territory of hawaii': 'HI',
'tex': 'TX',
'texas': 'TX',
'tn': 'TN',
'tx': 'TX',
'us virgin islands': 'VI',
'usvi': 'VI',
'ut': 'UT',
'utah': 'UT',
'va': 'VA',
'vermont': 'VT',
'vi': 'VI',
'viginia': 'VA',
'virgin islands': 'VI',
'virgina': 'VA',
'virginia': 'VA',
'vt': 'VT',
'w va': 'WV',
'wa': 'WA',
'wash': 'WA',
'washington': 'WA',
'west virginia': 'WV',
'wi': 'WI',
'wis': 'WI',
'wisc': 'WI',
'wisconsin': 'WI',
'wv': 'WV',
'wva': 'WV',
'wy': 'WY',
'wyo': 'WY',
'wyoming': 'WY',
}
| bsd-3-clause |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/pdfminer/pdftypes.py | 9 | 7946 | #!/usr/bin/env python
import zlib
from lzw import lzwdecode
from ascii85 import ascii85decode, asciihexdecode
from runlength import rldecode
from ccitt import ccittfaxdecode
from psparser import PSException, PSObject
from psparser import LIT, STRICT
from utils import apply_png_predictor, isnumber
# Literal for the /Crypt filter, which is handled by the decryption layer
# rather than by the generic decode path.
LITERAL_CRYPT = LIT('Crypt')

# Abbreviation of Filter names in PDF 4.8.6. "Inline Images"
# Each tuple pairs a filter's full name with its inline-image short form.
LITERALS_FLATE_DECODE = (LIT('FlateDecode'), LIT('Fl'))
LITERALS_LZW_DECODE = (LIT('LZWDecode'), LIT('LZW'))
LITERALS_ASCII85_DECODE = (LIT('ASCII85Decode'), LIT('A85'))
LITERALS_ASCIIHEX_DECODE = (LIT('ASCIIHexDecode'), LIT('AHx'))
LITERALS_RUNLENGTH_DECODE = (LIT('RunLengthDecode'), LIT('RL'))
LITERALS_CCITTFAX_DECODE = (LIT('CCITTFaxDecode'), LIT('CCF'))
LITERALS_DCT_DECODE = (LIT('DCTDecode'), LIT('DCT'))
## PDF Objects
##
class PDFObject(PSObject):
    """Base class for all objects parsed out of a PDF document."""
    pass

class PDFException(PSException):
    """Root of the PDF-specific exception hierarchy."""
    pass

class PDFTypeError(PDFException):
    """An object did not have the PDF type the caller required."""
    pass

class PDFValueError(PDFException):
    """An object carried an illegal value (e.g. object id 0)."""
    pass

class PDFObjectNotFound(PDFException):
    """An indirect reference points at an object missing from the document."""
    pass

class PDFNotImplementedError(PDFException):
    """The document uses a PDF feature this library does not implement."""
    pass
## PDFObjRef
##
class PDFObjRef(PDFObject):
    """An indirect object reference ("<objid> <genno> R" in PDF syntax)."""

    def __init__(self, doc, objid, _):
        # The third positional is the generation number; it is accepted to
        # match the parser's call signature but intentionally never stored.
        if objid == 0:
            if STRICT:
                raise PDFValueError('PDF object id cannot be 0.')
        self.doc = doc
        self.objid = objid
        #self.genno = genno # Never used.
        return

    def __repr__(self):
        return '<PDFObjRef:%d>' % (self.objid)

    def resolve(self, default=None):
        """Look up the referenced object in the document, or 'default' if
        the object id cannot be found."""
        try:
            return self.doc.getobj(self.objid)
        except PDFObjectNotFound:
            return default
# resolve
def resolve1(x, default=None):
    """Dereference indirect references until a direct object remains.

    Arrays and dictionaries are returned as-is and may still contain
    indirect references inside them (use resolve_all for a deep resolve).
    """
    obj = x
    while isinstance(obj, PDFObjRef):
        obj = obj.resolve(default=default)
    return obj
def resolve_all(x, default=None):
    """Recursively resolves the given object and all the internals.

    Make sure there is no indirect reference within the nested object.
    This procedure might be slow.
    """
    while isinstance(x, PDFObjRef):
        x = x.resolve(default=default)
    if isinstance(x, list):
        # Lists are rebuilt with every element deep-resolved.
        x = [resolve_all(v, default=default) for v in x]
    elif isinstance(x, dict):
        # Dictionaries are resolved in place (Python 2 iteritems).
        for (k, v) in x.iteritems():
            x[k] = resolve_all(v, default=default)
    return x
def decipher_all(decipher, objid, genno, x):
    """Walk a parsed object and decipher every string found inside it.

    Strings are replaced by decipher(objid, genno, data); lists are rebuilt
    element by element, dictionary values are updated in place, and any
    other value is returned untouched.
    """
    if isinstance(x, str):
        return decipher(objid, genno, x)
    if isinstance(x, list):
        return [decipher_all(decipher, objid, genno, v) for v in x]
    if isinstance(x, dict):
        for (k, v) in x.iteritems():
            x[k] = decipher_all(decipher, objid, genno, v)
    return x
# Type cheking
def int_value(x):
    """Resolve x and require an int; returns 0 (or raises PDFTypeError when
    STRICT) if the resolved object is not an integer."""
    x = resolve1(x)
    if not isinstance(x, int):
        if STRICT:
            raise PDFTypeError('Integer required: %r' % x)
        return 0
    return x
def float_value(x):
x = resolve1(x)
if not isinstance(x, float):
if STRICT:
raise PDFTypeError('Float required: %r' % x)
return 0.0
return x
def num_value(x):
x = resolve1(x)
if not isnumber(x):
if STRICT:
raise PDFTypeError('Int or Float required: %r' % x)
return 0
return x
def str_value(x):
x = resolve1(x)
if not isinstance(x, str):
if STRICT:
raise PDFTypeError('String required: %r' % x)
return ''
return x
def list_value(x):
x = resolve1(x)
if not isinstance(x, (list, tuple)):
if STRICT:
raise PDFTypeError('List required: %r' % x)
return []
return x
def dict_value(x):
x = resolve1(x)
if not isinstance(x, dict):
if STRICT:
raise PDFTypeError('Dict required: %r' % x)
return {}
return x
def stream_value(x):
x = resolve1(x)
if not isinstance(x, PDFStream):
if STRICT:
raise PDFTypeError('PDFStream required: %r' % x)
return PDFStream({}, '')
return x
## PDFStream type
##
class PDFStream(PDFObject):
    """A PDF stream object: a dictionary of attributes plus raw byte data.

    The raw data may be enciphered and/or passed through one or more
    filters; decode() applies decryption and filters, caching the result
    in self.data and releasing self.rawdata.
    """

    def __init__(self, attrs, rawdata, decipher=None):
        assert isinstance(attrs, dict)
        self.attrs = attrs
        self.rawdata = rawdata
        self.decipher = decipher
        self.data = None
        self.objid = None
        self.genno = None
        return

    def set_objid(self, objid, genno):
        # The object id / generation number are needed for decryption.
        self.objid = objid
        self.genno = genno
        return

    def __repr__(self):
        if self.data is None:
            assert self.rawdata is not None
            return '<PDFStream(%r): raw=%d, %r>' % (self.objid, len(self.rawdata), self.attrs)
        else:
            assert self.data is not None
            return '<PDFStream(%r): len=%d, %r>' % (self.objid, len(self.data), self.attrs)

    def __contains__(self, name):
        return name in self.attrs

    def __getitem__(self, name):
        return self.attrs[name]

    def get(self, name, default=None):
        return self.attrs.get(name, default)

    def get_any(self, names, default=None):
        """Return the value of the first attribute in *names* that exists."""
        for name in names:
            if name in self.attrs:
                return self.attrs[name]
        return default

    def get_filters(self):
        """Return the stream's filter list (possibly empty)."""
        filters = self.get_any(('F', 'Filter'))
        if not filters:
            return []
        if isinstance(filters, list):
            return filters
        return [filters]

    def decode(self):
        """Decrypt and de-filter self.rawdata into self.data (one-shot)."""
        assert self.data is None and self.rawdata is not None
        data = self.rawdata
        if self.decipher:
            # Handle encryption
            data = self.decipher(self.objid, self.genno, data)
        filters = self.get_filters()
        if not filters:
            self.data = data
            self.rawdata = None
            return
        # The decode parameters do not change per filter iteration; look
        # them up once instead of inside the loop.
        params = self.get_any(('DP', 'DecodeParms', 'FDecodeParms'), {})
        for f in filters:
            if f in LITERALS_FLATE_DECODE:
                # will get errors if the document is encrypted.
                try:
                    data = zlib.decompress(data)
                # 'except E as e' (not the Python-2-only 'except E, e')
                # is valid on both Python 2.6+ and Python 3.
                except zlib.error as e:
                    if STRICT:
                        raise PDFException('Invalid zlib bytes: %r, %r' % (e, data))
                    data = ''
            elif f in LITERALS_LZW_DECODE:
                data = lzwdecode(data)
            elif f in LITERALS_ASCII85_DECODE:
                data = ascii85decode(data)
            elif f in LITERALS_ASCIIHEX_DECODE:
                data = asciihexdecode(data)
            elif f in LITERALS_RUNLENGTH_DECODE:
                data = rldecode(data)
            elif f in LITERALS_CCITTFAX_DECODE:
                data = ccittfaxdecode(data, params)
            elif f == LITERAL_CRYPT:
                # not yet..
                raise PDFNotImplementedError('/Crypt filter is unsupported')
            else:
                raise PDFNotImplementedError('Unsupported filter: %r' % f)
            # apply predictors
            if 'Predictor' in params:
                pred = int_value(params['Predictor'])
                if pred == 1:
                    # no predictor
                    pass
                elif 10 <= pred:
                    # PNG predictor
                    colors = int_value(params.get('Colors', 1))
                    columns = int_value(params.get('Columns', 1))
                    bitspercomponent = int_value(params.get('BitsPerComponent', 8))
                    data = apply_png_predictor(pred, colors, columns, bitspercomponent, data)
                else:
                    raise PDFNotImplementedError('Unsupported predictor: %r' % pred)
        self.data = data
        self.rawdata = None
        return

    def get_data(self):
        """Return decoded stream data, decoding lazily on first access."""
        if self.data is None:
            self.decode()
        return self.data

    def get_rawdata(self):
        return self.rawdata
| agpl-3.0 |
collective/zettwerk.fullcalendar | zettwerk/fullcalendar/tests/base.py | 1 | 2164 | """Test setup for integration and functional tests.
When we import PloneTestCase and then call setupPloneSite(), all of
Plone's products are loaded, and a Plone site will be created. This
happens at module level, which makes it faster to run each test, but
slows down test runner startup.
"""
from Products.Five import zcml
from Products.Five import fiveconfigure
from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
# When ZopeTestCase configures Zope, it will *not* auto-load products
# in Products/. Instead, we have to use a statement such as:
# ztc.installProduct('SimpleAttachment')
# This does *not* apply to products in eggs and Python packages (i.e.
# not in the Products.*) namespace. For that, see below.
# All of Plone's products are already set up by PloneTestCase.
@onsetup
def setup_product():
    """Set up the package and its dependencies.

    The @onsetup decorator causes the execution of this body to be
    deferred until the setup of the Plone site testing layer. We could
    have created our own layer, but this is the easiest way for Plone
    integration tests.
    """
    # Debug mode makes ZCML configuration errors surface immediately
    # while the package's configure.zcml is being loaded.
    fiveconfigure.debug_mode = True
    import zettwerk.fullcalendar
    zcml.load_config('configure.zcml', zettwerk.fullcalendar)
    fiveconfigure.debug_mode = False
    # Egg-style packages (not in Products.*) must be registered with
    # installPackage rather than installProduct.
    ztc.installPackage('zettwerk.fullcalendar')
# Invoked at import time: queue the product setup above and create a
# Plone site with the package installed.
setup_product()
ptc.setupPloneSite(products=['zettwerk.fullcalendar'])
class TestCase(ptc.PloneTestCase):
    """We use this base class for all the tests in this package. If
    necessary, we can put common utility or setup code in here. This
    applies to unit test cases.
    """
class FunctionalTestCase(ptc.FunctionalTestCase):
    """We use this class for functional integration tests that use
    doctest syntax. Again, we can put basic common utility or setup
    code in here.
    """
    def afterSetUp(self):
        # Create a test user with Member and Contributor roles so the
        # functional doctests can exercise content creation.
        roles = ('Member', 'Contributor')
        self.portal.portal_membership.addMember('contributor',
                                                'secret',
                                                roles, [])
| gpl-2.0 |
hurricup/intellij-community | plugins/hg4idea/testData/bin/mercurial/dagutil.py | 93 | 8237 | # dagutil.py - dag utilities for mercurial
#
# Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
# and Peter Arrenbrecht <peter@arrenbrecht.ch>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import nullrev
from i18n import _
class basedag(object):
    '''generic interface for DAGs

    terms:
    "ix" (short for index) identifies a node internally,
    "id" identifies one externally.

    All params are ixs unless explicitly suffixed otherwise.
    Pluralized params are lists or sets.
    '''
    def __init__(self):
        # Cache for the lazily-constructed inverse DAG (see inverse()).
        self._inverse = None
    def nodeset(self):
        '''set of all node idxs'''
        raise NotImplementedError
    def heads(self):
        '''list of head ixs'''
        raise NotImplementedError
    def parents(self, ix):
        '''list of parents ixs of ix'''
        raise NotImplementedError
    def inverse(self):
        '''inverse DAG, where parents becomes children, etc.'''
        raise NotImplementedError
    def ancestorset(self, starts, stops=None):
        '''
        set of all ancestors of starts (incl), but stop walk at stops (excl)
        '''
        raise NotImplementedError
    def descendantset(self, starts, stops=None):
        '''
        set of all descendants of starts (incl), but stop walk at stops (excl)
        '''
        # Descendants in this DAG are exactly ancestors in the inverse DAG.
        return self.inverse().ancestorset(starts, stops)
    def headsetofconnecteds(self, ixs):
        '''
        subset of connected list of ixs so that no node has a descendant in it

        By "connected list" we mean that if an ancestor and a descendant are in
        the list, then so is at least one path connecting them.
        '''
        raise NotImplementedError
    def externalize(self, ix):
        '''return a list of (or set if given a set) of node ids'''
        # Subclasses provide _externalize().
        return self._externalize(ix)
    def externalizeall(self, ixs):
        '''return a list of (or set if given a set) of node ids'''
        # Preserve the container kind of the input (set in, set out).
        ids = self._externalizeall(ixs)
        if isinstance(ixs, set):
            return set(ids)
        return list(ids)
    def internalize(self, id):
        '''return a list of (or set if given a set) of node ixs'''
        return self._internalize(id)
    def internalizeall(self, ids, filterunknown=False):
        '''return a list of (or set if given a set) of node ids'''
        ixs = self._internalizeall(ids, filterunknown)
        if isinstance(ids, set):
            return set(ixs)
        return list(ixs)
class genericdag(basedag):
    '''generic implementations for DAGs'''

    def ancestorset(self, starts, stops=None):
        '''set of all ancestors of starts (incl), stopping at stops (excl)'''
        # Explicit conditional expression instead of the fragile
        # 'x and y or z' idiom.
        stops = set(stops) if stops else set()
        seen = set()
        pending = list(starts)
        # Iterative DFS: walk parent links, never crossing a stop node.
        while pending:
            n = pending.pop()
            if n not in seen and n not in stops:
                seen.add(n)
                pending.extend(self.parents(n))
        return seen

    def headsetofconnecteds(self, ixs):
        '''subset of ixs in which no node has a descendant in ixs'''
        hds = set(ixs)
        if not hds:
            return hds
        # Discard every node that is a parent of some other node in the
        # set; the survivors are the heads.
        for n in ixs:
            for p in self.parents(n):
                hds.discard(p)
        assert hds
        return hds
class revlogbaseddag(basedag):
    '''generic dag interface to a revlog'''
    def __init__(self, revlog, nodeset):
        basedag.__init__(self)
        self._revlog = revlog
        # Heads are computed lazily by the subclass hook _getheads().
        self._heads = None
        self._nodeset = nodeset
    def nodeset(self):
        return self._nodeset
    def heads(self):
        if self._heads is None:
            self._heads = self._getheads()
        return self._heads
    def _externalize(self, ix):
        # Position 7 of a revlog index entry holds the node id (hash).
        return self._revlog.index[ix][7]
    def _externalizeall(self, ixs):
        idx = self._revlog.index
        return [idx[i][7] for i in ixs]
    def _internalize(self, id):
        ix = self._revlog.rev(id)
        if ix == nullrev:
            raise LookupError(id, self._revlog.indexfile, _('nullid'))
        return ix
    def _internalizeall(self, ids, filterunknown):
        rl = self._revlog
        if filterunknown:
            # Drop ids unknown to the revlog (None from nodemap.get) and
            # the null revision.
            return [r for r in map(rl.nodemap.get, ids)
                    if r is not None and r != nullrev]
        # NOTE(review): map() returns a list only on Python 2; callers in
        # basedag wrap the result in list()/set(), so either is fine.
        return map(self._internalize, ids)
class revlogdag(revlogbaseddag):
    '''dag interface to a revlog'''
    def __init__(self, revlog):
        # NOTE(review): xrange is Python-2-only.
        revlogbaseddag.__init__(self, revlog, set(xrange(len(revlog))))
    def _getheads(self):
        return [r for r in self._revlog.headrevs() if r != nullrev]
    def parents(self, ix):
        # Revlog index entries hold the two parent revisions at positions
        # 5 and 6; nullrev marks an absent parent.
        rlog = self._revlog
        idx = rlog.index
        revdata = idx[ix]
        prev = revdata[5]
        if prev != nullrev:
            prev2 = revdata[6]
            if prev2 == nullrev:
                return [prev]
            return [prev, prev2]
        prev2 = revdata[6]
        if prev2 != nullrev:
            return [prev2]
        return []
    def inverse(self):
        # Build (and cache) the child-oriented view of this DAG.
        if self._inverse is None:
            self._inverse = inverserevlogdag(self)
        return self._inverse
    def ancestorset(self, starts, stops=None):
        # Iterative DFS over the raw index for speed; stop nodes are
        # excluded and not walked through.
        rlog = self._revlog
        idx = rlog.index
        stops = stops and set(stops) or set()
        seen = set()
        pending = list(starts)
        while pending:
            rev = pending.pop()
            if rev not in seen and rev not in stops:
                seen.add(rev)
                revdata = idx[rev]
                for i in [5, 6]:  # parent revs
                    prev = revdata[i]
                    if prev != nullrev:
                        pending.append(prev)
        return seen
    def headsetofconnecteds(self, ixs):
        if not ixs:
            return set()
        rlog = self._revlog
        idx = rlog.index
        headrevs = set(ixs)
        # Discard any rev that is a parent of another rev in the set;
        # what remains are the heads.
        for rev in ixs:
            revdata = idx[rev]
            for i in [5, 6]:  # parent revs
                prev = revdata[i]
                if prev != nullrev:
                    headrevs.discard(prev)
        assert headrevs
        return headrevs
    def linearize(self, ixs):
        '''linearize and topologically sort a list of revisions

        The linearization process tries to create long runs of revs where
        a child rev comes immediately after its first parent. This is done by
        visiting the heads of the given revs in inverse topological order,
        and for each visited rev, visiting its second parent, then its first
        parent, then adding the rev itself to the output list.
        '''
        sorted = []
        visit = list(self.headsetofconnecteds(ixs))
        visit.sort(reverse=True)
        finished = set()
        while visit:
            cur = visit.pop()
            if cur < 0:
                # Negative entries are post-order markers: all parents of
                # (-cur - 1) have been handled, so it can be emitted now.
                cur = -cur - 1
                if cur not in finished:
                    sorted.append(cur)
                    finished.add(cur)
            else:
                # First visit: re-push encoded as a marker, then push the
                # unfinished parents so they get emitted before cur.
                visit.append(-cur - 1)
                visit += [p for p in self.parents(cur)
                          if p in ixs and p not in finished]
        assert len(sorted) == len(ixs)
        return sorted
class inverserevlogdag(revlogbaseddag, genericdag):
    '''inverse of an existing revlog dag; see revlogdag.inverse()'''
    def __init__(self, orig):
        revlogbaseddag.__init__(self, orig._revlog, orig._nodeset)
        self._orig = orig
        # The child map and root list are built incrementally by
        # _walkto(), walking from the tip downwards only as far as needed.
        self._children = {}
        self._roots = []
        self._walkfrom = len(self._revlog) - 1
    def _walkto(self, walkto):
        # Extend the child map down to revision 'walkto' (inclusive).
        rev = self._walkfrom
        cs = self._children
        roots = self._roots
        idx = self._revlog.index
        while rev >= walkto:
            data = idx[rev]
            isroot = True
            for prev in [data[5], data[6]]: # parent revs
                if prev != nullrev:
                    cs.setdefault(prev, []).append(rev)
                    isroot = False
            if isroot:
                roots.append(rev)
            rev -= 1
        self._walkfrom = rev
    def _getheads(self):
        # Heads of the inverse DAG are the roots of the original one.
        self._walkto(nullrev)
        return self._roots
    def parents(self, ix):
        # "Parents" here means children in the original DAG.
        if ix is None:
            return []
        if ix <= self._walkfrom:
            self._walkto(ix)
        return self._children.get(ix, [])
    def inverse(self):
        return self._orig
| apache-2.0 |
jluebbe/tools | am335x-pinmux-dts.py | 1 | 1408 | #!/usr/bin/python
# coding: utf-8
# Copyright (c) 2013 Jan Lübbe - see LICENSE file
import re
try:
from exceptions import Exception
except ImportError:
# Python3
pass
# Map each CONTROL_PADCONF_* register name to its offset (relative to the
# 0x800 pad-configuration base) by scanning mux.h.
offset = re.compile(r"(CONTROL_PADCONF_.*?)\s+(0x\w+)")
offsets = {}
for line in open('mux.h').readlines():
    m = offset.search(line)
    if m:
        offsets[m.group(1)] = int(m.group(2), 16) - 0x800
# Parse pinmux.h entries shaped like
#   CONTROL_PADCONF_X, (IEN | PU | MODE7 )) /* signal */
# and emit device-tree "offset value /* comment */" lines.
mux = re.compile(r"(CONTROL_PADCONF_.*?), \((\w+) \| (\w+) \| (\w+) \)\) /\* ([\w\[\]]+) \*/")
for line in open('pinmux.h').readlines():
    #print line
    m = mux.search(line)
    if m:
        off = offsets[m.group(1)]
        reg = 0
        # Comment: register name without the 'CONTROL_PADCONF_' prefix,
        # plus the signal name when it differs.
        comment = m.group(1)[16:].lower()
        if comment != m.group(5).lower():
            comment += '.'+m.group(5).lower()
        if m.group(2) == 'IEN':
            # IEN sets bit 5 (input/receiver enable -- presumably the
            # AM335x RXACTIVE pad bit; confirm against the TRM).
            reg |= (1<<5)
            comment += ', INPUT'
        elif m.group(2) == 'IDIS':
            comment += ', OUTPUT'
        else:
            raise Exception("bad field 2: %s" % m.group(2))
        if m.group(3) == 'PD':
            # Pull-down is the all-zero pull configuration.
            comment += '_PULLDOWN'
        elif m.group(3) == 'PU':
            # PU sets bit 4 (pull-up select, pull enabled).
            reg |= (2<<3)
            comment += '_PULLUP'
        elif m.group(3) == 'OFF':
            # OFF sets bit 3 (pull disabled).
            reg |= (1<<3)
        else:
            raise Exception("bad field 3: %s" % m.group(3))
        if m.group(4).startswith('MODE'):
            # Low bits carry the mux mode number from MODE<n>.
            reg |= int(m.group(4)[4:])
            comment += ' | ' + m.group(4)
        else:
            raise Exception("bad field 4: %s" % m.group(4))
        print(4*'\t' + '0x%03x 0x%02x' % (off, reg) + '\t' + '/* %s */' % comment)
| mit |
yfried/ansible | test/units/modules/network/nxos/test_nxos_nxapi.py | 68 | 3057 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.nxos import nxos_nxapi
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosNxapiModule(TestNxosModule):
    """Unit tests for the nxos_nxapi Ansible module."""

    module = nxos_nxapi
    def setUp(self):
        super(TestNxosNxapiModule, self).setUp()
        # Patch out all device I/O: command execution, config loading and
        # capability discovery are replaced with mocks.
        self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_nxapi.run_commands')
        self.run_commands = self.mock_run_commands.start()
        self.mock_load_config = patch('ansible.modules.network.nxos.nxos_nxapi.load_config')
        self.load_config = self.mock_load_config.start()
        self.mock_get_capabilities = patch('ansible.modules.network.nxos.nxos_nxapi.get_capabilities')
        self.get_capabilities = self.mock_get_capabilities.start()
        # Pretend the device is an N7K running 8.3(1) over the CLI transport.
        self.get_capabilities.return_value = {'device_info': {'network_os_platform': 'N7K-C7018', 'network_os_version': '8.3(1)'}, 'network_api': 'cliconf'}
    def tearDown(self):
        super(TestNxosNxapiModule, self).tearDown()
        self.mock_run_commands.stop()
        self.mock_load_config.stop()
        self.mock_get_capabilities.stop()
    def load_fixtures(self, commands=None, device=''):
        # Serve canned command output from fixture files named after the
        # command (spaces become underscores, '| ...' pipe suffixes dropped).
        def load_from_file(*args, **kwargs):
            module, commands = args
            module_name = self.module.__name__.rsplit('.', 1)[1]
            output = list()
            for command in commands:
                filename = str(command).split(' | ')[0].replace(' ', '_')
                output.append(load_fixture(module_name, filename, device))
            return output
        self.run_commands.side_effect = load_from_file
        self.load_config.return_value = None
    def test_nxos_nxapi_no_change(self):
        # Requesting the device's current state must emit no commands.
        set_module_args(dict(http=True, https=False, http_port=80, https_port=443, sandbox=False))
        self.execute_module_devices(changed=False, commands=[])
    def test_nxos_nxapi_disable(self):
        set_module_args(dict(state='absent'))
        self.execute_module_devices(changed=True, commands=['no feature nxapi'])
    def test_nxos_nxapi_no_http(self):
        set_module_args(dict(https=True, http=False, https_port=8443))
        self.execute_module_devices(changed=True, commands=['no nxapi http', 'nxapi https port 8443'])
| gpl-3.0 |
Drooids/odoo | addons/crm/calendar_event.py | 375 | 1829 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import logging
_logger = logging.getLogger(__name__)
class calendar_event(osv.Model):
    """ Model for Calendar Event

    Extends calendar.event with back-links to the CRM phonecall and
    opportunity the meeting originates from.
    """
    _inherit = 'calendar.event'
    _columns = {
        'phonecall_id': fields.many2one('crm.phonecall', 'Phonecall'),
        'opportunity_id': fields.many2one('crm.lead', 'Opportunity', domain="[('type', '=', 'opportunity')]"),
    }
    def create(self, cr, uid, vals, context=None):
        # After standard creation, log the meeting on the linked
        # opportunity's message thread (if any).
        res = super(calendar_event, self).create(cr, uid, vals, context=context)
        obj = self.browse(cr, uid, res, context=context)
        if obj.opportunity_id:
            self.pool.get('crm.lead').log_meeting(cr, uid, [obj.opportunity_id.id], obj.name, obj.start, obj.duration, context=context)
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
uber/vertica-python | vertica_python/vertica/column.py | 1 | 8347 | # Copyright (c) 2018-2019 Micro Focus or one of its affiliates.
# Copyright (c) 2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) 2013-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function, division, absolute_import
import re
from collections import namedtuple
from datetime import date, datetime
from decimal import Decimal
import pytz
# noinspection PyCompatibility,PyUnresolvedReferences
from builtins import str
from dateutil import parser
from .. import errors
from .. import datatypes
from ..compat import as_str, as_text
YEARS_RE = re.compile(r"^([0-9]+)-")
UTF_8 = 'utf-8'
# these methods are bad...
#
# a few timestamp with tz examples:
# 2013-01-01 00:00:00
# 2013-01-01 00:00:00+00
# 2013-01-01 00:00:00.01+00
# 2013-01-01 00:00:00.00001+00
#
# Vertica stores all data in UTC:
# "TIMESTAMP WITH TIMEZONE (TIMESTAMPTZ) data is stored in GMT (UTC) by
# converting data from the current local time zone to GMT."
# Vertica fetches data in local timezone:
# "When TIMESTAMPTZ data is used, data is converted back to use the current
# local time zone"
# If vertica boxes are on UTC, you should never have a non +00 offset (as
# far as I can tell) ie. inserting '2013-01-01 00:00:00.01 EST' to a
# timestamptz type stores: 2013-01-01 05:00:00.01+00
# select t AT TIMEZONE 'America/New_York' returns: 2012-12-31 19:00:00.01
def timestamp_parse(s):
    """Parse a TIMESTAMP value, clamping out-of-range years to 9999."""
    text = as_str(s)
    try:
        return _timestamp_parse(text)
    except ValueError:
        # datetime cannot represent years above 9999: split the year off,
        # parse the remainder, then clamp the year back in.
        match = YEARS_RE.match(text)
        if not match:
            raise errors.DataError('Timestamp value not supported: %s' % text)
        year = match.groups()[0]
        parsed = _timestamp_parse_without_year(text[len(year) + 1:])
        return parsed.replace(year=min(int(year), 9999))
def _timestamp_parse(s):
if len(s) == 19:
return datetime.strptime(s, '%Y-%m-%d %H:%M:%S')
return datetime.strptime(s, '%Y-%m-%d %H:%M:%S.%f')
def _timestamp_parse_without_year(s):
if len(s) == 14:
return datetime.strptime(s, '%m-%d %H:%M:%S')
return datetime.strptime(s, '%m-%d %H:%M:%S.%f')
def timestamp_tz_parse(s):
    # Parse a TIMESTAMPTZ value into a timezone-aware datetime.
    s = as_str(s)
    # if timezone is simply UTC...
    if s.endswith('+00'):
        # remove time zone suffix and attach pytz.UTC explicitly
        ts = timestamp_parse(s[:-3].encode(encoding=UTF_8, errors='strict'))
        ts = ts.replace(tzinfo=pytz.UTC)
        return ts
    # otherwise do a real parse with dateutil (slower)
    return parser.parse(s)
def date_parse(s):
    """
    Parses value of a DATE type ('YYYY-MM-DD').

    :param s: string to parse into date
    :return: an instance of datetime.date
    :raises NotSupportedError when a date Before Christ is encountered

    Components larger than 9999 are clamped (datetime.date cannot
    represent years past 9999).
    """
    text = as_str(s)
    if text.endswith(' BC'):
        raise errors.NotSupportedError('Dates Before Christ are not supported. Got: {0}'.format(text))
    parts = [min(int(part), 9999) for part in text.split('-')]
    return date(*parts)
def time_parse(s):
    """Parse a TIME value ('HH:MM:SS[.ffffff]') into datetime.time."""
    text = as_str(s)
    # Without fractional seconds the string is exactly 8 characters.
    fmt = '%H:%M:%S' if len(text) == 8 else '%H:%M:%S.%f'
    return datetime.strptime(text, fmt).time()
# Immutable DB-API style column description (PEP 249 'description' tuple).
ColumnTuple = namedtuple('Column', ['name', 'type_code', 'display_size', 'internal_size',
                                    'precision', 'scale', 'null_ok'])
class Column(object):
    # Describes one result-set column and converts its raw wire values
    # into Python objects via a per-type converter.
    def __init__(self, col, unicode_error=None):
        self.name = col['name']
        self.type_code = col['data_type_oid']
        self.type_name = col['data_type_name']
        self.display_size = datatypes.getDisplaySize(col['data_type_oid'], col['type_modifier'])
        self.internal_size = col['data_type_size']
        self.precision = datatypes.getPrecision(col['data_type_oid'], col['type_modifier'])
        self.scale = datatypes.getScale(col['data_type_oid'], col['type_modifier'])
        self.null_ok = col['null_ok']
        self.is_identity = col['is_identity']
        # Codec error policy for undecodable bytes ('strict', 'replace', ...).
        self.unicode_error = unicode_error
        self.data_type_conversions = Column._data_type_conversions(unicode_error=self.unicode_error)
        self.props = ColumnTuple(self.name, self.type_code, self.display_size, self.internal_size,
                                 self.precision, self.scale, self.null_ok)
        # WORKAROUND: Treat LONGVARCHAR as VARCHAR
        if self.type_code == 115:
            self.type_code = 9
        # Mark type_code as unspecified if not within known data types
        if self.type_code >= len(self.data_type_conversions):
            self.type_code = 0
        # self.converter = self.data_type_conversions[col['data_type_oid']][1]
        self.converter = self.data_type_conversions[self.type_code][1]
        # things that are actually sent
        # self.name = col['name']
        # self.data_type = self.data_type_conversions[col['data_type_oid']][0]
        # self.type_modifier = col['type_modifier']
        # self.format = 'text' if col['format_code'] == 0 else 'binary'
        # self.table_oid = col['table_oid']
        # self.attribute_number = col['attribute_number']
        # self.size = col['data_type_size']
    @classmethod
    def _data_type_conversions(cls, unicode_error=None):
        # Conversion table indexed by type oid: (type name, converter).
        # A converter of None means the raw value is passed through.
        if unicode_error is None:
            unicode_error = 'strict'
        return [
            ('unspecified', None),
            ('tuple', None),
            ('pos', None),
            ('record', None),
            ('unknown', None),
            ('bool', lambda s: 't' == str(s, encoding=UTF_8, errors=unicode_error)),
            ('integer', lambda s: int(s)),
            ('float', lambda s: float(s)),
            ('char', lambda s: str(s, encoding=UTF_8, errors=unicode_error)),
            ('varchar', lambda s: str(s, encoding=UTF_8, errors=unicode_error)),
            ('date', date_parse),
            ('time', time_parse),
            ('timestamp', timestamp_parse),
            ('timestamp_tz', timestamp_tz_parse),
            ('interval', None),
            ('time_tz', None),
            ('numeric',
             lambda s: Decimal(str(s, encoding=UTF_8, errors=unicode_error))),
            ('bytea', None),
            ('rle_tuple', None),
        ]
    @classmethod
    def data_types(cls):
        # Names of all known data types, in oid order.
        return tuple([name for name, value in cls._data_type_conversions()])
    def convert(self, s):
        # Convert one raw value; None (SQL NULL) passes through unchanged.
        if s is None:
            return
        return self.converter(s) if self.converter is not None else s
    def __str__(self):
        return as_str(str(self.props))
    def __unicode__(self):
        return as_text(str(self.props))
    def __repr__(self):
        return as_str(str(self.props))
    def __iter__(self):
        # Iterate like the PEP 249 seven-item description tuple.
        for prop in self.props:
            yield prop
    def __getitem__(self, key):
        return self.props[key]
| apache-2.0 |
markYoungH/chromium.src | native_client_sdk/src/tools/httpd.py | 9 | 6987 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import BaseHTTPServer
import logging
import multiprocessing
import optparse
import os
import SimpleHTTPServer # pylint: disable=W0611
import socket
import sys
import time
import urlparse
if sys.version_info < (2, 7, 0):
sys.stderr.write("python 2.7 or later is required run this script\n")
sys.exit(1)
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
NACL_SDK_ROOT = os.path.dirname(SCRIPT_DIR)
# We only run from the examples directory so that not too much is exposed
# via this HTTP server. Everything in the directory is served, so there should
# never be anything potentially sensitive in the serving directory, especially
# if the machine might be a multi-user machine and not all users are trusted.
# We only serve via the loopback interface.
def SanityCheckDirectory(dirname):
    """Exit unless |dirname| lies within the NaCl SDK tree.

    Everything under the served directory is exposed over HTTP, so refuse
    to serve arbitrary directories (use --no-dir-check to override).
    """
    abs_serve_dir = os.path.abspath(dirname)
    # Verify we don't serve anywhere above NACL_SDK_ROOT.  Compare on a
    # path-component boundary so a sibling like "<root>2" does not pass.
    if (abs_serve_dir == NACL_SDK_ROOT or
        abs_serve_dir.startswith(NACL_SDK_ROOT + os.sep)):
        return
    logging.error('For security, httpd.py should only be run from within the')
    logging.error('example directory tree.')
    # Lazy %-style args: formatting happens only if the record is emitted.
    logging.error('Attempting to serve from %s.', abs_serve_dir)
    logging.error('Run with --no-dir-check to bypass this check.')
    sys.exit(1)
class HTTPServer(BaseHTTPServer.HTTPServer):
    """HTTP server that can be stopped cooperatively via Shutdown()."""

    def __init__(self, *args, **kwargs):
        # Extra keyword arguments are accepted but intentionally ignored.
        BaseHTTPServer.HTTPServer.__init__(self, *args)
        self.result = 0
        self.running = True

    def Shutdown(self, result=0):
        """Ask the serve loop to exit, recording |result| as the exit code."""
        self.running = False
        self.result = result
class HTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    # Standard file-serving handler plus a remote-shutdown hook:
    # requesting any path with ?quit=1 stops the server cleanly.
    def _SendNothingAndDie(self, result=0):
        # Reply with an empty 200 response, then ask the server to exit.
        self.send_response(200, 'OK')
        self.send_header('Content-type', 'text/html')
        self.send_header('Content-length', '0')
        self.end_headers()
        self.server.Shutdown(result)
    def do_GET(self):
        # Browsing to ?quit=1 will kill the server cleanly.
        _, _, _, query, _ = urlparse.urlsplit(self.path)
        if query:
            params = urlparse.parse_qs(query)
            if '1' in params.get('quit', []):
                self._SendNothingAndDie()
                return
        # Otherwise fall back to plain static file serving.
        return SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
class LocalHTTPServer(object):
    """Class to start a local HTTP server as a child process.

    Parent/child protocol over a multiprocessing pipe:
      - the child sends the bound port number once it is listening;
      - the parent sends False to request shutdown;
      - the child sends an integer result code when it stops.
    """
    def __init__(self, dirname, port):
        parent_conn, child_conn = multiprocessing.Pipe()
        self.process = multiprocessing.Process(
            target=_HTTPServerProcess,
            args=(child_conn, dirname, port, {}))
        self.process.start()
        if parent_conn.poll(10): # wait 10 seconds
            self.port = parent_conn.recv()
        else:
            raise Exception('Unable to launch HTTP server.')
        self.conn = parent_conn
    def ServeForever(self):
        """Serve until the child HTTP process tells us to stop.

        Returns:
          The result from the child (as an errorcode), or 0 if the server was
          killed not by the child (by KeyboardInterrupt for example).
        """
        child_result = 0
        try:
            # Block on this pipe, waiting for a response from the child process.
            child_result = self.conn.recv()
        except KeyboardInterrupt:
            pass
        finally:
            self.Shutdown()
        return child_result
    def ServeUntilSubprocessDies(self, process):
        """Serve until the child HTTP process tells us to stop or |subprocess| dies.

        Returns:
          The result from the child (as an errorcode), or 0 if |subprocess| died,
          or the server was killed some other way (by KeyboardInterrupt for
          example).
        """
        child_result = 0
        try:
            while True:
                # |process| exited: stop serving with a neutral result.
                if process.poll() is not None:
                    child_result = 0
                    break
                # Child posted its result: stop serving.
                if self.conn.poll():
                    child_result = self.conn.recv()
                    break
                time.sleep(0)
        except KeyboardInterrupt:
            pass
        finally:
            self.Shutdown()
        return child_result
    def Shutdown(self):
        """Send a message to the child HTTP server process and wait for it to
        finish."""
        self.conn.send(False)
        self.process.join()
    def GetURL(self, rel_url):
        """Get the full url for a file on the local HTTP server.

        Args:
          rel_url: A URL fragment to convert to a full URL. For example,
              GetURL('foobar.baz') -> 'http://localhost:1234/foobar.baz'
        """
        return 'http://localhost:%d/%s' % (self.port, rel_url)
def _HTTPServerProcess(conn, dirname, port, server_kwargs):
    """Run a local httpserver with the given port or an ephemeral port.

    This function assumes it is run as a child process using multiprocessing.

    Args:
      conn: A connection to the parent process. The child process sends
          the local port, and waits for a message from the parent to
          stop serving. It also sends a "result" back to the parent -- this can
          be used to allow a client-side test to notify the server of results.
      dirname: The directory to serve. All files are accessible through
          http://localhost:<port>/path/to/filename.
      port: The port to serve on. If 0, an ephemeral port will be chosen.
      server_kwargs: A dict that will be passed as kwargs to the server.
    """
    try:
        os.chdir(dirname)
        httpd = HTTPServer(('', port), HTTPRequestHandler, **server_kwargs)
    except socket.error as e:
        sys.stderr.write('Error creating HTTPServer: %s\n' % e)
        sys.exit(1)
    try:
        conn.send(httpd.server_address[1]) # the chosen port number
        # Short timeout so the loop regularly checks the pipe for a
        # shutdown message from the parent.
        httpd.timeout = 0.5 # seconds
        while httpd.running:
            # Flush output for MSVS Add-In.
            sys.stdout.flush()
            sys.stderr.flush()
            httpd.handle_request()
            if conn.poll():
                httpd.running = conn.recv()
    except KeyboardInterrupt:
        pass
    finally:
        # Report the final result code to the parent before exiting.
        conn.send(httpd.result)
        conn.close()
def main(args):
    """Parse arguments, start the local HTTP server and serve until told
    to quit (e.g. by browsing to /?quit=1).

    Returns the server's exit code.
    """
    parser = optparse.OptionParser()
    parser.add_option('-C', '--serve-dir',
                      help='Serve files out of this directory.',
                      default=os.path.abspath('.'))
    parser.add_option('-p', '--port',
                      help='Run server on this port.', default=5103)
    parser.add_option('--no-dir-check', '--no_dir_check',
                      help='No check to ensure serving from safe directory.',
                      dest='do_safe_check', action='store_false', default=True)
    # To enable bash completion for this command first install optcomplete
    # and then add this line to your .bashrc:
    #  complete -F _optcomplete httpd.py
    try:
        import optcomplete
        optcomplete.autocomplete(parser)
    except ImportError:
        pass
    options, args = parser.parse_args(args)
    if options.do_safe_check:
        SanityCheckDirectory(options.serve_dir)
    server = LocalHTTPServer(options.serve_dir, int(options.port))
    # Serve until the client tells us to stop. When it does, it will give us an
    # errorcode.
    # print as a function call: valid on both Python 2 (single parenthesized
    # argument) and Python 3, unlike the original print statement.
    print('Serving %s on %s...' % (options.serve_dir, server.GetURL('')))
    return server.ServeForever()
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
pandeyop/tempest | tempest/api/image/v2/test_images_tags.py | 9 | 1504 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest.api.image import base
from tempest import test
class ImagesTagsTest(base.BaseV2ImageTest):
    """Exercise adding and removing tags via the Glance v2 image API."""

    @test.idempotent_id('10407036-6059-4f95-a2cd-cbbbee7ed329')
    def test_update_delete_tags_for_image(self):
        # Create a private raw image to attach the tag to, and make sure
        # it is deleted again when the test finishes.
        image = self.create_image(container_format='bare',
                                  disk_format='raw',
                                  visibility='private')
        image_id = image['id']
        self.addCleanup(self.client.delete_image, image_id)
        tag = data_utils.rand_name('tag')
        # Attach the tag and confirm the server now reports it.
        self.client.add_image_tag(image_id, tag)
        shown = self.client.show_image(image_id)
        self.assertIn(tag, shown['tags'])
        # Detach the tag and confirm it is gone again.
        self.client.delete_image_tag(image_id, tag)
        shown = self.client.show_image(image_id)
        self.assertNotIn(tag, shown['tags'])
| apache-2.0 |
TheRinger/piLOG | main.py | 1 | 7962 | # Created by Tyrell Rutledge - tytek2012@gmail.com & IRC Freenode as TheRinger, Contributions welcome and will be credited.
from influxdb import InfluxDBClient
from influxdb import SeriesHelper
import RPi.GPIO as GPIO
import time
import datetime
import logging
import random
import Adafruit_DHT
import sys
import serial
import os
# InfluxDB connections settings
host = 'localhost'
port = 8086
user = 'root'
password = 'root'
dbname = 'test'
dbname2 = 'relays'
# Separate clients: sensor readings go to 'test', relay states to 'relays'.
myclient = InfluxDBClient(host, port, user, password, dbname)
relayclient = InfluxDBClient(host, port, user, password, dbname2)
# True when running on a Raspberry Pi with GPIO available (see Relay).
rpi_device = True
driver = Adafruit_DHT.DHT11
# Series names logged each loop pass; order must match the statuses list
# built in the main loop below.
names = ['fahrenheit','celsius','humidity','co2']
daytime = True # Set by Photocell Reading.. (Tell Co2 to sleep while it's dark, plants don't use co2 at night, no need for timer.)
off_button = False # External Override Button
co2_low = 456 # Co2 must drop to this number before the CO2 Generator will turn on
co2_high = 900 # Once this level of CO2 PPM is reached the CO2 Generator will turn off
temp_low = 75 # At this temperature the exhaust fan will turn off
temp_high = 80 # At this temperature the exhaust fan will turn on
humidity_high = 65 # Excess Co2 creates humidity, at this percent, the exhaust fan should turn on
# Generic DHT-family sensor wrapper; one instance per reading type (the
# same physical DHT11 data pin is shared by several instances below).
class Sensor(object):
    """Reads a single quantity (temperature or humidity) from a DHT sensor."""
    def __init__(self, pin, name, driver):
        # pin: GPIO pin the sensor data line is on.
        # name: one of 'fahrenheit', 'celsius' or 'humidity' (drives status()).
        # driver: Adafruit_DHT sensor-type constant (DHT11 here).
        self.pin = pin
        self.name = name
        self.driver = driver
    def status(self):
        # Dispatch on the configured reading type.
        if 'celsius' in self.name:
            return self.celsius()
        elif 'fahrenheit' in self.name:
            return self.fahrenheit()
        elif 'humidity' in self.name:
            return self.humidity()
        else:
            # Unknown type: prints a message and implicitly returns None.
            print "No Results for that sensor type"
    def loc(self):
        # Human-readable description of where this sensor is attached.
        return "{0} sensor, located on pin {1}".format(self.name, self.pin)
    def show(self):
        print self.name
    def fahrenheit(self):
        # NOTE(review): read_retry may return None readings after repeated
        # failures, which would make the arithmetic below raise -- confirm.
        result = Adafruit_DHT.read_retry(self.driver, self.pin)
        celsius = result[1]
        fahrenheit = 9.0/5.0 * celsius + 32
        return fahrenheit
    def humidity(self):
        # result is (humidity, temperature) judging by the indexing used here.
        result = Adafruit_DHT.read_retry(self.driver, self.pin)
        humidity = result[0]
        return humidity
    def celsius(self):
        result = Adafruit_DHT.read_retry(self.driver, self.pin)
        celsius = result[1]
        return celsius
    def check(self):
        # NOTE(review): 'self' can never be None inside a bound method, so
        # the else branch below is unreachable dead code.
        if self is not None:
            return
        else:
            print "error"
# Class for the setup of a K30 Co2 Sensor connected to the serial bus
class Co2(object):
    """Reads a CO2 PPM value from a K30 sensor over /dev/ttyAMA0."""
    def __init__(self, name, pins, driver):
        self.name = name
        # NOTE(review): this attribute shadows the pins() method defined
        # below, which therefore can never be called on an instance.
        self.pins = pins
        self.driver = driver
        self.ser = serial.Serial("/dev/ttyAMA0")
        self.ser.flushInput()
        print "Serial Connected!"
    def __call__(self):
        # Lets the instance be invoked directly, e.g. co2() in the main loop.
        return self.status()
    def status(self):
        # Send the sensor's read-request frame; the 7-byte reply carries
        # the PPM value in bytes 3 (high byte) and 4 (low byte).
        self.ser.write("\xFE\x44\x00\x08\x02\x9F\x25")
        time.sleep(.01)
        resp = self.ser.read(7)
        high = ord(resp[3])
        low = ord(resp[4])
        co2 = (high*256) + low
        return co2
    def show(self):
        return self.name
    def pins(self):
        # NOTE(review): unreachable -- shadowed by the self.pins attribute.
        return "Connected to pins: {0}".format(self.pins)
    def info(self):
        return "{0} sensor, connected to pins {1}".format(str(self.name), str(self.pins))
    def flush(self):
        # Discard any stale bytes sitting in the serial input buffer.
        print "Flushing {0} sensor".format(self.name)
        self.ser.flushInput()
        print "Serial Input has been flushed successfully"
# Class for setting up relays
class Relay(object):
    """One GPIO-driven relay channel (e.g. an SSR-25da).

    NOTE(review): when the module-level ``rpi_device`` flag is False the
    pin/name attributes are never assigned, so every other method would
    raise AttributeError -- confirm off-Pi use is not expected.
    """

    def __init__(self, pin, name, start_high=False):
        """Configure *pin* as an output, optionally driving it HIGH at once."""
        if(rpi_device):
            self.pin = pin
            self.name = name
            GPIO.setmode(GPIO.BOARD)
            # Set initial state of pin.
            if start_high is True:
                state = "On"
                GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.HIGH)
            else:
                state = "Off"
                GPIO.setup(self.pin, GPIO.OUT)
            logging.info("{0} relay initialized on pin {1} and is currently {2}".format(name, pin, state))
        logging.info(" Relay Set ")

    def status(self):
        """Return the GPIO function name currently assigned to this pin.

        BUG FIX: the old for/else loop returned "none" (or logged
        "Status Unknown") on the FIRST dictionary key that did not match,
        so it rarely reported the real port type.  A direct dictionary
        lookup is what was intended.
        """
        port_list = {0: "GPIO.OUT", 1: "GPIO.IN", 40: "GPIO.SERIAL", 41: "GPIO.SPI", 42: "GPIO.I2C", 43: "GPIO.HARD_PWM", -1: "GPIO.UNKNOWN"}
        port_used = GPIO.gpio_function(self.pin)
        try:
            return port_list[port_used]
        except KeyError:
            logging.info(" Status Unknown ")
            return "none"

    @property
    def state(self):
        """True when the pin reads HIGH, False when LOW ("error" otherwise)."""
        if GPIO.input(self.pin) == 0:
            return False
        elif GPIO.input(self.pin) == 1:
            return True
        else:
            return "error"

    @property
    def see_state(self):
        """Map LOW/HIGH to 1/1000 -- spread apart so graphed series are visible."""
        if GPIO.input(self.pin) == 0:
            return 1
        elif GPIO.input(self.pin) == 1:
            return 1000
        else:
            return "error"

    @property
    def on(self):
        """Energise the relay (drive the pin HIGH)."""
        time.sleep(0.1)
        logging.info(" Relay on ")
        return GPIO.output(self.pin, True)

    @property
    def off(self):
        """De-energise the relay (drive the pin LOW)."""
        time.sleep(0.1)
        logging.info(" Relay off ")
        return GPIO.output(self.pin, False)

    def clean(self):
        """Release the pin back to its default state.

        BUG FIX: the old code was ``logging.info(...).format(...)`` which
        called .format on the None returned by logging.info and raised
        AttributeError; the message is now formatted before being logged.
        """
        GPIO.cleanup(self.pin)
        logging.info(" Pin {0}, cleared ".format(self.pin))
# First attempt at hysteresis using python.. This will most likely need revised, with fault protection.
def mon_co2(low, high):
    # Turn the CO2 generator on only in daylight, when CO2 has dropped
    # below the low threshold and the tent is not already warm; turn it
    # off once CO2 is above the high threshold or the override is pressed.
    # NOTE(review): the low/high parameters are ignored -- the module
    # globals co2_low/co2_high are used instead; confirm which is intended.
    if co2() < co2_low and daytime == True and f.status() <= temp_low:
        print " Relay {0} is turning on ".format(name1)
        return r1.on
    elif co2() > co2_high or off_button == True:
        print " Relay {0} is turning off ".format(name1)
        return r1.off
def mon_temp(low, high):
if f.status() >= temp_high or h.status >= humidity_high:
print " Relay {0} is turning off ".format(name1)
r1.off
print " Relay {0} turning on off ".format(name2)
return r2.on
#These help get the data ready for pushing to Influxdb, there is a better way and should be implemented.
class Series(SeriesHelper):
    """InfluxDB series for sensor readings; committed manually each loop pass."""
    class Meta:
        client = myclient
        series_name = 'sensor.{sensor_name}'
        fields = ['stat']
        tags = ['sensor_name']
class Relay_Log(SeriesHelper):
    """InfluxDB series for relay states; autocommit pushes each point at once."""
    class Meta:
        client = relayclient
        series_name = 'relay.{name}'
        fields = ['state']
        tags = ['name']
        autocommit = True
# Instantiate the hardware: one CO2 sensor, three views of the same DHT11
# (pin 23), and the two relay channels driven by the hysteresis functions.
co2 = Co2(name="K30 Co2", pins="34,33", driver="ttyAMA0") # K30 Co2 Sensor
f = Sensor(pin=23, name="fahrenheit", driver=driver) # DH11 Fahrenheit Sensor
c = Sensor(pin=23, name="celsius", driver=driver) # DH11 Celsius
h = Sensor(pin=23, name="humidity", driver=driver) # DH11 Humidity
r1 = Relay(40, "Co2_Generator", start_high=False) # This SSR-25da controls the on/off of a Co2 Generator device
r2 = Relay(38, "Exhaust_Fan", start_high=False) # This SSR-25da controls the on/off of an Exhaust Fan
name1 = r1.name # Influxdb gives errors unless these are converted to strings first
name2 = r2.name # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Main control loop: apply the hysteresis rules, log relay states and
# sensor readings to InfluxDB, then sleep before the next pass.
try:
    while True:
        mon_co2(co2_low, co2_high)
        time.sleep(5)
        mon_temp(temp_low, temp_high)
        time.sleep(5)
        state1 = r1.see_state # 1 (off) or 1000 (on) for graphing
        Relay_Log(name=name1, state=state1)
        state2 = r2.see_state + 2000 # This is done because Graphana has limited support for boolean type logging, and so that logs do not overlap.
        Relay_Log(name=name2, state=state2)
        # Order must match the module-level 'names' list.
        statuses = [f.status(), c.status(), h.status(), co2()]
        x = 0
        while x < len(names):
            name = names[x]
            status = statuses[x]
            print "{0} : {1}".format(name,status)
            Series(sensor_name=name, stat=status)
            time.sleep(2)
            x += 1
        #Series._json_body_()
        Series.commit()
        print ""
        print "Sleeping 30 seconds..."
        print "InfluxDB is at http://192.168.1.11:8086"
        print "Graphana is at http://192.168.1.11:3000"
        print ""
        time.sleep(30)
except KeyboardInterrupt:
    # Ctrl-C: release the GPIO pins before exiting.
    GPIO.cleanup()
    pass
exit() | gpl-2.0 |
phodal/growth-code | chapter6/growth_studio/settings.py | 2 | 3466 | """
Django settings for growth_studio project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; move it to an
# environment variable or local_settings.py before deploying.
SECRET_KEY = 'q2c4xbdh)hf-$z7v1dyai3n^+(g%l5ogi17rm+rud^ysbx-(h0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Hosts/domains this site may serve (enforced when DEBUG is False).
ALLOWED_HOSTS = [
    '127.0.0.1',
    'growth.studio.ren'
]
# Only send session/CSRF cookies over HTTPS, and keep the CSRF cookie out
# of reach of JavaScript.
# NOTE(review): with the secure-cookie flags set, logins will not work
# over plain HTTP (e.g. the DEBUG dev server) -- confirm this is intended.
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY= True
# Refuse to render the site inside a frame (clickjacking protection).
X_FRAME_OPTIONS = 'DENY'
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'homepage',
    'blog',
]
# NOTE(review): SecurityMiddleware is conventionally listed first so its
# redirects/headers apply before other middleware -- confirm the ordering.
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
]
ROOT_URLCONF = 'growth_studio.urls'
# Template engine: project-level templates/ directory plus each app's own
# templates (APP_DIRS).
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates/'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'growth_studio.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static/'),
)
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
### settings.py file
### settings that are not environment dependent
# NOTE(review): no DATABASES setting appears here -- presumably supplied
# by local_settings below; verify deployments always provide one.
# Environment-specific overrides are optional; ignore a missing module.
try:
    from .local_settings import *
except ImportError:
    pass
| mit |
adiwahyudi/gci16.fossasia.org | scripts/swagger_to_markdown/swagger_to_markdown.py | 10 | 1057 | import sys, json
# Module-level accumulator for the generated Markdown text; appended to
# by addHeader/addValue as the JSON tree is walked.
markdown = ""
def loadJSON(file):
    """Parse the JSON document at path *file* and return the parsed object.

    BUG FIX: the file handle was opened and never closed; the context
    manager guarantees it is released even if parsing fails.
    """
    with open(file, 'r') as data:
        return json.load(data)
def parseJSON(node, depth):
    """Render *node* (any parsed-JSON value) into the module-level markdown."""
    # Mappings and sequences each get their own renderer; scalar values
    # are handled by the callers, so anything else is ignored here.
    if isinstance(node, dict):
        parseDict(node, depth)
    if isinstance(node, list):
        parseList(node, depth)
def parseDict(d, depth):
    """Render a JSON object: strings become bullet values, nested
    containers get a heading one level deeper."""
    for key in d:
        child = d[key]
        if isinstance(child, unicode):
            addValue(key, child, depth)
        else:
            addHeader(key, depth)
            parseJSON(child, depth + 1)
def parseList(l, depth):
    """Render a JSON array: string items become "index: value" bullets,
    other items are rendered as nested objects.

    BUG FIX: the old code used l.index(v) to recover the position, which
    returns the FIRST occurrence and therefore mislabels duplicate
    strings; enumerate() yields the true index of every item.
    """
    for index, v in enumerate(l):
        if isinstance(v, unicode):
            addValue(index, v, depth)
        else:
            parseDict(v, depth)
def addHeader(value, depth):
    """Append a Markdown heading of level *depth* for *value* (title-cased)."""
    global markdown
    hashes = '#' * depth
    markdown += hashes + ' ' + value.title() + ' ' + hashes + '\n'
def addValue(key, value, depth):
    """Append a "* key: value" bullet line to the module-level markdown."""
    global markdown
    markdown += "* " + str(key) + ": " + str(value) + "\n"
def writeOut(markdown, output_file):
    """Write the rendered *markdown* text to *output_file*.

    BUG FIX: the handle was never closed, so output could remain buffered
    and the descriptor leaked; ``with`` flushes and closes it reliably.
    """
    with open(output_file, 'w+') as f:
        f.write(markdown)
# Convert the JSON file named on the command line to Markdown alongside it.
input_file = sys.argv[1]
# Strips the trailing "json" -- assumes the input path ends in ".json".
output_file = input_file[:-4]+'markdown'
d = loadJSON(input_file)
parseJSON(d, 1)
# Markdown supports at most six heading levels; collapse level seven.
# NOTE(review): depths beyond seven are left untouched -- confirm inputs
# never nest that deeply.
markdown = markdown.replace('#######','######')
writeOut(markdown, output_file) | gpl-3.0 |
terrycain/razer_drivers | scripts/driver/firefly_hyperflux/test.py | 3 | 5525 | #!/usr/bin/python3
import argparse
import glob
import os
import time
import random
# RGB byte triples (red, green, blue, yellow, magenta, cyan) picked at
# random when building custom LED frames below.
COLOURS = (b'\xFF\x00\x00', b'\x00\xFF\x00', b'\x00\x00\xFF', b'\xFF\xFF\x00', b'\xFF\x00\xFF', b'\x00\xFF\xFF')
def write_binary(driver_path, device_file, payload):
    """Write raw *payload* bytes to *device_file* under *driver_path*."""
    target = os.path.join(driver_path, device_file)
    with open(target, 'wb') as handle:
        handle.write(payload)
def read_string(driver_path, device_file):
    """Read *device_file* under *driver_path*, stripping trailing newlines."""
    target = os.path.join(driver_path, device_file)
    with open(target, 'r') as handle:
        return handle.read().rstrip('\n')
def write_string(driver_path, device_file, payload):
    """Write text *payload* to *device_file* under *driver_path*."""
    target = os.path.join(driver_path, device_file)
    with open(target, 'w') as handle:
        handle.write(payload)
def find_devices(vid, pid):
    """Yield sysfs driver paths of attached devices matching vid:pid."""
    pattern = os.path.join('/sys/bus/hid/drivers/razeraccessory',
                           '*:{0:04X}:{1:04X}.*'.format(vid, pid))
    for candidate in glob.glob(pattern):
        # Only paths exposing a device_type node are usable devices.
        if os.path.exists(os.path.join(candidate, 'device_type')):
            yield candidate
def parse_args():
    """Parse the command-line flags used to skip individual test phases."""
    parser = argparse.ArgumentParser()
    for flag in ('--skip-standard', '--skip-custom',
                 '--skip-game-led', '--skip-macro-led'):
        parser.add_argument(flag, action='store_true')
    return parser.parse_args()
if __name__ == '__main__':
    args = parse_args()
    # Walk every attached Firefly Hyperflux and run the selected phases.
    found_chroma = False
    for index, driver_path in enumerate(find_devices(0x1532, 0x0068), start=1):
        found_chroma = True
        print("Razer Firefly_hyperflux {0}\n".format(index))
        print("Driver version: {0}".format(read_string(driver_path, 'version')))
        print("Driver firmware version: {0}".format(read_string(driver_path, 'firmware_version')))
        print("Device serial: {0}".format(read_string(driver_path, 'device_serial')))
        print("Device type: {0}".format(read_string(driver_path, 'device_type')))
        print("Device mode: {0}".format(read_string(driver_path, 'device_mode')))
        # Set to static red so that we have something standard
        write_binary(driver_path, 'matrix_effect_static', b'\xFF\x00\x00')
        # Interactive phase 1: brightness sweep plus each built-in effect.
        if not args.skip_standard:
            print("Starting brightness test. Press enter to begin.")
            input()
            print("Max brightness...", end='')
            write_string(driver_path, 'matrix_brightness', '255')
            time.sleep(1)
            print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
            time.sleep(1)
            print("Half brightness...", end='')
            write_string(driver_path, 'matrix_brightness', '128')
            time.sleep(1)
            print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
            time.sleep(1)
            print("Zero brightness...", end='')
            write_string(driver_path, 'matrix_brightness', '0')
            time.sleep(1)
            print("brightness ({0})".format(read_string(driver_path, 'matrix_brightness')))
            time.sleep(1)
            write_string(driver_path, 'matrix_brightness', '255')
            print("Starting other colour effect tests. Press enter to begin.")
            input()
            print("Green Static")
            write_binary(driver_path, 'matrix_effect_static', b'\x00\xFF\x00')
            time.sleep(5)
            print("Cyan Static")
            write_binary(driver_path, 'matrix_effect_static', b'\x00\xFF\xFF')
            time.sleep(5)
            print("Spectrum")
            write_binary(driver_path, 'matrix_effect_spectrum', b'\x00')
            time.sleep(10)
            print("None")
            write_binary(driver_path, 'matrix_effect_none', b'\x00')
            time.sleep(5)
            print("Wave Left")
            write_string(driver_path, 'matrix_effect_wave', '1')
            time.sleep(5)
            print("Wave Right")
            write_string(driver_path, 'matrix_effect_wave', '2')
            time.sleep(5)
            print("Breathing random")
            write_binary(driver_path, 'matrix_effect_breath', b'\x00')
            time.sleep(10)
            print("Breathing red")
            write_binary(driver_path, 'matrix_effect_breath', b'\xFF\x00\x00')
            time.sleep(10)
            print("Breathing blue-green")
            write_binary(driver_path, 'matrix_effect_breath', b'\x00\xFF\x00\x00\x00\xFF')
            time.sleep(10)
        # Interactive phase 2: custom per-LED frames.
        if not args.skip_custom:
            # Custom LEDs all rows
            payload_all = b'\x00\x00\x0E'
            for i in range(0, 15): # 15 colours 0x00-0x0E
                payload_all += random.choice(COLOURS)
            # Frame that sets only the first and last LED (0x00 and 0x0E).
            payload_m1_5 = b''
            for led in (0x00, 0x0E):
                led_byte = led.to_bytes(1, byteorder='big')
                payload_m1_5 += b'\x00' + led_byte + led_byte + b'\xFF\xFF\xFF'
            print("Custom LED matrix colours test. Press enter to begin.")
            input()
            write_binary(driver_path, 'matrix_custom_frame', payload_all)
            write_binary(driver_path, 'matrix_effect_custom', b'\x00')
            print("Custom LED matrix partial colours test. First and last led to white. Press enter to begin.")
            input()
            write_binary(driver_path, 'matrix_custom_frame', payload_m1_5)
            write_binary(driver_path, 'matrix_effect_custom', b'\x00')
            time.sleep(0.5)
        print("Finished")
    if not found_chroma:
        print("No Fireflies found")
| gpl-2.0 |
rwl/PyCIM | CIM15/IEC62325/IEC62325CIMVersion.py | 1 | 2197 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.Element import Element
class IEC62325CIMVersion(Element):
    """IEC 62325 version number assigned to this UML model.
    """
    def __init__(self, date='', version='', *args, **kw_args):
        """Initialises a new 'IEC62325CIMVersion' instance.
        @param date: Form is YYYY-MM-DD for example for January 5, 2009 it is 2009-01-05.
        @param version: Form is IEC62325CIMXXvYY where XX is the major CIM package version and the YY is the minor version. For example IEC62325CIM10v03.
        """
        #: Form is YYYY-MM-DD for example for January 5, 2009 it is 2009-01-05.
        self.date = date
        #: Form is IEC62325CIMXXvYY where XX is the major CIM package version and the YY is the minor version. For example IEC62325CIM10v03.
        self.version = version
        super(IEC62325CIMVersion, self).__init__(*args, **kw_args)
    # Metadata consumed by the PyCIM serialisation machinery.
    _attrs = ["date", "version"]
    _attr_types = {"date": str, "version": str}
    _defaults = {"date": '', "version": ''}
    _enums = {}
    _refs = []
    _many_refs = []
| mit |
proxysh/Safejumper-for-Mac | buildmac/Resources/env/lib/python2.7/site-packages/twisted/python/systemd.py | 15 | 2834 | # -*- test-case-name: twisted.python.test.test_systemd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Integration with systemd.
Currently only the minimum APIs necessary for using systemd's socket activation
feature are supported.
"""
from __future__ import division, absolute_import
__all__ = ['ListenFDs']
from os import getpid
class ListenFDs(object):
    """
    Access to the file descriptors a process inherits from systemd's
    socket activation.

    Typically L{ListenFDs.fromEnvironment} should be used to construct a
    new instance of L{ListenFDs}.

    @cvar _START: systemd numbers inherited descriptors consecutively,
        beginning at this fixed value.  It must agree with the value
        systemd itself uses, so it should normally not be overridden.
    @type _START: C{int}

    @ivar _descriptors: A C{list} of C{int} giving the descriptors which
        were inherited.
    """
    _START = 3

    def __init__(self, descriptors):
        """
        @param descriptors: The descriptors which will be returned from
            calls to C{inheritedDescriptors}.
        """
        self._descriptors = descriptors

    @classmethod
    def fromEnvironment(cls, environ=None, start=None):
        """
        @param environ: A dictionary-like object inspected for the systemd
            activation variables.  By default, L{None}, meaning the real
            process environment is used.

        @param start: The lowest descriptor number systemd will hand us.
            By default, L{None}, meaning the known correct value is used.

        @return: A new instance of C{cls} exposing the inherited
            descriptors, if any.
        """
        if environ is None:
            from os import environ
        if start is None:
            start = cls._START

        fds = []
        try:
            # systemd sets LISTEN_PID to the intended recipient's pid;
            # only honour the descriptors if that recipient is us.
            announcedPid = int(environ['LISTEN_PID'])
            if announcedPid == getpid():
                count = int(environ['LISTEN_FDS'])
                fds = range(start, start + count)
                # Consume the variables so children don't misinterpret them.
                del environ['LISTEN_PID'], environ['LISTEN_FDS']
        except (KeyError, ValueError):
            # Variables absent or malformed: no descriptors inherited.
            pass
        return cls(fds)

    def inheritedDescriptors(self):
        """
        @return: The configured list of descriptors.
        """
        return list(self._descriptors)
| gpl-2.0 |
hradec/cortex | test/IECoreMaya/TemporaryAttributeValuesTest.py | 12 | 8969 | ##########################################################################
#
# Copyright (c) 2009-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import os
import maya.cmds
import IECore
import IECoreMaya
class TemporaryAttributeValuesTest( IECoreMaya.TestCase ) :
	"""Checks that IECoreMaya.TemporaryAttributeValues swaps attribute
	values inside a with-block and restores the originals on exit, for
	plain, namespaced and referenced attributes."""
	def test( self ) :
		# Build a locator carrying an attribute of every supported type,
		# record the defaults, override them all inside the context and
		# confirm the defaults come back afterwards.
		s = maya.cmds.spaceLocator()[0]
		maya.cmds.addAttr( s, at="enum", sn="enumTest", enumName="A:B:C", defaultValue = 1 )
		self.assertEqual( maya.cmds.getAttr( s + ".enumTest" ), 1 )
		maya.cmds.addAttr( s, at="bool", sn="boolTest", defaultValue=1 )
		self.assertEqual( maya.cmds.getAttr( s + ".boolTest" ), 1 )
		maya.cmds.addAttr( s, at="float", sn="floatTest" )
		self.assertEqual( maya.cmds.getAttr( s + ".floatTest" ), 0 )
		maya.cmds.addAttr( s, at="long", sn="intTest" )
		self.assertEqual( maya.cmds.getAttr( s + ".intTest" ), 0 )
		maya.cmds.addAttr( s, at="float2", sn="float2Test" )
		maya.cmds.addAttr( s, at="float", sn="float2TestX", parent="float2Test" )
		maya.cmds.addAttr( s, at="float", sn="float2TestY", parent="float2Test" )
		self.assertEqual( maya.cmds.getAttr( s + ".float2TestX" ), 0 )
		self.assertEqual( maya.cmds.getAttr( s + ".float2TestY" ), 0 )
		maya.cmds.addAttr( s, at="long2", sn="int2Test" )
		maya.cmds.addAttr( s, at="long", sn="int2TestX", parent="int2Test", defaultValue=1 )
		maya.cmds.addAttr( s, at="long", sn="int2TestY", parent="int2Test", defaultValue=2 )
		self.assertEqual( maya.cmds.getAttr( s + ".int2TestX" ), 1 )
		self.assertEqual( maya.cmds.getAttr( s + ".int2TestY" ), 2 )
		maya.cmds.addAttr( s, at="float3", sn="float3Test" )
		maya.cmds.addAttr( s, at="float", sn="float3TestX", parent="float3Test", defaultValue=10 )
		maya.cmds.addAttr( s, at="float", sn="float3TestY", parent="float3Test", defaultValue=20 )
		maya.cmds.addAttr( s, at="float", sn="float3TestZ", parent="float3Test", defaultValue=30 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestX" ), 10 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestY" ), 20 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestZ" ), 30 )
		maya.cmds.addAttr( s, at="short3", sn="short3Test" )
		maya.cmds.addAttr( s, at="short", sn="short3TestX", parent="short3Test", defaultValue=101 )
		maya.cmds.addAttr( s, at="short", sn="short3TestY", parent="short3Test", defaultValue=201 )
		maya.cmds.addAttr( s, at="short", sn="short3TestZ", parent="short3Test", defaultValue=301 )
		self.assertEqual( maya.cmds.getAttr( s + ".short3TestX" ), 101 )
		self.assertEqual( maya.cmds.getAttr( s + ".short3TestY" ), 201 )
		self.assertEqual( maya.cmds.getAttr( s + ".short3TestZ" ), 301 )
		maya.cmds.addAttr( s, dt="string", sn="stringTest" )
		maya.cmds.setAttr( s + ".stringTest", "hi", type="string" )
		self.assertEqual( maya.cmds.getAttr( s + ".stringTest" ), "hi" )
		# Temporary overrides for every attribute created above.
		context = IECoreMaya.TemporaryAttributeValues(
			{
				s + ".enumTest" : 2,
				s + ".boolTest" : False,
				s + ".floatTest" : 10,
				s + ".intTest" : 20,
				s + ".float2Test" : ( 1, 2 ),
				s + ".int2Test" : IECore.V2i( 3, 4 ),
				s + ".float3Test" : ( 9, 6, 1 ),
				s + ".short3Test" : ( 500, 2, -1 ),
				s + ".stringTest" : "bye",
			}
		)
		# Inside the context the temporary values must be visible.
		with context :
			self.assertEqual( maya.cmds.getAttr( s + ".enumTest" ), 2 )
			self.assertEqual( maya.cmds.getAttr( s + ".boolTest" ), 0 )
			self.assertEqual( maya.cmds.getAttr( s + ".floatTest" ), 10 )
			self.assertEqual( maya.cmds.getAttr( s + ".intTest" ), 20 )
			self.assertEqual( maya.cmds.getAttr( s + ".float2TestX" ), 1 )
			self.assertEqual( maya.cmds.getAttr( s + ".float2TestY" ), 2 )
			self.assertEqual( maya.cmds.getAttr( s + ".int2TestX" ), 3 )
			self.assertEqual( maya.cmds.getAttr( s + ".int2TestY" ), 4 )
			self.assertEqual( maya.cmds.getAttr( s + ".float3TestX" ), 9 )
			self.assertEqual( maya.cmds.getAttr( s + ".float3TestY" ), 6 )
			self.assertEqual( maya.cmds.getAttr( s + ".float3TestZ" ), 1 )
			self.assertEqual( maya.cmds.getAttr( s + ".short3TestX" ), 500 )
			self.assertEqual( maya.cmds.getAttr( s + ".short3TestY" ), 2 )
			self.assertEqual( maya.cmds.getAttr( s + ".short3TestZ" ), -1 )
			self.assertEqual( maya.cmds.getAttr( s + ".stringTest" ), "bye" )
		# After exit the original values must be restored.
		self.assertEqual( maya.cmds.getAttr( s + ".enumTest" ), 1 )
		self.assertEqual( maya.cmds.getAttr( s + ".boolTest" ), 1 )
		self.assertEqual( maya.cmds.getAttr( s + ".floatTest" ), 0 )
		self.assertEqual( maya.cmds.getAttr( s + ".intTest" ), 0 )
		self.assertEqual( maya.cmds.getAttr( s + ".float2TestX" ), 0 )
		self.assertEqual( maya.cmds.getAttr( s + ".float2TestY" ), 0 )
		self.assertEqual( maya.cmds.getAttr( s + ".int2TestX" ), 1 )
		self.assertEqual( maya.cmds.getAttr( s + ".int2TestY" ), 2 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestX" ), 10 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestY" ), 20 )
		self.assertEqual( maya.cmds.getAttr( s + ".float3TestZ" ), 30 )
		self.assertEqual( maya.cmds.getAttr( s + ".stringTest" ), "hi" )
	def testNameSpaceAttributes( self ) :
		# Same swap/restore behaviour for an attribute that lives in a
		# namespace other than the current one.
		maya.cmds.namespace( add="ns1" )
		s = maya.cmds.spaceLocator()[0]
		maya.cmds.addAttr( s, at="enum", sn="enumTest", enumName="A:B:C", defaultValue = 1 )
		s = maya.cmds.rename( s, "ns1:"+s )
		plugPath = s + ".enumTest"
		maya.cmds.namespace( set=":" )
		self.assertEqual( plugPath, "ns1:locator1.enumTest" )
		self.assertEqual( maya.cmds.namespace( exists="ns1" ), True )
		self.assertEqual( maya.cmds.namespaceInfo( currentNamespace=True ), ":" )
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
		with IECoreMaya.TemporaryAttributeValues( { plugPath : 2 } ) :
			self.assertEqual( maya.cmds.getAttr( plugPath ), 2 )
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
	def testReferenceAttributes( self ) :
		# Same swap/restore behaviour for an attribute on a referenced node.
		s = maya.cmds.spaceLocator()[0]
		maya.cmds.addAttr( s, at="enum", sn="enumTest", enumName="A:B:C", defaultValue = 1 )
		plugPath = s + ".enumTest"
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
		with IECoreMaya.TemporaryAttributeValues( { plugPath : 2 } ) :
			self.assertEqual( maya.cmds.getAttr( plugPath ), 2 )
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
		# save it to a file
		#######################################################################
		maya.cmds.file( rename = os.path.join( os.getcwd(), "test", "IECoreMaya", "temporaryAttributeReference.ma" ) )
		referenceScene = maya.cmds.file( force = True, type = "mayaAscii", save = True )
		# make a new scene referencing that file
		#######################################################################
		maya.cmds.file( new = True, force = True )
		maya.cmds.file( referenceScene, reference = True, namespace = "ns1" )
		plugPath = "ns1:" + s + ".enumTest"
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
		with IECoreMaya.TemporaryAttributeValues( { plugPath : 2 } ) :
			self.assertEqual( maya.cmds.getAttr( plugPath ), 2 )
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
		with IECoreMaya.TemporaryAttributeValues( { plugPath : 0 } ) :
			self.assertEqual( maya.cmds.getAttr( plugPath ), 0 )
		self.assertEqual( maya.cmds.getAttr( plugPath ), 1 )
	def tearDown( self ) :
		# Remove the scene file written by testReferenceAttributes.
		for f in [
			"test/IECoreMaya/temporaryAttributeReference.ma" ,
		] :
			if os.path.exists( f ) :
				os.remove( f )
# Allow running this test file directly through the IECoreMaya test runner.
if __name__ == "__main__":
	IECoreMaya.TestProgram()
| bsd-3-clause |
vmindru/ansible | test/units/modules/network/onyx/test_onyx_ospf.py | 68 | 4494 | #
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_ospf
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxOspfModule(TestOnyxModule):
    """Unit tests for the onyx_ospf module.

    The module's config-fetching helpers and load_config are patched so no
    device connection is needed; fixture files supply the "current" state.
    """

    module = onyx_ospf

    def setUp(self):
        super(TestOnyxOspfModule, self).setUp()
        # When False, load_fixtures pretends no OSPF router is configured.
        self._ospf_exists = True
        self.mock_get_config = patch.object(
            onyx_ospf.OnyxOspfModule,
            "_get_ospf_config")
        self.get_config = self.mock_get_config.start()
        self.mock_get_interfaces_config = patch.object(
            onyx_ospf.OnyxOspfModule,
            "_get_ospf_interfaces_config")
        self.get_interfaces_config = self.mock_get_interfaces_config.start()
        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()

    def tearDown(self):
        super(TestOnyxOspfModule, self).tearDown()
        self.mock_get_config.stop()
        # BUG FIX: this patch was started in setUp but never stopped, so
        # it leaked into whatever test code ran afterwards.
        self.mock_get_interfaces_config.stop()
        self.mock_load_config.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        """Feed canned device output through the mocked config getters."""
        if self._ospf_exists:
            config_file = 'onyx_ospf_show.cfg'
            self.get_config.return_value = load_fixture(config_file)
            config_file = 'onyx_ospf_interfaces_show.cfg'
            self.get_interfaces_config.return_value = load_fixture(config_file)
        else:
            self.get_config.return_value = None
            self.get_interfaces_config.return_value = None
        self.load_config.return_value = None

    def test_ospf_absent_no_change(self):
        # Removing an OSPF instance that isn't configured is a no-op.
        set_module_args(dict(ospf=3, state='absent'))
        self.execute_module(changed=False)

    def test_ospf_present_no_change(self):
        # Desired state already matches the fixture: nothing to push.
        interface = dict(name='Loopback 1', area='0.0.0.0')
        set_module_args(dict(ospf=2, router_id='10.2.3.4',
                             interfaces=[interface]))
        self.execute_module(changed=False)

    def test_ospf_present_remove(self):
        set_module_args(dict(ospf=2, state='absent'))
        commands = ['no router ospf 2']
        self.execute_module(changed=True, commands=commands)

    def test_ospf_change_router(self):
        interface = dict(name='Loopback 1', area='0.0.0.0')
        set_module_args(dict(ospf=2, router_id='10.2.3.5',
                             interfaces=[interface]))
        commands = ['router ospf 2', 'router-id 10.2.3.5', 'exit']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_ospf_remove_router(self):
        # Omitting router_id must generate a "no router-id" command.
        interface = dict(name='Loopback 1', area='0.0.0.0')
        set_module_args(dict(ospf=2, interfaces=[interface]))
        commands = ['router ospf 2', 'no router-id', 'exit']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_ospf_add_interface(self):
        interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
                      dict(name='Loopback 2', area='0.0.0.0')]
        set_module_args(dict(ospf=2, router_id='10.2.3.4',
                             interfaces=interfaces))
        commands = ['interface loopback 2 ip ospf area 0.0.0.0']
        self.execute_module(changed=True, commands=commands)

    def test_ospf_remove_interface(self):
        set_module_args(dict(ospf=2, router_id='10.2.3.4'))
        commands = ['interface loopback 1 no ip ospf area']
        self.execute_module(changed=True, commands=commands)

    def test_ospf_add(self):
        # No existing OSPF config: everything must be created from scratch.
        self._ospf_exists = False
        interfaces = [dict(name='Loopback 1', area='0.0.0.0'),
                      dict(name='Vlan 210', area='0.0.0.0'),
                      dict(name='Eth1/1', area='0.0.0.0'),
                      dict(name='Po1', area='0.0.0.0')]
        set_module_args(dict(ospf=2, router_id='10.2.3.4',
                             interfaces=interfaces))
        commands = ['router ospf 2', 'router-id 10.2.3.4', 'exit',
                    'interface loopback 1 ip ospf area 0.0.0.0',
                    'interface vlan 210 ip ospf area 0.0.0.0',
                    'interface ethernet 1/1 ip ospf area 0.0.0.0',
                    'interface port-channel 1 ip ospf area 0.0.0.0']
        self.execute_module(changed=True, commands=commands)
| gpl-3.0 |
konstruktoid/ansible-upstream | lib/ansible/parsing/utils/addresses.py | 241 | 8167 | # Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible.errors import AnsibleParserError, AnsibleError
# Components that match a numeric or alphanumeric begin:end or begin:end:step
# range expression inside square brackets.
numeric_range = r'''
\[
(?:[0-9]+:[0-9]+) # numeric begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
hexadecimal_range = r'''
\[
(?:[0-9a-f]+:[0-9a-f]+) # hexadecimal begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
alphanumeric_range = r'''
\[
(?:
[a-z]:[a-z]| # one-char alphabetic range
[0-9]+:[0-9]+ # ...or a numeric one
)
(?::[0-9]+)? # numeric :step (optional)
\]
'''
# Components that match a 16-bit portion of an IPv6 address in hexadecimal
# notation (0..ffff) or an 8-bit portion of an IPv4 address in decimal notation
# (0..255) or an [x:y(:z)] numeric range.
ipv6_component = r'''
(?:
[0-9a-f]{{1,4}}| # 0..ffff
{range} # or a numeric range
)
'''.format(range=hexadecimal_range)
ipv4_component = r'''
(?:
[01]?[0-9]{{1,2}}| # 0..199
2[0-4][0-9]| # 200..249
25[0-5]| # 250..255
{range} # or a numeric range
)
'''.format(range=numeric_range)
# A hostname label, e.g. 'foo' in 'foo.example.com'. Consists of alphanumeric
# characters plus dashes (and underscores) or valid ranges. The label may not
# start or end with a hyphen or an underscore. This is interpolated into the
# hostname pattern below. We don't try to enforce the 63-char length limit.
label = r'''
(?:[\w]|{range}) # Starts with an alphanumeric or a range
(?:[\w_-]|{range})* # Then zero or more of the same or [_-]
(?<![_-]) # ...as long as it didn't end with [_-]
'''.format(range=alphanumeric_range)
patterns = {
# This matches a square-bracketed expression with a port specification. What
# is inside the square brackets is validated later.
'bracketed_hostport': re.compile(
r'''^
\[(.+)\] # [host identifier]
:([0-9]+) # :port number
$
''', re.X
),
# This matches a bare IPv4 address or hostname (or host pattern including
# [x:y(:z)] ranges) with a port specification.
'hostport': re.compile(
r'''^
((?: # We want to match:
[^:\[\]] # (a non-range character
| # ...or...
\[[^\]]*\] # a complete bracketed expression)
)*) # repeated as many times as possible
:([0-9]+) # followed by a port number
$
''', re.X
),
# This matches an IPv4 address, but also permits range expressions.
'ipv4': re.compile(
r'''^
(?:{i4}\.){{3}}{i4} # Three parts followed by dots plus one
$
'''.format(i4=ipv4_component), re.X | re.I
),
# This matches an IPv6 address, but also permits range expressions.
#
# This expression looks complex, but it really only spells out the various
# combinations in which the basic unit of an IPv6 address (0..ffff) can be
# written, from :: to 1:2:3:4:5:6:7:8, plus the IPv4-in-IPv6 variants such
# as ::ffff:192.0.2.3.
#
# Note that we can't just use ipaddress.ip_address() because we also have to
# accept ranges in place of each component.
'ipv6': re.compile(
r'''^
(?:{0}:){{7}}{0}| # uncompressed: 1:2:3:4:5:6:7:8
(?:{0}:){{1,6}}:| # compressed variants, which are all
(?:{0}:)(?::{0}){{1,6}}| # a::b for various lengths of a,b
(?:{0}:){{2}}(?::{0}){{1,5}}|
(?:{0}:){{3}}(?::{0}){{1,4}}|
(?:{0}:){{4}}(?::{0}){{1,3}}|
(?:{0}:){{5}}(?::{0}){{1,2}}|
(?:{0}:){{6}}(?::{0})| # ...all with 2 <= a+b <= 7
:(?::{0}){{1,6}}| # ::ffff(:ffff...)
{0}?::| # ffff::, ::
# ipv4-in-ipv6 variants
(?:0:){{6}}(?:{0}\.){{3}}{0}|
::(?:ffff:)?(?:{0}\.){{3}}{0}|
(?:0:){{5}}ffff:(?:{0}\.){{3}}{0}
$
'''.format(ipv6_component), re.X | re.I
),
# This matches a hostname or host pattern including [x:y(:z)] ranges.
#
# We roughly follow DNS rules here, but also allow ranges (and underscores).
# In the past, no systematic rules were enforced about inventory hostnames,
# but the parsing context (e.g. shlex.split(), fnmatch.fnmatch()) excluded
# various metacharacters anyway.
#
# We don't enforce DNS length restrictions here (63 characters per label,
# 253 characters total) or make any attempt to process IDNs.
'hostname': re.compile(
r'''^
{label} # We must have at least one label
(?:\.{label})* # Followed by zero or more .labels
$
'''.format(label=label), re.X | re.I | re.UNICODE
),
}
def parse_address(address, allow_ranges=False):
    """Split an address specification into a (host, port) tuple.

    The host identifier may be a hostname (qualified or not), an IPv4
    address, or an IPv6 address; when `allow_ranges` is True any of those
    may contain [x:y(:z)] range expressions, e.g. foo[1:3].

    The port is an optional :NN suffix on an IPv4 address or host name, and
    a mandatory :NN suffix on any square-bracketed expression — which is
    therefore the only way to attach a port to an IPv6 address. The returned
    port is None when no port was specified.

    Raises AnsibleError when the host part cannot be recognised, and
    AnsibleParserError when a range is present but `allow_ranges` is False.
    """

    # Strip off the port, if any. Both patterns are applied in turn so that
    # a bracketed form is unwrapped before the bare host:port form is tried
    # on the remainder.
    port = None
    for pattern_name in ('bracketed_hostport', 'hostport'):
        matched = patterns[pattern_name].match(address)
        if matched:
            address, port = matched.groups()
            port = int(port)
            continue

    # Whatever is left must be an IPv4/IPv6 address or a hostname; each
    # pattern also tolerates the range expressions handled above.
    host = None
    for pattern_name in ('ipv4', 'ipv6', 'hostname'):
        if patterns[pattern_name].match(address):
            host = address

    # None of the host patterns recognised the remainder.
    if not host:
        raise AnsibleError("Not a valid network hostname: %s" % address)

    # Ranges parsed fine, but the caller did not opt in to them.
    if not allow_ranges and '[' in host:
        raise AnsibleParserError("Detected range in host but was asked to ignore ranges")

    return (host, port)
| gpl-3.0 |
castroflavio/ryu | ryu/services/protocols/bgp/info_base/base.py | 5 | 41669 | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Defines some model classes related BGP.
These class include types used in saving information sent/received over BGP
sessions.
"""
import abc
from abc import ABCMeta
from abc import abstractmethod
from copy import copy
import logging
import netaddr
from ryu.lib.packet.bgp import RF_IPv4_UC
from ryu.lib.packet.bgp import RouteTargetMembershipNLRI
from ryu.lib.packet.bgp import BGP_ATTR_TYPE_EXTENDED_COMMUNITIES
from ryu.lib.packet.bgp import BGPPathAttributeLocalPref
from ryu.lib.packet.bgp import BGP_ATTR_TYPE_AS_PATH
from ryu.services.protocols.bgp.base import OrderedDict
from ryu.services.protocols.bgp.constants import VPN_TABLE
from ryu.services.protocols.bgp.constants import VRF_TABLE
from ryu.services.protocols.bgp.model import OutgoingRoute
from ryu.services.protocols.bgp.processor import BPR_ONLY_PATH
from ryu.services.protocols.bgp.processor import BPR_UNKNOWN
LOG = logging.getLogger('bgpspeaker.info_base.base')
class Table(object):
    """A container for holding information about destination/prefixes.

    Routing information base for a particular afi/safi.
    This is a base class which should be sub-classed for different route
    family. A table can be uniquely identified by (Route Family, Scope Id).
    Sub-classes must implement `_create_dest` and `_table_key`.
    """
    __metaclass__ = abc.ABCMeta
    ROUTE_FAMILY = RF_IPv4_UC

    def __init__(self, scope_id, core_service, signal_bus):
        # Maps the key returned by `_table_key(nlri)` to a Destination.
        self._destinations = dict()
        # Scope in which this table exists.
        # If this table represents the VRF, then this could be a VPN ID.
        # For global/VPN tables this should be None
        self._scope_id = scope_id
        self._signal_bus = signal_bus
        self._core_service = core_service

    @property
    def route_family(self):
        return self.__class__.ROUTE_FAMILY

    @property
    def core_service(self):
        return self._core_service

    @property
    def scope_id(self):
        return self._scope_id

    @abstractmethod
    def _create_dest(self, nlri):
        """Creates destination specific for this table.

        Returns destination that stores information of paths to *nlri*.
        """
        raise NotImplementedError()

    def itervalues(self):
        # Iterator over all Destination instances held by this table.
        return self._destinations.itervalues()

    def insert(self, path):
        """Inserts `path` (advertisement or withdrawal) into this table.

        Returns the Destination that was updated.
        """
        self._validate_path(path)
        self._validate_nlri(path.nlri)
        if path.is_withdraw:
            updated_dest = self._insert_withdraw(path)
        else:
            updated_dest = self._insert_path(path)
        return updated_dest

    def insert_sent_route(self, sent_route):
        # Records `sent_route` in the Adj-RIB-Out of the matching
        # destination, creating that destination if needed.
        self._validate_path(sent_route.path)
        dest = self._get_or_create_dest(sent_route.path.nlri)
        dest.add_sent_route(sent_route)

    def _insert_path(self, path):
        """Add new path to destination identified by given prefix.
        """
        assert path.is_withdraw is False
        dest = self._get_or_create_dest(path.nlri)
        # Add given path to matching Dest.
        dest.add_new_path(path)
        # Return updated destination.
        return dest

    def _insert_withdraw(self, path):
        """Appends given path to withdraw list of Destination for given prefix.
        """
        assert path.is_withdraw is True
        dest = self._get_or_create_dest(path.nlri)
        # Add given path to matching destination.
        dest.add_withdraw(path)
        # Return updated destination.
        return dest

    def cleanup_paths_for_peer(self, peer):
        """Remove old paths from whose source is `peer`

        Old paths have source version number that is less than current peer
        version number. Also removes sent paths to this peer.
        """
        LOG.debug('Cleaning paths from table %s for peer %s' % (self, peer))
        for dest in self.itervalues():
            # Remove paths learned from this source
            paths_deleted = dest.remove_old_paths_from_source(peer)

            # Remove sent paths to this peer
            had_sent = dest.remove_sent_route(peer)
            if had_sent:
                LOG.debug('Removed sent route %s for %s' % (dest.nlri, peer))

            # If any paths are removed we enqueue respective destination for
            # future processing.
            if paths_deleted:
                self._signal_bus.dest_changed(dest)

    def clean_uninteresting_paths(self, interested_rts):
        """Cleans table of any path that do not have any RT in common
        with `interested_rts`.

        Parameters:
            - `interested_rts`: (set) of RT that are of interest/that need to
            be preserved

        Returns the number of destinations for which a withdraw was added.
        """
        LOG.debug('Cleaning table %s for given interested RTs %s' %
                  (self, interested_rts))
        uninteresting_dest_count = 0
        for dest in self.itervalues():
            added_withdraw = \
                dest.withdraw_unintresting_paths(interested_rts)
            if added_withdraw:
                self._signal_bus.dest_changed(dest)
                uninteresting_dest_count += 1
        return uninteresting_dest_count

    def delete_dest_by_nlri(self, nlri):
        """Deletes the destination identified by given prefix.

        Returns the deleted destination if a match is found. If not match is
        found return None.
        """
        self._validate_nlri(nlri)
        dest = self._get_dest(nlri)
        if dest:
            # BUG FIX: `_destinations` is keyed by `_table_key(nlri)`, not by
            # the Destination object itself; the previous `pop(dest)` always
            # raised KeyError and never removed the entry (compare
            # `delete_dest` below, which keys correctly).
            self._destinations.pop(self._table_key(nlri))
        return dest

    def delete_dest(self, dest):
        # Unconditional removal; raises KeyError if `dest` is not present.
        del self._destinations[self._table_key(dest.nlri)]

    def _validate_nlri(self, nlri):
        """Validated *nlri* is the type that this table stores/supports.
        """
        # NOTE(review): the message mentions Vpnv4 but this base class is
        # family-generic; text kept unchanged for any caller matching on it.
        if not nlri or not (nlri.ROUTE_FAMILY == self.route_family):
            raise ValueError('Invalid Vpnv4 prefix given.')

    def _validate_path(self, path):
        """Check if given path is an instance of *Path*.

        Raises ValueError if given is not a instance of *Path*.
        """
        if not path or not (path.route_family == self.route_family):
            raise ValueError('Invalid path. Expected instance of'
                             ' Vpnv4 route family path, got %s.' % path)

    def _get_or_create_dest(self, nlri):
        table_key = self._table_key(nlri)
        dest = self._destinations.get(table_key)
        # If destination for given prefix does not exist we create it.
        if dest is None:
            dest = self._create_dest(nlri)
            self._destinations[table_key] = dest
        return dest

    def _get_dest(self, nlri):
        # Returns the Destination for `nlri`, or None when absent.
        table_key = self._table_key(nlri)
        dest = self._destinations.get(table_key)
        return dest

    def is_for_vrf(self):
        """Returns true if this table instance represents a VRF.
        """
        return self.scope_id is not None

    def __str__(self):
        return 'Table(scope_id: %s, rf: %s)' % (self.scope_id,
                                                self.route_family)

    @abstractmethod
    def _table_key(self, nlri):
        """Return a key that will uniquely identify this NLRI inside
        this table.
        """
        raise NotImplementedError()
class NonVrfPathProcessingMixin(object):
    """Reacts to best-path selection at the main (non-VRF) table level.

    Intended to be mixed into "Destination" subclasses. Applies to most
    destinations except VrfDest, which is processed at the VRF level and
    therefore follows different logic.
    """

    def _best_path_lost(self):
        # No path remains, so this destination has no best path any more.
        self._best_path = None

        if not self._sent_routes:
            return

        # The old best path had been advertised: every peer that received
        # it must now get an update-withdraw.
        for route in self._sent_routes.values():
            withdraw = route.path.clone(for_withdrawal=True)
            outgoing = OutgoingRoute(withdraw)
            route.sent_peer.enque_outgoing_msg(outgoing)
            LOG.debug('Sending withdrawal to %s for %s' %
                      (route.sent_peer, outgoing))

        # Best path is gone, so the Adj-RIB-Out records are now stale.
        self._sent_routes = {}

    def _new_best_path(self, new_best_path):
        previous_best = self._best_path
        self._best_path = new_best_path
        LOG.debug('New best path selected for destination %s' % (self))

        # If the previous best path was withdrawn (it no longer appears in
        # the known paths), its sent-route bookkeeping must be discarded.
        if (previous_best and previous_best not in self._known_path_list
                and self._sent_routes):
            self._sent_routes = {}

        # Announce the new best path to all qualifying BGP peers.
        self._core_service.peer_manager.comm_new_best_to_bgp_peers(
            new_best_path)

        # Withdraw the superseded best path from peers it was sent to.
        if previous_best and self._sent_routes:
            for route in self._sent_routes.values():
                withdraw = route.path.clone(for_withdrawal=True)
                outgoing = OutgoingRoute(withdraw)
                route.sent_peer.enque_outgoing_msg(outgoing)
                LOG.debug('Sending withdrawal to %s for %s' %
                          (route.sent_peer, outgoing))
class Destination(object):
    """State about a particular destination.

    For example, an IP prefix. This is the data-structure that is hung of the
    a routing information base table *Table*.
    """
    __metaclass__ = abc.ABCMeta
    ROUTE_FAMILY = RF_IPv4_UC

    def __init__(self, table, nlri):
        # Validate arguments.
        if table.route_family != self.__class__.ROUTE_FAMILY:
            raise ValueError('Table and destination route family '
                             'do not match.')

        # Back-pointer to the table that contains this destination.
        self._table = table
        self._core_service = table.core_service

        self._nlri = nlri

        # List of all known processed paths,
        self._known_path_list = []

        # List of new un-processed paths.
        self._new_path_list = []

        # Pointer to best-path. One from the the known paths.
        self._best_path = None

        # Reason current best path was chosen as best path.
        self._best_path_reason = None

        # List of withdrawn paths.
        self._withdraw_list = []

        # List of SentRoute objects. This is the Adj-Rib-Out for this
        # destination. (key/value: peer/sent_route)
        self._sent_routes = {}

        # This is an (optional) list of paths that were created as a
        # result of exporting this route to other tables.
        # self.exported_paths = None

        # Automatically generated
        #
        # On work queue for BGP processor.
        # self.next_dest_to_process
        # self.prev_dest_to_process

    @property
    def route_family(self):
        return self.__class__.ROUTE_FAMILY

    @property
    def nlri(self):
        return self._nlri

    @property
    def best_path(self):
        return self._best_path

    @property
    def best_path_reason(self):
        return self._best_path_reason

    @property
    def known_path_list(self):
        # Shallow copy so callers cannot mutate internal state.
        return self._known_path_list[:]

    @property
    def sent_routes(self):
        return self._sent_routes.values()

    def add_new_path(self, new_path):
        # Queues a learned path; it is folded into the known paths by
        # process()/_process_paths().
        self._validate_path(new_path)
        self._new_path_list.append(new_path)

    def add_withdraw(self, withdraw):
        # Queues a withdrawal; matched against known paths during processing.
        self._validate_path(withdraw)
        self._withdraw_list.append(withdraw)

    def add_sent_route(self, sent_route):
        # Records an Adj-RIB-Out entry, keyed by the receiving peer.
        self._sent_routes[sent_route.sent_peer] = sent_route

    def remove_sent_route(self, peer):
        # Drops the Adj-RIB-Out entry for `peer`; returns True if one existed.
        if self.was_sent_to(peer):
            del self._sent_routes[peer]
            return True

        return False

    def was_sent_to(self, peer):
        # True if this destination's best path was advertised to `peer`.
        if peer in self._sent_routes.keys():
            return True

        return False

    def _process(self):
        """Calculate best path for this destination.

        A destination is processed when known paths to this destination has
        changed. We might have new paths or withdrawals of last known paths.
        Removes withdrawals and adds new learned paths from known path list.
        Uses bgp best-path calculation algorithm on new list of known paths to
        choose new best-path. Communicates best-path to core service.
        """
        LOG.debug('Processing destination: %s', self)
        new_best_path, reason = self._process_paths()
        self._best_path_reason = reason

        # No change in best path; nothing to communicate.
        if self._best_path == new_best_path:
            return

        if new_best_path is None:
            # we lost best path
            assert not self._known_path_list, repr(self._known_path_list)
            return self._best_path_lost()
        else:
            return self._new_best_path(new_best_path)

    @abstractmethod
    def _best_path_lost(self):
        # Subclass hook: invoked when the destination no longer has any path.
        raise NotImplementedError()

    @abstractmethod
    def _new_best_path(self, new_best_path):
        # Subclass hook: invoked when a different best path was selected.
        raise NotImplementedError()

    @classmethod
    def _validate_path(cls, path):
        if not path or path.route_family != cls.ROUTE_FAMILY:
            raise ValueError(
                'Invalid path. Expected %s path got %s' %
                (cls.ROUTE_FAMILY, path)
            )

    def process(self):
        # Re-runs best-path selection, then garbage-collects this
        # destination from its table once it holds no paths at all.
        self._process()
        if not self._known_path_list and not self._best_path:
            self._remove_dest_from_table()

    def _remove_dest_from_table(self):
        self._table.delete_dest(self)

    def remove_old_paths_from_source(self, source):
        """Removes known old paths from *source*.

        Returns *True* if any of the known paths were found to be old and
        removed/deleted.
        (More precisely, returns the list of removed paths, which is truthy
        exactly when something was removed.)
        """
        assert(source and hasattr(source, 'version_num'))
        removed_paths = []
        # Iterate over the paths in reverse order as we want to delete paths
        # whose source is this peer.
        source_ver_num = source.version_num
        for path_idx in range(len(self._known_path_list) - 1, -1, -1):
            path = self._known_path_list[path_idx]
            if (path.source == source and
                    path.source_version_num < source_ver_num):
                # If this peer is source of any paths, remove those path.
                del(self._known_path_list[path_idx])
                removed_paths.append(path)
        return removed_paths

    def withdraw_if_sent_to(self, peer):
        """Sends a withdraw for this destination to given `peer`.

        Check the records if we indeed advertise this destination to given peer
        and if so, creates a withdraw for advertised route and sends it to the
        peer.

        Parameter:
            - `peer`: (Peer) peer to send withdraw to
        """
        # Local import avoids a circular dependency with the peer module.
        from ryu.services.protocols.bgp.peer import Peer
        if not isinstance(peer, Peer):
            raise TypeError('Currently we only support sending withdrawal'
                            ' to instance of peer')
        sent_route = self._sent_routes.pop(peer, None)
        if not sent_route:
            return False

        sent_path = sent_route.path
        withdraw_clone = sent_path.clone(for_withdrawal=True)
        outgoing_route = OutgoingRoute(withdraw_clone)
        sent_route.sent_peer.enque_outgoing_msg(outgoing_route)
        return True

    def _process_paths(self):
        """Calculates best-path among known paths for this destination.

        Returns:
         - Best path

        Modifies destination's state related to stored paths. Removes withdrawn
        paths from known paths. Also, adds new paths to known paths.
        """
        # First remove the withdrawn paths.
        # Note: If we want to support multiple paths per destination we may
        # have to maintain sent-routes per path.
        self._remove_withdrawals()

        # Have to select best-path from available paths and new paths.
        # If we do not have any paths, then we no longer have best path.
        if not self._known_path_list and len(self._new_path_list) == 1:
            # If we do not have any old but one new path
            # it becomes best path.
            self._known_path_list.append(self._new_path_list[0])
            del(self._new_path_list[0])
            return self._known_path_list[0], BPR_ONLY_PATH

        # If we have a new version of old/known path we use it and delete old
        # one.
        self._remove_old_paths()

        # Collect all new paths into known paths.
        self._known_path_list.extend(self._new_path_list)

        # Clear new paths as we copied them.
        del(self._new_path_list[:])

        # If we do not have any paths to this destination, then we do not have
        # new best path.
        if not self._known_path_list:
            return None, BPR_UNKNOWN

        # Compute new best path
        current_best_path, reason = self._compute_best_known_path()
        return current_best_path, reason

    def _remove_withdrawals(self):
        """Removes withdrawn paths.

        Note:
        We may have disproportionate number of withdraws compared to know paths
        since not all paths get installed into the table due to bgp policy and
        we can receive withdraws for such paths and withdrawals may not be
        stopped by the same policies.
        """
        LOG.debug('Removing %s withdrawals' % len(self._withdraw_list))

        # If we have no withdrawals, we have nothing to do.
        if not self._withdraw_list:
            return

        # If we have some withdrawals and no know-paths, it means it is safe to
        # delete these withdraws.
        if not self._known_path_list:
            LOG.debug('Found %s withdrawals for path(s) that did not get'
                      ' installed.' % len(self._withdraw_list))
            del(self._withdraw_list[:])
            return

        # If we have some known paths and some withdrawals, we find matches and
        # delete them first.
        matches = set()
        w_matches = set()
        # Match all withdrawals from destination paths.
        for withdraw in self._withdraw_list:
            match = None
            for path in self._known_path_list:
                # We have a match if the source are same.
                if path.source == withdraw.source:
                    match = path
                    matches.add(path)
                    w_matches.add(withdraw)
                    # One withdraw can remove only one path.
                    break
            # We do no have any match for this withdraw.
            if not match:
                LOG.debug('No matching path for withdraw found, may be path '
                          'was not installed into table: %s' %
                          withdraw)
        # If we have partial match.
        if len(matches) != len(self._withdraw_list):
            LOG.debug('Did not find match for some withdrawals. Number of '
                      'matches(%s), number of withdrawals (%s)' %
                      (len(matches), len(self._withdraw_list)))

        # Clear matching paths and withdrawals.
        for match in matches:
            self._known_path_list.remove(match)
        for w_match in w_matches:
            self._withdraw_list.remove(w_match)

    def _remove_old_paths(self):
        """Identifies which of known paths are old and removes them.

        Known paths will no longer have paths whose new version is present in
        new paths.
        """
        new_paths = self._new_path_list
        known_paths = self._known_path_list
        for new_path in new_paths:
            old_paths = []
            for path in known_paths:
                # Here we just check if source is same and not check if path
                # version num. as new_paths are implicit withdrawal of old
                # paths and when doing RouteRefresh (not EnhancedRouteRefresh)
                # we get same paths again.
                if new_path.source == path.source:
                    old_paths.append(path)
                    break

            for old_path in old_paths:
                known_paths.remove(old_path)
                LOG.debug('Implicit withdrawal of old path, since we have'
                          ' learned new path from same source: %s' % old_path)

    def _compute_best_known_path(self):
        """Computes the best path among known paths.

        Returns current best path among `known_paths`.
        """
        if not self._known_path_list:
            # Local import avoids a circular dependency with the processor.
            from ryu.services.protocols.bgp.processor import BgpProcessorError
            raise BgpProcessorError(desc='Need at-least one known path to'
                                         ' compute best path')

        # We pick the first path as current best path. This helps in breaking
        # tie between two new paths learned in one cycle for which best-path
        # calculation steps lead to tie.
        current_best_path = self._known_path_list[0]
        best_path_reason = BPR_ONLY_PATH
        for next_path in self._known_path_list[1:]:
            # Local import (cached by Python after the first iteration)
            # avoids a circular dependency with the processor module.
            from ryu.services.protocols.bgp.processor import compute_best_path
            # Compare next path with current best path.
            new_best_path, reason = \
                compute_best_path(self._core_service.asn, current_best_path,
                                  next_path)
            best_path_reason = reason
            if new_best_path is not None:
                current_best_path = new_best_path

        return current_best_path, best_path_reason

    def withdraw_unintresting_paths(self, interested_rts):
        """Withdraws paths that are no longer interesting.

        For all known paths that do not have any route target in common with
        given `interested_rts` we add a corresponding withdraw.

        Returns True if we added any withdraws.
        """
        add_withdraws = False
        for path in self._known_path_list:
            if not path.has_rts_in(interested_rts):
                self.withdraw_path(path)
                add_withdraws = True
        return add_withdraws

    def withdraw_path(self, path):
        # Queues a withdrawal clone of a currently-known path.
        if path not in self.known_path_list:
            raise ValueError("Path not known, no need to withdraw")
        withdraw = path.clone(for_withdrawal=True)
        self._withdraw_list.append(withdraw)

    def to_dict(self):
        # Summary view used for introspection/serialization by callers.
        return {'table': str(self._table),
                'nlri': str(self._nlri),
                'paths': self._known_path_list[:],
                'withdraws': self._get_num_withdraws()}

    def __str__(self):
        return ('Destination(table: %s, nlri: %s, paths: %s, withdraws: %s,'
                ' new paths: %s)' % (self._table, str(self._nlri),
                                     len(self._known_path_list),
                                     len(self._withdraw_list),
                                     len(self._new_path_list)))

    def _get_num_valid_paths(self):
        return len(self._known_path_list)

    def _get_num_withdraws(self):
        return len(self._withdraw_list)

    def sent_routes_by_peer(self, peer):
        """get sent routes corresponding to specified peer.

        Returns SentRoute list.
        """
        result = []
        for route in self._sent_routes.values():
            if route.sent_peer == peer:
                result.append(route)

        return result
class Path(object):
    """Represents a way of reaching an IP destination.

    Also contains other meta-data given to us by a specific source (such as a
    peer).
    """
    __metaclass__ = ABCMeta
    # __slots__ keeps per-instance memory small; a RIB can hold many paths.
    __slots__ = ('_source', '_path_attr_map', '_nlri', '_source_version_num',
                 '_exported_from', '_nexthop', 'next_path', 'prev_path',
                 '_is_withdraw', 'med_set_by_target_neighbor')
    ROUTE_FAMILY = RF_IPv4_UC

    def __init__(self, source, nlri, src_ver_num, pattrs=None, nexthop=None,
                 is_withdraw=False, med_set_by_target_neighbor=False):
        """Initializes Ipv4 path.

        If this path is not a withdraw, then path attribute and nexthop both
        should be provided.

        Parameters:
            - `source`: (Peer/str) source of this path.
            - `nlri`: (Vpnv4) Nlri instance for Vpnv4 route family.
            - `src_ver_num`: (int) version number of *source* when this path
            was learned.
            - `pattrs`: (OrderedDict) various path attributes for this path.
            - `nexthop`: (str) nexthop advertised for this path.
            - `is_withdraw`: (bool) True if this represents a withdrawal.

        Raises ValueError when the NLRI's route family does not match this
        class, when `source` is unsupported, or when a non-withdraw path is
        missing `pattrs`/`nexthop`.
        """
        self.med_set_by_target_neighbor = med_set_by_target_neighbor
        if nlri.ROUTE_FAMILY != self.__class__.ROUTE_FAMILY:
            raise ValueError('NLRI and Path route families do not'
                             ' match (%s, %s).' %
                             (nlri.ROUTE_FAMILY, self.__class__.ROUTE_FAMILY))

        # Currently paths injected directly into VRF has only one source
        # src_peer can be None to denote NC else has to be instance of Peer.
        # Paths can be exported from one VRF and then imported into another
        # VRF, in such cases it source is denoted as string VPN_TABLE.
        if not (source is None or
                hasattr(source, 'version_num') or
                source in (VRF_TABLE, VPN_TABLE)):
            raise ValueError('Invalid or Unsupported source for path: %s' %
                             source)

        # If this path is not a withdraw path, than it should have path-
        # attributes and nexthop.
        if not is_withdraw and not (pattrs and nexthop):
            raise ValueError('Need to provide nexthop and patattrs '
                             'for path that is not a withdraw.')

        # The entity (peer) that gave us this path.
        self._source = source

        # Path attribute of this path.
        if pattrs:
            # Shallow copy so later mutation of the caller's mapping does
            # not affect this path.
            self._path_attr_map = copy(pattrs)
        else:
            self._path_attr_map = OrderedDict()

        # NLRI that this path represents.
        self._nlri = nlri

        # If given nlri is withdrawn.
        self._is_withdraw = is_withdraw

        # @see Source.version_num
        self._source_version_num = src_ver_num

        self._nexthop = nexthop

        # Automatically generated.
        #
        # self.next_path
        # self.prev_path

        # The Destination from which this path was exported, if any.
        self._exported_from = None

    @property
    def source_version_num(self):
        return self._source_version_num

    @property
    def source(self):
        return self._source

    @property
    def route_family(self):
        return self.__class__.ROUTE_FAMILY

    @property
    def nlri(self):
        return self._nlri

    @property
    def is_withdraw(self):
        return self._is_withdraw

    @property
    def pathattr_map(self):
        # Defensive shallow copy: callers get a snapshot, not internal state.
        return copy(self._path_attr_map)

    @property
    def nexthop(self):
        return self._nexthop

    def get_pattr(self, pattr_type, default=None):
        """Returns path attribute of given type.

        Returns None if we do not attribute of type *pattr_type*.
        (More precisely, returns `default`, which itself defaults to None.)
        """
        return self._path_attr_map.get(pattr_type, default)

    def clone(self, for_withdrawal=False):
        """Returns a new path with the same source/nlri/version.

        A withdrawal clone intentionally carries no path attributes.
        """
        pathattrs = None
        if not for_withdrawal:
            pathattrs = self.pathattr_map
        clone = self.__class__(
            self.source,
            self.nlri,
            self.source_version_num,
            pattrs=pathattrs,
            nexthop=self.nexthop,
            is_withdraw=for_withdrawal
        )
        return clone

    def get_rts(self):
        # Route targets from the extended-communities attribute, or [].
        # NOTE(review): when the attribute exists this returns its live
        # rt_list object (not a copy) — see the append in has_rts_in().
        extcomm_attr = self._path_attr_map.get(
            BGP_ATTR_TYPE_EXTENDED_COMMUNITIES)
        if extcomm_attr is None:
            rts = []
        else:
            rts = extcomm_attr.rt_list
        return rts

    def has_rts_in(self, interested_rts):
        """Returns True if this `Path` has any `ExtCommunity` attribute
        route target common with `interested_rts`.
        """
        assert isinstance(interested_rts, set)
        curr_rts = self.get_rts()
        # Add default RT to path RTs so that we match interest for peers who
        # advertised default RT
        # NOTE(review): this append mutates the attribute's rt_list when one
        # exists (get_rts() may return the live list) — verify intended.
        curr_rts.append(RouteTargetMembershipNLRI.DEFAULT_RT)

        return not interested_rts.isdisjoint(curr_rts)

    def __str__(self):
        return (
            'Path(source: %s, nlri: %s, source ver#: %s, '
            'path attrs.: %s, nexthop: %s, is_withdraw: %s)' %
            (
                self._source, self._nlri, self._source_version_num,
                self._path_attr_map, self._nexthop, self._is_withdraw
            )
        )

    def __repr__(self):
        return ('Path(%s, %s, %s, %s, %s, %s)' % (
            self._source, self._nlri, self._source_version_num,
            self._path_attr_map, self._nexthop, self._is_withdraw))
class Filter(object):
    """Represents a general filter for in-bound and out-bound filter

    ================ ==================================================
    Attribute        Description
    ================ ==================================================
    policy           Filter.POLICY_PERMIT or Filter.POLICY_DENY
    ================ ==================================================
    """
    __metaclass__ = ABCMeta
    # Route family this filter applies to; subclasses may narrow it.
    ROUTE_FAMILY = RF_IPv4_UC
    POLICY_DENY = 0
    POLICY_PERMIT = 1
    def __init__(self, policy=POLICY_DENY):
        self._policy = policy
    @property
    def policy(self):
        """The filter action: POLICY_PERMIT or POLICY_DENY."""
        return self._policy
    @abstractmethod
    def evaluate(self, path):
        """ This method evaluates the given path.

        Returns a ``(policy, matched)`` tuple: this object's policy and
        the boolean result of matching.  What "matching" means is defined
        by each concrete subclass.

        ``path`` specifies the path to evaluate.
        """
        raise NotImplementedError()
    @abstractmethod
    def clone(self):
        """ This method clones Filter object.

        Returns Filter object that has the same values with the original one.
        """
        raise NotImplementedError()
class PrefixFilter(Filter):
    """
    Used to specify a prefix for filter.

    We can create PrefixFilter object as follows.

    prefix_filter = PrefixFilter('10.5.111.0/24',
                                 policy=PrefixFilter.POLICY_PERMIT)

    ================ ==================================================
    Attribute        Description
    ================ ==================================================
    prefix           A prefix used for this filter
    policy           PrefixFilter.POLICY_PERMIT or PrefixFilter.POLICY_DENY
    ge               Prefix length that will be applied to this filter.
                     ge means greater than or equal.
    le               Prefix length that will be applied to this filter.
                     le means less than or equal.
    ================ ==================================================

    For example, when PrefixFilter object is created as follows:

    * p = PrefixFilter('10.5.111.0/24',
                       policy=PrefixFilter.POLICY_DENY,
                       ge=26, le=28)

    prefixes which match 10.5.111.0/24 and its length matches
    from 26 to 28 will be filtered.
    When this filter is used as an out-filter, it will stop sending
    the path to neighbor because of POLICY_DENY.
    When this filter is used as in-filter, it will stop importing the path
    to the global rib because of POLICY_DENY.
    If you specify POLICY_PERMIT, the path is sent to neighbor or imported to
    the global rib.

    If you don't want to send prefixes 10.5.111.64/26 and 10.5.111.32/27
    and 10.5.111.16/28, and allow to send other 10.5.111.0's prefixes,
    you can do it by specifying as follows;

    * p = PrefixFilter('10.5.111.0/24',
                       policy=PrefixFilter.POLICY_DENY,
                       ge=26, le=28).
    """
    def __init__(self, prefix, policy, ge=None, le=None):
        super(PrefixFilter, self).__init__(policy)
        self._prefix = prefix
        self._network = netaddr.IPNetwork(prefix)
        self._ge = ge
        self._le = le
    def __cmp__(self, other):
        # Ordering is by textual prefix only; ge/le are ignored.
        return cmp(self.prefix, other.prefix)
    def __repr__(self):
        policy = 'PERMIT' \
            if self._policy == self.POLICY_PERMIT else 'DENY'
        return 'PrefixFilter(prefix=%s,policy=%s,ge=%s,le=%s)'\
            % (self._prefix, policy, self._ge, self._le)
    @property
    def prefix(self):
        """The prefix string this filter was built from."""
        return self._prefix
    @property
    def policy(self):
        """POLICY_PERMIT or POLICY_DENY."""
        return self._policy
    @property
    def ge(self):
        """Lower bound on prefix length, or None when unconstrained."""
        return self._ge
    @property
    def le(self):
        """Upper bound on prefix length, or None when unconstrained."""
        return self._le
    def evaluate(self, path):
        """ This method evaluates the prefix.

        Returns this object's policy and the result of matching.
        If the specified prefix matches this object's prefix and
        ge and le condition,
        this method returns True as the matching result.

        ``path`` specifies the path that has prefix.
        """
        nlri = path.nlri
        result = False
        length = nlri.length
        net = netaddr.IPNetwork(nlri.prefix)
        # NOTE(review): the branches below test ge/le by truthiness, so a
        # bound of 0 behaves like "unset" and no branch fires (result stays
        # False).  Prefix lengths of 0 are degenerate, but confirm this is
        # intended before relying on it.
        if net in self._network:
            if self._ge is None and self._le is None:
                result = True
            elif self._ge is None and self._le:
                if length <= self._le:
                    result = True
            elif self._ge and self._le is None:
                if self._ge <= length:
                    result = True
            elif self._ge and self._le:
                if self._ge <= length <= self._le:
                    result = True
        return self.policy, result
    def clone(self):
        """ This method clones PrefixFilter object.

        Returns PrefixFilter object that has the same values with the
        original one.
        """
        return self.__class__(self.prefix,
                              policy=self._policy,
                              ge=self._ge,
                              le=self._le)
class ASPathFilter(Filter):
    """
    Used to specify an AS number for filtering on the AS_PATH attribute.

    We can create ASPathFilter object as follows;

    * as_path_filter = ASPathFilter(65000, policy=ASPathFilter.POLICY_TOP)

    ================ ==================================================
    Attribute        Description
    ================ ==================================================
    as_number        An AS number used for this filter
    policy           ASPathFilter.POLICY_TOP and ASPathFilter.POLICY_END,
                     ASPathFilter.POLICY_INCLUDE and
                     ASPathFilter.POLICY_NOT_INCLUDE are available.
    ================ ==================================================

    Meaning of each policy is as follows;

    * POLICY_TOP :
        Filter checks if the specified AS number is at the top of
        AS_PATH attribute.
    * POLICY_END :
        Filter checks if the specified AS number
        is at the last of AS_PATH attribute.
    * POLICY_INCLUDE :
        Filter checks if specified AS number
        exists in AS_PATH attribute.
    * POLICY_NOT_INCLUDE :
        opposite to POLICY_INCLUDE
    """
    POLICY_TOP = 2
    POLICY_END = 3
    POLICY_INCLUDE = 4
    POLICY_NOT_INCLUDE = 5
    def __init__(self, as_number, policy):
        super(ASPathFilter, self).__init__(policy)
        self._as_number = as_number
    def __cmp__(self, other):
        # Ordering is by AS number only; policy is ignored.
        return cmp(self.as_number, other.as_number)
    def __repr__(self):
        policy = 'TOP'
        if self._policy == self.POLICY_INCLUDE:
            policy = 'INCLUDE'
        elif self._policy == self.POLICY_NOT_INCLUDE:
            policy = 'NOT_INCLUDE'
        elif self._policy == self.POLICY_END:
            policy = 'END'
        return 'ASPathFilter(as_number=%s,policy=%s)'\
            % (self._as_number, policy)
    @property
    def as_number(self):
        """The AS number this filter matches against."""
        return self._as_number
    @property
    def policy(self):
        """One of POLICY_TOP / POLICY_END / POLICY_INCLUDE /
        POLICY_NOT_INCLUDE."""
        return self._policy
    def evaluate(self, path):
        """ This method evaluates as_path list.

        Returns this object's policy and the result of matching.
        If the specified AS number matches this object's AS number
        according to the policy,
        this method returns True as the matching result.

        ``path`` specifies the path.
        """
        path_aspath = path.pathattr_map.get(BGP_ATTR_TYPE_AS_PATH)
        # A path may carry no AS_PATH attribute at all; treat that the
        # same as an empty AS sequence instead of raising AttributeError.
        if path_aspath is None:
            path_seg_list = []
        else:
            path_seg_list = path_aspath.path_seg_list
        if path_seg_list:
            path_seg = path_seg_list[0]
        else:
            path_seg = []
        result = False
        LOG.debug("path_seg : %s", path_seg)
        if self.policy == ASPathFilter.POLICY_TOP:
            if len(path_seg) > 0 and path_seg[0] == self._as_number:
                result = True
        elif self.policy == ASPathFilter.POLICY_INCLUDE:
            for aspath in path_seg:
                LOG.debug("POLICY_INCLUDE as_number : %s", aspath)
                if aspath == self._as_number:
                    result = True
                    break
        elif self.policy == ASPathFilter.POLICY_END:
            if len(path_seg) > 0 and path_seg[-1] == self._as_number:
                result = True
        elif self.policy == ASPathFilter.POLICY_NOT_INCLUDE:
            if self._as_number not in path_seg:
                result = True
        return self.policy, result
    def clone(self):
        """ This method clones ASPathFilter object.

        Returns ASPathFilter object that has the same values with the
        original one.
        """
        return self.__class__(self._as_number,
                              policy=self._policy)
class AttributeMap(object):
    """Maps an attribute value onto paths that pass a set of filters.

    When every filter in ``filters`` matches a path, the attribute of
    ``attr_type`` carrying ``attr_value`` is applied to that path::

        pref_filter = PrefixFilter('192.168.103.0/30',
                                   PrefixFilter.POLICY_PERMIT)
        attribute_map = AttributeMap([pref_filter],
                                     AttributeMap.ATTR_LOCAL_PREF, 250)
        speaker.attribute_map_set('192.168.50.102', [attribute_map])

    ``ASPathFilter`` instances may be used as filters as well; they match
    the AS_PATH attribute of the path against an AS number.

    =================== ==================================================
    Attribute           Description
    =================== ==================================================
    filters             A list of filter.
                        Each object should be a Filter class or its sub-class
    attr_type           A type of attribute to map on filters. Currently
                        AttributeMap.ATTR_LOCAL_PREF is available.
    attr_value          A attribute value
    =================== ==================================================
    """
    ATTR_LOCAL_PREF = '_local_pref'
    def __init__(self, filters, attr_type, attr_value):
        assert all(isinstance(f, Filter) for f in filters),\
            'all the items in filters must be an instance of Filter sub-class'
        self.filters = filters
        self.attr_type = attr_type
        self.attr_value = attr_value
    def evaluate(self, path):
        """Run every filter against *path*.

        Returns the ``(cause, result)`` pair produced by the last filter
        evaluated; evaluation stops at the first filter that does not
        match.  With no filters, returns ``(None, False)``.
        """
        cause, result = None, False
        for flt in self.filters:
            cause, result = flt.evaluate(path)
            if not result:
                break
        return cause, result
    def get_attribute(self):
        """Build the configured path attribute via its typed getter
        (``'get' + attr_type`` resolves to e.g. ``get_local_pref``)."""
        factory = getattr(self, 'get' + self.attr_type)
        return factory()
    def get_local_pref(self):
        """Return a LOCAL_PREF path attribute carrying attr_value."""
        return BGPPathAttributeLocalPref(value=self.attr_value)
    def clone(self):
        """Return a copy of this map with each filter cloned; attr_type
        and attr_value are shared."""
        cloned = [flt.clone() for flt in self.filters]
        return self.__class__(cloned, self.attr_type, self.attr_value)
    def __repr__(self):
        attr_type = 'LOCAL_PREF'\
            if self.attr_type == self.ATTR_LOCAL_PREF else None
        filter_string = ','.join(repr(flt) for flt in self.filters)
        return 'AttributeMap(filters=[%s],attribute_type=%s,attribute_value=%s)'\
            % (filter_string, attr_type, self.attr_value)
| apache-2.0 |
raymonddavis/Angular-SailsJs-SocketIo | web/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]
# gypsh does not expand these variables: map each one back to itself in GYP
# "<(VAR)" syntax so they survive processing unresolved.
generator_default_variables = {
}
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python session with the processed GYP data.

  Exposes target_list, target_dicts and data as locals of the session so
  the structures produced by the input module can be explored by hand.
  """
  # Keep the interactive namespace in its own dict; the original name
  # `locals` shadowed the builtin of the same name.
  interact_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
      }
  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
           (sys.version, sys.platform, repr(sorted(interact_locals.keys())))
  code.interact(banner, local=interact_locals)
| mit |
vijeth-aradhya/coala-bears | tests/js/ESLintBearTest.py | 21 | 1643 | import os
from bears.js.ESLintBear import ESLintBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_good = """function addOne(i) {
if (!isNaN(i)) {
return i+1;
}
return i;
}
addOne(3);
"""
test_bad = """function addOne(i) {
if (i != NaN) {
return i ++
}
else {
return
}
};
"""
test_import_good = """
import test from "./test";
test();
"""
test_import_bad = """
import test from "../test";
test();
"""
test_syntax_error = '{<!@3@^ yeah!/\n'
test_dir = os.path.join(os.path.dirname(__file__), 'test_files')
ESLintBearWithConfigTest = verify_local_bear(
ESLintBear,
valid_files=('',),
invalid_files=(test_bad, test_good),
settings={'eslint_config': os.path.join(test_dir, 'eslintconfig.json')})
ESLintBearWithoutConfigTest = verify_local_bear(
ESLintBear,
valid_files=(test_good, ''),
invalid_files=(test_syntax_error, test_bad))
# If there is an invalid config file, the results cannot be found. So, no
# file gives a result.
ESLintBearWithUnloadablePluginTest = verify_local_bear(
ESLintBear,
valid_files=(test_bad, test_good),
invalid_files=(),
settings={'eslint_config': os.path.join(test_dir,
'eslintconfig_badplugin.json')})
ESLintBearImportTest = verify_local_bear(
ESLintBear,
valid_files=(test_import_good, ),
invalid_files=(test_import_bad, ),
filename=os.path.join(test_dir, 'test.js'),
create_tempfile=False,
settings={'eslint_config': os.path.join(test_dir,
'eslintconfig_import.json')})
| agpl-3.0 |
AlphaX2/FotoShareN9 | 0.9.7/fotoshare_deb/opt/FotoShareN9/simplejson/tests/test_decimal.py | 45 | 2357 | import decimal
from decimal import Decimal
from unittest import TestCase
from StringIO import StringIO
import simplejson as json
class TestDecimal(TestCase):
    """Encode/decode/round-trip tests for simplejson's Decimal support."""
    NUMS = "1.0", "10.00", "1.1", "1234567890.1234567890", "500"
    def dumps(self, obj, **kw):
        """Serialize via both dump() and dumps(), assert they agree and
        return the string result."""
        sio = StringIO()
        json.dump(obj, sio, **kw)
        res = json.dumps(obj, **kw)
        # assertEqual replaces the deprecated assertEquals alias throughout.
        self.assertEqual(res, sio.getvalue())
        return res
    def loads(self, s, **kw):
        """Deserialize via both loads() and load(), assert they agree and
        return the parsed result."""
        sio = StringIO(s)
        res = json.loads(s, **kw)
        self.assertEqual(res, json.load(sio, **kw))
        return res
    def test_decimal_encode(self):
        for d in map(Decimal, self.NUMS):
            self.assertEqual(self.dumps(d, use_decimal=True), str(d))
    def test_decimal_decode(self):
        for s in self.NUMS:
            self.assertEqual(self.loads(s, parse_float=Decimal), Decimal(s))
    def test_decimal_roundtrip(self):
        for d in map(Decimal, self.NUMS):
            # The type might not be the same (int and Decimal) but they
            # should still compare equal.
            self.assertEqual(
                self.loads(
                    self.dumps(d, use_decimal=True), parse_float=Decimal),
                d)
            self.assertEqual(
                self.loads(
                    self.dumps([d], use_decimal=True), parse_float=Decimal),
                [d])
    def test_decimal_defaults(self):
        d = Decimal('1.1')
        # use_decimal=True is the default
        self.assertRaises(TypeError, json.dumps, d, use_decimal=False)
        self.assertEqual('1.1', json.dumps(d))
        self.assertEqual('1.1', json.dumps(d, use_decimal=True))
        self.assertRaises(TypeError, json.dump, d, StringIO(),
                          use_decimal=False)
        sio = StringIO()
        json.dump(d, sio)
        self.assertEqual('1.1', sio.getvalue())
        sio = StringIO()
        json.dump(d, sio, use_decimal=True)
        self.assertEqual('1.1', sio.getvalue())
    def test_decimal_reload(self):
        # Simulate a subinterpreter that reloads the Python modules but not
        # the C code https://github.com/simplejson/simplejson/issues/34
        global Decimal
        Decimal = reload(decimal).Decimal
        import simplejson.encoder
        simplejson.encoder.Decimal = Decimal
        self.test_decimal_roundtrip()
| gpl-3.0 |
0xMF/pelican | pelican/generators.py | 2 | 28693 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import os
import six
import math
import random
import logging
import shutil
import fnmatch
import calendar
from codecs import open
from collections import defaultdict
from functools import partial
from itertools import chain, groupby
from operator import attrgetter, itemgetter
from jinja2 import (Environment, FileSystemLoader, PrefixLoader, ChoiceLoader,
BaseLoader, TemplateNotFound)
from pelican.contents import Article, Draft, Page, Static, is_valid_content
from pelican.readers import Readers
from pelican.utils import (copy, process_translations, mkdir_p, DateFormatter,
FileStampDataCacher, python_2_unicode_compatible)
from pelican import signals
logger = logging.getLogger(__name__)
@python_2_unicode_compatible
class Generator(object):
    """Baseclass generator"""
    def __init__(self, context, settings, path, theme, output_path,
                 readers_cache_name='', **kwargs):
        self.context = context
        self.settings = settings
        self.path = path
        self.theme = theme
        self.output_path = output_path
        for arg, value in kwargs.items():
            setattr(self, arg, value)
        self.readers = Readers(self.settings, readers_cache_name)
        # templates cache
        self._templates = {}
        self._templates_path = []
        self._templates_path.append(os.path.expanduser(
            os.path.join(self.theme, 'templates')))
        self._templates_path += self.settings['EXTRA_TEMPLATES_PATHS']
        theme_path = os.path.dirname(os.path.abspath(__file__))
        simple_loader = FileSystemLoader(os.path.join(theme_path,
                                         "themes", "simple", "templates"))
        self.env = Environment(
            trim_blocks=True,
            lstrip_blocks=True,
            loader=ChoiceLoader([
                FileSystemLoader(self._templates_path),
                simple_loader,  # implicit inheritance
                PrefixLoader({'!simple': simple_loader})  # explicit one
            ]),
            extensions=self.settings['JINJA_EXTENSIONS'],
        )
        logger.debug('Template list: %s', self.env.list_templates())
        # provide utils.strftime as a jinja filter
        self.env.filters.update({'strftime': DateFormatter()})
        # get custom Jinja filters from user settings
        custom_filters = self.settings['JINJA_FILTERS']
        self.env.filters.update(custom_filters)
        signals.generator_init.send(self)
    def get_template(self, name):
        """Return the template by name.

        Use self.theme to get the templates to use, and return a list of
        templates ready to use with Jinja2.
        """
        if name not in self._templates:
            try:
                self._templates[name] = self.env.get_template(name + '.html')
            except TemplateNotFound:
                raise Exception('[templates] unable to load %s.html from %s'
                                % (name, self._templates_path))
        return self._templates[name]
    def _include_path(self, path, extensions=None):
        """Inclusion logic for .get_files(), returns True/False

        :param path: the path which might be including
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)
        """
        if extensions is None:
            extensions = tuple(self.readers.extensions)
        basename = os.path.basename(path)
        # check IGNORE_FILES
        ignores = self.settings['IGNORE_FILES']
        if any(fnmatch.fnmatch(basename, ignore) for ignore in ignores):
            return False
        if extensions is False or basename.endswith(extensions):
            return True
        return False
    def get_files(self, paths, exclude=(), extensions=None):
        """Return a list of files to use, based on rules

        :param paths: the list of paths to search (relative to self.path)
        :param exclude: the list of path to exclude
        :param extensions: the list of allowed extensions (if False, all
            extensions are allowed)
        """
        # note: `exclude` defaults to an immutable tuple -- a mutable []
        # default would be shared between calls.
        if isinstance(paths, six.string_types):
            paths = [paths]  # backward compatibility for older generators
        files = []
        for path in paths:
            root = os.path.join(self.path, path)
            if os.path.isdir(root):
                for dirpath, dirs, temp_files in os.walk(root, followlinks=True):
                    for e in exclude:
                        if e in dirs:
                            dirs.remove(e)
                    reldir = os.path.relpath(dirpath, self.path)
                    for f in temp_files:
                        fp = os.path.join(reldir, f)
                        if self._include_path(fp, extensions):
                            files.append(fp)
            elif os.path.exists(root) and self._include_path(path, extensions):
                files.append(path)  # can't walk non-directories
        return files
    def add_source_path(self, content):
        """Record a content object in the shared filenames map, keyed by
        its source path relative to the content directory."""
        location = content.get_relative_source_path()
        self.context['filenames'][location] = content
    def _update_context(self, items):
        """Update the context with the given items from the currrent
        processor.
        """
        for item in items:
            value = getattr(self, item)
            if hasattr(value, 'items'):
                value = list(value.items())  # py3k safeguard for iterators
            self.context[item] = value
    def __str__(self):
        # return the name of the class for logging purposes
        return self.__class__.__name__
class CachingGenerator(Generator, FileStampDataCacher):
    '''Subclass of Generator and FileStampDataCacher classes

    enables content caching, either at the generator or reader level
    '''
    def __init__(self, *args, **kwargs):
        '''Initialize the generator, then set up caching

        note the multiple inheritance structure
        '''
        cls_name = self.__class__.__name__
        # Each concrete generator gets its own reader-level cache file.
        Generator.__init__(self, *args,
                           readers_cache_name=(cls_name + '-Readers'),
                           **kwargs)
        # Generator-level caching is active only when CONTENT_CACHING_LAYER
        # selects it; otherwise caching happens inside the readers.
        cache_this_level = self.settings['CONTENT_CACHING_LAYER'] == 'generator'
        caching_policy = cache_this_level and self.settings['CACHE_CONTENT']
        load_policy = cache_this_level and self.settings['LOAD_CONTENT_CACHE']
        FileStampDataCacher.__init__(self, self.settings, cls_name,
                                     caching_policy, load_policy
                                     )
    def _get_file_stamp(self, filename):
        '''Get filestamp for path relative to generator.path'''
        filename = os.path.join(self.path, filename)
        return super(CachingGenerator, self)._get_file_stamp(filename)
class _FileLoader(BaseLoader):
    """Jinja2 loader that serves exactly one template file from disk."""
    def __init__(self, path, basedir):
        self.path = path
        self.fullpath = os.path.join(basedir, path)
    def get_source(self, environment, template):
        # Only the single registered template is served; anything else is
        # delegated to the next loader by raising TemplateNotFound.
        if template != self.path or not os.path.exists(self.fullpath):
            raise TemplateNotFound(template)
        stamp = os.path.getmtime(self.fullpath)
        with open(self.fullpath, 'r', encoding='utf-8') as handle:
            contents = handle.read()
        # The uptodate callable lets Jinja2 invalidate its cache when the
        # file's mtime changes.
        def uptodate():
            return stamp == os.path.getmtime(self.fullpath)
        return contents, self.fullpath, uptodate
class TemplatePagesGenerator(Generator):
    def generate_output(self, writer):
        """Render each TEMPLATE_PAGES source template to its destination."""
        for source, dest in self.settings['TEMPLATE_PAGES'].items():
            # Temporarily put a single-file loader at the front of the
            # loader chain so `source` resolves to the user's file.
            self.env.loader.loaders.insert(0, _FileLoader(source, self.path))
            try:
                template = self.env.get_template(source)
                rurls = self.settings['RELATIVE_URLS']
                writer.write_file(dest, template, self.context, rurls,
                                  override_output=True)
            finally:
                # Always remove the loader we inserted, even on failure,
                # so later templates are not shadowed.
                del self.env.loader.loaders[0]
class ArticlesGenerator(CachingGenerator):
"""Generate blog articles"""
def __init__(self, *args, **kwargs):
"""initialize properties"""
self.articles = [] # only articles in default language
self.translations = []
self.dates = {}
self.tags = defaultdict(list)
self.categories = defaultdict(list)
self.related_posts = []
self.authors = defaultdict(list)
self.drafts = [] # only drafts in default language
self.drafts_translations = []
super(ArticlesGenerator, self).__init__(*args, **kwargs)
signals.article_generator_init.send(self)
def generate_feeds(self, writer):
"""Generate the feeds from the current context, and output files."""
if self.settings.get('FEED_ATOM'):
writer.write_feed(self.articles, self.context,
self.settings['FEED_ATOM'])
if self.settings.get('FEED_RSS'):
writer.write_feed(self.articles, self.context,
self.settings['FEED_RSS'], feed_type='rss')
if (self.settings.get('FEED_ALL_ATOM')
or self.settings.get('FEED_ALL_RSS')):
all_articles = list(self.articles)
for article in self.articles:
all_articles.extend(article.translations)
all_articles.sort(key=attrgetter('date'), reverse=True)
if self.settings.get('FEED_ALL_ATOM'):
writer.write_feed(all_articles, self.context,
self.settings['FEED_ALL_ATOM'])
if self.settings.get('FEED_ALL_RSS'):
writer.write_feed(all_articles, self.context,
self.settings['FEED_ALL_RSS'],
feed_type='rss')
for cat, arts in self.categories:
arts.sort(key=attrgetter('date'), reverse=True)
if self.settings.get('CATEGORY_FEED_ATOM'):
writer.write_feed(arts, self.context,
self.settings['CATEGORY_FEED_ATOM']
% cat.slug)
if self.settings.get('CATEGORY_FEED_RSS'):
writer.write_feed(arts, self.context,
self.settings['CATEGORY_FEED_RSS']
% cat.slug, feed_type='rss')
for auth, arts in self.authors:
arts.sort(key=attrgetter('date'), reverse=True)
if self.settings.get('AUTHOR_FEED_ATOM'):
writer.write_feed(arts, self.context,
self.settings['AUTHOR_FEED_ATOM']
% auth.slug)
if self.settings.get('AUTHOR_FEED_RSS'):
writer.write_feed(arts, self.context,
self.settings['AUTHOR_FEED_RSS']
% auth.slug, feed_type='rss')
if (self.settings.get('TAG_FEED_ATOM')
or self.settings.get('TAG_FEED_RSS')):
for tag, arts in self.tags.items():
arts.sort(key=attrgetter('date'), reverse=True)
if self.settings.get('TAG_FEED_ATOM'):
writer.write_feed(arts, self.context,
self.settings['TAG_FEED_ATOM']
% tag.slug)
if self.settings.get('TAG_FEED_RSS'):
writer.write_feed(arts, self.context,
self.settings['TAG_FEED_RSS'] % tag.slug,
feed_type='rss')
if (self.settings.get('TRANSLATION_FEED_ATOM')
or self.settings.get('TRANSLATION_FEED_RSS')):
translations_feeds = defaultdict(list)
for article in chain(self.articles, self.translations):
translations_feeds[article.lang].append(article)
for lang, items in translations_feeds.items():
items.sort(key=attrgetter('date'), reverse=True)
if self.settings.get('TRANSLATION_FEED_ATOM'):
writer.write_feed(
items, self.context,
self.settings['TRANSLATION_FEED_ATOM'] % lang)
if self.settings.get('TRANSLATION_FEED_RSS'):
writer.write_feed(
items, self.context,
self.settings['TRANSLATION_FEED_RSS'] % lang,
feed_type='rss')
def generate_articles(self, write):
"""Generate the articles."""
for article in chain(self.translations, self.articles):
signals.article_generator_write_article.send(self, content=article)
write(article.save_as, self.get_template(article.template),
self.context, article=article, category=article.category,
override_output=hasattr(article, 'override_save_as'))
def generate_period_archives(self, write):
"""Generate per-year, per-month, and per-day archives."""
try:
template = self.get_template('period_archives')
except Exception:
template = self.get_template('archives')
period_save_as = {
'year': self.settings['YEAR_ARCHIVE_SAVE_AS'],
'month': self.settings['MONTH_ARCHIVE_SAVE_AS'],
'day': self.settings['DAY_ARCHIVE_SAVE_AS'],
}
period_date_key = {
'year': attrgetter('date.year'),
'month': attrgetter('date.year', 'date.month'),
'day': attrgetter('date.year', 'date.month', 'date.day')
}
def _generate_period_archives(dates, key, save_as_fmt):
"""Generate period archives from `dates`, grouped by
`key` and written to `save_as`.
"""
# `dates` is already sorted by date
for _period, group in groupby(dates, key=key):
archive = list(group)
# arbitrarily grab the first date so that the usual
# format string syntax can be used for specifying the
# period archive dates
date = archive[0].date
save_as = save_as_fmt.format(date=date)
context = self.context.copy()
if key == period_date_key['year']:
context["period"] = (_period,)
else:
month_name = calendar.month_name[_period[1]]
if not six.PY3:
month_name = month_name.decode('utf-8')
if key == period_date_key['month']:
context["period"] = (_period[0],
month_name)
else:
context["period"] = (_period[0],
month_name,
_period[2])
write(save_as, template, context,
dates=archive, blog=True)
for period in 'year', 'month', 'day':
save_as = period_save_as[period]
if save_as:
key = period_date_key[period]
_generate_period_archives(self.dates, key, save_as)
def generate_direct_templates(self, write):
"""Generate direct templates pages"""
PAGINATED_TEMPLATES = self.settings['PAGINATED_DIRECT_TEMPLATES']
for template in self.settings['DIRECT_TEMPLATES']:
paginated = {}
if template in PAGINATED_TEMPLATES:
paginated = {'articles': self.articles, 'dates': self.dates}
save_as = self.settings.get("%s_SAVE_AS" % template.upper(),
'%s.html' % template)
if not save_as:
continue
write(save_as, self.get_template(template),
self.context, blog=True, paginated=paginated,
page_name=os.path.splitext(save_as)[0])
def generate_tags(self, write):
"""Generate Tags pages."""
tag_template = self.get_template('tag')
for tag, articles in self.tags.items():
articles.sort(key=attrgetter('date'), reverse=True)
dates = [article for article in self.dates if article in articles]
write(tag.save_as, tag_template, self.context, tag=tag,
articles=articles, dates=dates,
paginated={'articles': articles, 'dates': dates},
page_name=tag.page_name, all_articles=self.articles)
def generate_categories(self, write):
"""Generate category pages."""
category_template = self.get_template('category')
for cat, articles in self.categories:
articles.sort(key=attrgetter('date'), reverse=True)
dates = [article for article in self.dates if article in articles]
write(cat.save_as, category_template, self.context,
category=cat, articles=articles, dates=dates,
paginated={'articles': articles, 'dates': dates},
page_name=cat.page_name, all_articles=self.articles)
def generate_authors(self, write):
"""Generate Author pages."""
author_template = self.get_template('author')
for aut, articles in self.authors:
articles.sort(key=attrgetter('date'), reverse=True)
dates = [article for article in self.dates if article in articles]
write(aut.save_as, author_template, self.context,
author=aut, articles=articles, dates=dates,
paginated={'articles': articles, 'dates': dates},
page_name=aut.page_name, all_articles=self.articles)
def generate_drafts(self, write):
"""Generate drafts pages."""
for draft in chain(self.drafts_translations, self.drafts):
write(draft.save_as, self.get_template(draft.template),
self.context, article=draft, category=draft.category,
override_output=hasattr(draft, 'override_save_as'),
all_articles=self.articles)
def generate_pages(self, writer):
"""Generate the pages on the disk"""
write = partial(writer.write_file,
relative_urls=self.settings['RELATIVE_URLS'])
# to minimize the number of relative path stuff modification
# in writer, articles pass first
self.generate_articles(write)
self.generate_period_archives(write)
self.generate_direct_templates(write)
# and subfolders after that
self.generate_tags(write)
self.generate_categories(write)
self.generate_authors(write)
self.generate_drafts(write)
def generate_context(self):
"""Add the articles into the shared context"""
all_articles = []
all_drafts = []
for f in self.get_files(
self.settings['ARTICLE_PATHS'],
exclude=self.settings['ARTICLE_EXCLUDES']):
article = self.get_cached_data(f, None)
if article is None:
try:
article = self.readers.read_file(
base_path=self.path, path=f, content_class=Article,
context=self.context,
preread_signal=signals.article_generator_preread,
preread_sender=self,
context_signal=signals.article_generator_context,
context_sender=self)
except Exception as e:
logger.error('Could not process %s\n%s', f, e,
exc_info=self.settings.get('DEBUG', False))
continue
if not is_valid_content(article, f):
continue
self.cache_data(f, article)
self.add_source_path(article)
if article.status.lower() == "published":
all_articles.append(article)
elif article.status.lower() == "draft":
draft = self.readers.read_file(
base_path=self.path, path=f, content_class=Draft,
context=self.context,
preread_signal=signals.article_generator_preread,
preread_sender=self,
context_signal=signals.article_generator_context,
context_sender=self)
all_drafts.append(draft)
else:
logger.error("Unknown status '%s' for file %s, skipping it.",
article.status, f)
self.articles, self.translations = process_translations(all_articles)
self.drafts, self.drafts_translations = \
process_translations(all_drafts)
signals.article_generator_pretaxonomy.send(self)
for article in self.articles:
# only main articles are listed in categories and tags
# not translations
self.categories[article.category].append(article)
if hasattr(article, 'tags'):
for tag in article.tags:
self.tags[tag].append(article)
# ignore blank authors as well as undefined
for author in getattr(article, 'authors', []):
if author.name != '':
self.authors[author].append(article)
# sort the articles by date
self.articles.sort(key=attrgetter('date'), reverse=True)
self.dates = list(self.articles)
self.dates.sort(key=attrgetter('date'),
reverse=self.context['NEWEST_FIRST_ARCHIVES'])
# create tag cloud
tag_cloud = defaultdict(int)
for article in self.articles:
for tag in getattr(article, 'tags', []):
tag_cloud[tag] += 1
tag_cloud = sorted(tag_cloud.items(), key=itemgetter(1), reverse=True)
tag_cloud = tag_cloud[:self.settings.get('TAG_CLOUD_MAX_ITEMS')]
tags = list(map(itemgetter(1), tag_cloud))
if tags:
max_count = max(tags)
steps = self.settings.get('TAG_CLOUD_STEPS')
# calculate word sizes
self.tag_cloud = [
(
tag,
int(math.floor(steps - (steps - 1) * math.log(count)
/ (math.log(max_count)or 1)))
)
for tag, count in tag_cloud
]
# put words in chaos
random.shuffle(self.tag_cloud)
# and generate the output :)
# order the categories per name
self.categories = list(self.categories.items())
self.categories.sort(
reverse=self.settings['REVERSE_CATEGORY_ORDER'])
self.authors = list(self.authors.items())
self.authors.sort()
self._update_context(('articles', 'dates', 'tags', 'categories',
'tag_cloud', 'authors', 'related_posts'))
self.save_cache()
self.readers.save_cache()
signals.article_generator_finalized.send(self)
    def generate_output(self, writer):
        """Write all article output with *writer*.

        Feeds are written first, then the article/period pages; finally the
        ``article_writer_finalized`` signal tells plugins that writing is done.
        """
        self.generate_feeds(writer)
        self.generate_pages(writer)
        signals.article_writer_finalized.send(self, writer=writer)
class PagesGenerator(CachingGenerator):
    """Read the PAGE_PATHS sources and generate the static pages.

    ``generate_context`` classifies every page by its ``status`` metadata
    ("published" vs "hidden"); ``generate_output`` then renders all of them.
    """

    def __init__(self, *args, **kwargs):
        # Populated by generate_context(); kept as attributes so plugins and
        # generate_output() can reach them.
        self.pages = []
        self.hidden_pages = []
        self.hidden_translations = []
        super(PagesGenerator, self).__init__(*args, **kwargs)
        signals.page_generator_init.send(self)

    def generate_context(self):
        """Read every page source file and populate self.pages,
        self.hidden_pages and their translation lists."""
        all_pages = []
        hidden_pages = []
        for f in self.get_files(
                self.settings['PAGE_PATHS'],
                exclude=self.settings['PAGE_EXCLUDES']):
            # Cache hit skips re-reading (and re-validating) the file.
            page = self.get_cached_data(f, None)
            if page is None:
                try:
                    page = self.readers.read_file(
                        base_path=self.path, path=f, content_class=Page,
                        context=self.context,
                        preread_signal=signals.page_generator_preread,
                        preread_sender=self,
                        context_signal=signals.page_generator_context,
                        context_sender=self)
                except Exception as e:
                    logger.error('Could not process %s\n%s', f, e,
                                 exc_info=self.settings.get('DEBUG', False))
                    continue
                if not is_valid_content(page, f):
                    continue
                self.cache_data(f, page)
            self.add_source_path(page)
            if page.status == "published":
                all_pages.append(page)
            elif page.status == "hidden":
                hidden_pages.append(page)
            else:
                logger.error("Unknown status '%s' for file %s, skipping it.",
                             page.status, f)
        # Split each group into canonical pages and their translations.
        self.pages, self.translations = process_translations(all_pages)
        self.hidden_pages, self.hidden_translations = (
            process_translations(hidden_pages))
        self._update_context(('pages', ))
        self.context['PAGES'] = self.pages
        self.save_cache()
        self.readers.save_cache()
        signals.page_generator_finalized.send(self)

    def generate_output(self, writer):
        """Render every page (visible and hidden, plus translations)."""
        for page in chain(self.translations, self.pages,
                          self.hidden_translations, self.hidden_pages):
            writer.write_file(
                page.save_as, self.get_template(page.template),
                self.context, page=page,
                relative_urls=self.settings['RELATIVE_URLS'],
                override_output=hasattr(page, 'override_save_as'))
class StaticGenerator(Generator):
    """Copy static paths (images, media, ...) from the source tree into the
    output directory."""

    def __init__(self, *args, **kwargs):
        super(StaticGenerator, self).__init__(*args, **kwargs)
        signals.static_generator_init.send(self)

    def _copy_paths(self, paths, source, destination, output_path,
                    final_path=None):
        """Copy each entry of *paths* from *source* into *destination*.

        A truthy *final_path* replaces the entry's own name at the
        destination.
        """
        for path in paths:
            target_name = final_path if final_path else path
            copy(os.path.join(source, path),
                 os.path.join(output_path, destination, target_name))

    def generate_context(self):
        """Register every STATIC_PATHS file through the readers machinery so
        plugins receive the usual preread/context signals."""
        self.staticfiles = []
        found_files = self.get_files(self.settings['STATIC_PATHS'],
                                     extensions=False)
        for f in found_files:
            static = self.readers.read_file(
                base_path=self.path, path=f, content_class=Static,
                fmt='static', context=self.context,
                preread_signal=signals.static_generator_preread,
                preread_sender=self,
                context_signal=signals.static_generator_context,
                context_sender=self)
            self.staticfiles.append(static)
            self.add_source_path(static)
        self._update_context(('staticfiles',))
        signals.static_generator_finalized.send(self)

    def generate_output(self, writer):
        """Copy theme assets first, then every registered static file."""
        self._copy_paths(self.settings['THEME_STATIC_PATHS'], self.theme,
                         self.settings['THEME_STATIC_DIR'], self.output_path,
                         os.curdir)
        # copy all Static files
        for static_file in self.context['staticfiles']:
            src = os.path.join(self.path, static_file.source_path)
            dst = os.path.join(self.output_path, static_file.save_as)
            mkdir_p(os.path.dirname(dst))
            shutil.copy2(src, dst)
            logger.info('Copying %s to %s',
                        static_file.source_path, static_file.save_as)
class SourceFileGenerator(Generator):
    """Mirror the raw source file of every article and page into the output
    tree, renamed with OUTPUT_SOURCES_EXTENSION."""

    def generate_context(self):
        self.output_extension = self.settings['OUTPUT_SOURCES_EXTENSION']

    def _create_source(self, obj):
        """Copy *obj*'s source next to its rendered output, with the
        configured extension swapped in."""
        stem = os.path.splitext(obj.save_as)[0]
        dest = os.path.join(self.output_path, stem + self.output_extension)
        copy(obj.source_path, dest)

    def generate_output(self, writer=None):
        logger.info('Generating source files...')
        for content in chain(self.context['articles'], self.context['pages']):
            self._create_source(content)
            for translation in content.translations:
                self._create_source(translation)
| agpl-3.0 |
BNUCNL/FreeROI | froi/widgets/listwidget.py | 2 | 16763 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""A viewer (part of Qt model-view-delegate classes) for layer selection
and parameters alternating.
"""
from numpy import array_equal
from numpy import around
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from ..core.labelconfig import LabelConfig
from toolstabwidget import ToolsTabWidget
from basicwidget import BasicWidget
from ..utils import *
class LayerView(QWidget):
    """Widget for layer selection and parameter editing.

    Shows the layer list, per-layer display parameters (min/max, colormap,
    opacity), the crosshair cursor coordinates/value, and the common tool
    tabs. Inherits QWidget.
    """
    # Emitted whenever the current layer or time point changes.
    current_changed = pyqtSignal()
    builtin_colormap = ['gray',
                        'red2yellow',
                        'blue2cyanblue',
                        'red',
                        'green',
                        'blue',
                        'rainbow',
                        'single ROI']

    def __init__(self, label_config_center, parent=None):
        """Initialize the widget.

        label_config_center: provider of the user-defined label colormaps
        appended after builtin_colormap; parent: optional Qt parent widget.
        """
        super(LayerView, self).__init__(parent)
        self.setSizePolicy(QSizePolicy.Maximum, QSizePolicy.Expanding)
        self.setMaximumWidth(280)
        self._icon_dir = get_icon_dir()
        self.label_config_center = label_config_center
        # initialize the model (assigned later via setModel())
        self._model = None

    def _init_gui(self):
        """Initialize a GUI designation."""
        # initialize QListView
        self._list_view = QListView()
        # initialize up/down push button
        button_size=QSize(12,12)
        self._up_button = QPushButton()
        self._up_button.setIcon(QIcon(os.path.join(
            self._icon_dir, 'arrow_up.png')))
        self._up_button.setIconSize(button_size)
        self._down_button = QPushButton()
        self._down_button.setIcon(QIcon(os.path.join(
            self._icon_dir, 'arrow_down.png')))
        self._down_button.setIconSize(button_size)
        # layout config for list_view panel
        button_layout = QHBoxLayout()
        # initialize parameter selection widgets
        visibility_label = QLabel('Visibility')
        self._visibility = QSlider(Qt.Horizontal)
        self._visibility.setMinimum(0)
        self._visibility.setMaximum(100)
        self._visibility.setSingleStep(5)
        button_layout.addWidget(visibility_label)
        button_layout.addWidget(self._visibility)
        button_layout.addWidget(self._up_button)
        button_layout.addWidget(self._down_button)
        max_label = QLabel('Max:')
        self._view_max = QLineEdit()
        min_label = QLabel('Min:')
        self._view_min = QLineEdit()
        colormap_label = QLabel('Colormap:')
        self._colormap = QComboBox()
        # Builtin colormaps first, then the user-defined label configs.
        colormaps = self.builtin_colormap + \
            self.label_config_center.get_all_labelconfig_names()
        self._colormap.addItems(colormaps)
        # initialize parameter selection panel
        grid_layout = QGridLayout()
        grid_layout.addWidget(colormap_label, 1, 0)
        grid_layout.addWidget(self._colormap, 1, 1, 1, 3)
        para_layout = QHBoxLayout()
        para_layout.addWidget(min_label)
        para_layout.addWidget(self._view_min)
        para_layout.addWidget(max_label)
        para_layout.addWidget(self._view_max)
        list_view_layout = QVBoxLayout()
        list_view_layout.addWidget(self._list_view)
        list_view_layout.addLayout(button_layout)
        list_view_layout.addLayout(para_layout)
        list_view_layout.addLayout(grid_layout)
        # initialize cursor coord&value widgets
        coord_x_label = QLabel('X:')
        self._coord_x = QSpinBox()
        # NOTE(review): X spinbox range comes from getY() and the Y spinbox
        # below from getX(); presumably the model's first data axis maps to
        # the display Y axis -- confirm against the model's dimension order.
        self._coord_x.setRange(0, self._model.getY()-1)
        self._space_x = QLineEdit()
        #self._space_x.setReadOnly(True)
        coord_y_label = QLabel('Y:')
        self._coord_y = QSpinBox()
        self._coord_y.setRange(0, self._model.getX()-1)
        self._space_y = QLineEdit()
        #self._space_y.setReadOnly(True)
        coord_z_label = QLabel('Z:')
        self._coord_z = QSpinBox()
        self._coord_z.setRange(0, self._model.getZ()-1)
        self._space_z = QLineEdit()
        #self._space_z.setReadOnly(True)
        # Set time point
        volume_index_label = QLabel('Volume:')
        self._volume_index_spinbox = QSpinBox()
        self._volume_index_spinbox.setValue(0)
        # voxel value
        coord_value_label = QLabel('Value:')
        self._coord_value = QLineEdit()
        self._coord_value.setReadOnly(True)
        # RAS sapce name
        space_label = QLabel('Coordinate space:')
        space_name = QLabel(self._model.get_space_name())
        # layout
        value_layout = QHBoxLayout()
        value_layout.addWidget(coord_value_label)
        value_layout.addWidget(self._coord_value)
        vol_layout = QHBoxLayout()
        vol_layout.addWidget(volume_index_label)
        vol_layout.addWidget(self._volume_index_spinbox)
        space_layout = QHBoxLayout()
        space_layout.addWidget(space_label)
        space_layout.addWidget(space_name)
        glayout = QGridLayout()
        glayout.addWidget(coord_x_label, 0, 0)
        glayout.addWidget(self._coord_x, 0, 1)
        glayout.addWidget(self._space_x, 0, 2)
        glayout.addWidget(coord_y_label, 1, 0)
        glayout.addWidget(self._coord_y, 1, 1)
        glayout.addWidget(self._space_y, 1, 2)
        glayout.addWidget(coord_z_label, 2, 0)
        glayout.addWidget(self._coord_z, 2, 1)
        glayout.addWidget(self._space_z, 2, 2)
        glayout.addLayout(value_layout, 3, 0, 1, 4)
        glayout.addLayout(vol_layout, 4, 0, 1, 2)
        glayout.addLayout(space_layout, 5, 0, 1, 4)
        self._cursor_info_panel = QGroupBox('Cursor')
        self._cursor_info_panel.setLayout(glayout)
        self._unity_info_panel = QGroupBox('Common Tools')
        self._unity_tabwidget = QTabWidget()
        self.basic_widget = BasicWidget(self._model, self)
        self.ROItools_widget = ToolsTabWidget(self._model, self)
        self._unity_tabwidget.addTab(self.basic_widget, "Basic")
        self._unity_tabwidget.addTab(self.ROItools_widget, "ROI")
        hlayout = QHBoxLayout(self._unity_info_panel)
        hlayout.addWidget(self._unity_tabwidget)
        # layout config of whole widget
        self.setLayout(QVBoxLayout())
        self.layout().addLayout(list_view_layout)
        self.layout().addWidget(self._cursor_info_panel)
        self.layout().addWidget(self._unity_info_panel)

    def setModel(self, model):
        """Set model of the viewer.

        Builds the GUI and wires up all signal/slot connections; raises
        ValueError if *model* is not a QAbstractListModel.
        """
        if isinstance(model, QAbstractListModel):
            self._model = model
            self._init_gui()
            self._list_view.setModel(model)
            self._create_actions()
        else:
            raise ValueError('Input must be a ListModel!')

    def _create_actions(self):
        """Create several necessay actions."""
        # When select one item, display specific parameters
        self._list_view.selectionModel().currentChanged.connect(
            self._disp_current_para)
        # When select one item, display its undo/redo settings
        self._list_view.selectionModel().currentChanged.connect(
            self.current_changed)
        self._list_view.selectionModel().currentChanged.connect(
            self.update_xyzvl)
        # When dataset changed, refresh display.
        self._model.dataChanged.connect(self._disp_current_para)
        # When add new item, refresh display.
        self._model.rowsInserted.connect(self._disp_current_para)
        # When remove new item, refresh display.
        self._model.rowsRemoved.connect(self._disp_current_para)
        # When layout changed, refresh display.
        self._model.layoutChanged.connect(self._disp_current_para)
        # When crosshair position changed, refresh coordinate display
        self._model.cross_pos_changed.connect(self.update_xyzvl)
        # When time point changed, refresh coordinate display.
        self._model.time_changed.connect(self.update_xyzvl)
        self._model.time_changed.connect(self.current_changed)
        # Config setting actions
        self._view_min.editingFinished.connect(self._set_view_min)
        self._view_max.editingFinished.connect(self._set_view_max)
        self._colormap.currentIndexChanged.connect(self._set_colormap)
        self._visibility.sliderReleased.connect(self._set_alpha)
        self._up_button.clicked.connect(self._up_action)
        self._down_button.clicked.connect(self._down_action)
        # Only emit valueChanged when editing is committed, not per keystroke.
        self._volume_index_spinbox.setKeyboardTracking(False)
        self._volume_index_spinbox.valueChanged.connect(self._set_volume_idx)
        # set voxel ijk position
        self._coord_x.valueChanged.connect(self.set_cross_pos)
        self._coord_y.valueChanged.connect(self.set_cross_pos)
        self._coord_z.valueChanged.connect(self.set_cross_pos)
        # set RAS position
        self._space_x.editingFinished.connect(self.set_space_pos)
        self._space_y.editingFinished.connect(self.set_space_pos)
        self._space_z.editingFinished.connect(self.set_space_pos)

    def _set_volume_idx(self):
        """Set time point for model."""
        idx = self._volume_index_spinbox.value()
        self._model.set_time_point(idx)

    def _disp_current_para(self):
        """Display current model's parameters.

        Refreshes min/max, colormap, opacity and volume-index widgets from
        the model's custom Qt.UserRole data for the selected row.
        """
        index = self._list_view.currentIndex()
        if index.row() != -1:
            # set up status of up/down button
            if index.row() == 0:
                self._up_button.setEnabled(False)
            else:
                self._up_button.setEnabled(True)
            if index.row() == (self._model.rowCount() - 1):
                self._down_button.setEnabled(False)
            else:
                self._down_button.setEnabled(True)
            # min/max value
            self._view_min.setText(str(self._model.data(index, Qt.UserRole)))
            self._view_min.setCursorPosition(0)
            self._view_max.setText(str(self._model.data(index, Qt.UserRole + 1)))
            self._view_max.setCursorPosition(0)
            # colormap combo box setting
            cur_colormap = self._model.data(index, Qt.UserRole + 3)
            if isinstance(cur_colormap, LabelConfig):
                cur_colormap = cur_colormap.get_name()
            idx = self._colormap.findText(cur_colormap)
            self._colormap.setCurrentIndex(idx)
            # alpha slider setting (model stores 0-255, slider shows 0-100)
            current_alpha = self._model.data(index, Qt.UserRole + 2) * \
                100 / 255
            self._visibility.setValue(current_alpha)
            # volume index settings
            if self._model.data(index, Qt.UserRole + 8):
                self._volume_index_spinbox.setEnabled(True)
                self._volume_index_spinbox.setRange(0,
                        self._model.data(index, Qt.UserRole + 10) - 1)
                volume_idx = self._model.data(index, Qt.UserRole + 9)
                if volume_idx != self._volume_index_spinbox.value():
                    self._volume_index_spinbox.setValue(volume_idx)
            else:
                # Disconnect first so resetting to 0 does not trigger
                # _set_volume_idx on the model.
                self._volume_index_spinbox.valueChanged.disconnect()
                self._volume_index_spinbox.setValue(0)
                self._volume_index_spinbox.setEnabled(False)
                self._volume_index_spinbox.valueChanged.connect(self._set_volume_idx)
            self._list_view.setFocus()
            # Set current index
            self._model.setCurrentIndex(self._list_view.currentIndex())
            self._model.setSelectedIndexes()

    def _set_view_min(self):
        """Set current selected item's view_min value.

        An empty edit box restores the displayed value from the model
        instead of writing back.
        """
        index = self._list_view.currentIndex()
        value = self._view_min.text()
        if value == '':
            self._view_min.setText(str(self._model.data(index, Qt.UserRole + 12)))
            self._view_min.setCursorPosition(0)
        else:
            self._model.setData(index, value, role=Qt.UserRole)

    def _set_view_max(self):
        """Set current selected item's view_max value.

        An empty edit box restores the displayed value from the model
        instead of writing back.
        """
        index = self._list_view.currentIndex()
        value = self._view_max.text()
        if value == '':
            self._view_max.setText(str(self._model.data(index, Qt.UserRole + 13)))
            self._view_max.setCursorPosition(0)
        else:
            self._model.setData(index, value, role=Qt.UserRole + 1)

    def _set_colormap(self):
        """Set colormap of current selected item."""
        index = self._list_view.currentIndex()
        value = self._colormap.currentText()
        builtin_len = len(self.builtin_colormap)
        row = self._colormap.currentIndex()
        # Entries past the builtins are user label configs; pass the config
        # object itself to the model instead of its name.
        if row >= builtin_len:
            value = self.label_config_center.get_label_config(row - builtin_len)
        self._model.setData(index, value, role=Qt.UserRole + 3)

    def _set_alpha(self):
        """Set alpha value of current selected item."""
        index = self._list_view.currentIndex()
        # Slider 0-100 mapped back to the model's 0-255 (integer math).
        value = self._visibility.value() * 255 / 100
        self._model.setData(index, value, role=Qt.UserRole + 2)

    def _up_action(self):
        """Move selected item up for one step."""
        index = self._list_view.currentIndex()
        self._model.moveUp(index.row())
        # Re-read the index: the selection follows the moved row.
        index = self._list_view.currentIndex()
        if index.row() == 0:
            self._up_button.setEnabled(False)
        else:
            self._up_button.setEnabled(True)
        if index.row() == (self._model.rowCount() - 1):
            self._down_button.setEnabled(False)
        else:
            self._down_button.setEnabled(True)
        self._list_view.setFocus()

    def _down_action(self):
        """Move selected item down for one step."""
        index = self._list_view.currentIndex()
        self._model.moveDown(index.row())
        # Re-read the index: the selection follows the moved row.
        index = self._list_view.currentIndex()
        if index.row() == 0:
            self._up_button.setEnabled(False)
        else:
            self._up_button.setEnabled(True)
        if index.row() == (self._model.rowCount() - 1):
            self._down_button.setEnabled(False)
        else:
            self._down_button.setEnabled(True)
        self._list_view.setFocus()

    def currentRow(self):
        """Return the row of current selected item."""
        return self._list_view.currentIndex().row()

    def setCurrentIndex(self, index):
        """Set selected item."""
        self._list_view.setCurrentIndex(index)

    def update_xyzvl(self):
        """Update the information of crosshair position."""
        # disable signal connection when value changed, so programmatic
        # updates do not re-enter set_cross_pos()
        self._coord_x.valueChanged.disconnect()
        self._coord_y.valueChanged.disconnect()
        self._coord_z.valueChanged.disconnect()
        xyz = self._model.get_cross_pos()
        self._coord_x.setValue(int(xyz[0]))
        self._coord_y.setValue(int(xyz[1]))
        self._coord_z.setValue(int(xyz[2]))
        value = self._model.get_current_value([xyz[0], xyz[1], xyz[2]])
        self._coord_value.setText(str(value))
        self._coord_value.setCursorPosition(0)
        space_xyz = self._model.get_space_pos()
        self._space_x.setText(str(around(space_xyz[0], decimals=2)))
        self._space_y.setText(str(around(space_xyz[1], decimals=2)))
        self._space_z.setText(str(around(space_xyz[2], decimals=2)))
        # self._coord_label.setText(self._model.get_current_value_label(value))
        # resume signal connection
        self._coord_x.valueChanged.connect(self.set_cross_pos)
        self._coord_y.valueChanged.connect(self.set_cross_pos)
        self._coord_z.valueChanged.connect(self.set_cross_pos)

    def set_cross_pos(self):
        """Set position of crosshair."""
        new_coord = [int(self._coord_x.value()),
                     int(self._coord_y.value()),
                     int(self._coord_z.value())]
        self._model.set_cross_pos(new_coord)

    def set_space_pos(self):
        """Set RAS position.

        Each axis falls back to the model's current coordinate when its edit
        box does not contain a parsable float; the model is only updated when
        the resulting triple actually differs.
        """
        space_xyz = self._model.get_space_pos()
        try:
            space_x = float(self._space_x.text())
        except:
            space_x = space_xyz[0]
            self._space_x.setText(str(space_xyz[0]))
        try:
            space_y = float(self._space_y.text())
        except:
            space_y = space_xyz[1]
            self._space_y.setText(str(space_xyz[1]))
        try:
            space_z = float(self._space_z.text())
        except:
            space_z = space_xyz[2]
            self._space_z.setText(str(space_xyz[2]))
        new_coord = [space_x, space_y, space_z]
        if not array_equal(new_coord, space_xyz):
            self._model.set_space_pos(new_coord)
| bsd-3-clause |
jsvelasquezv/agroind-mobile | platforms/browser/www/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py | 960 | 45344 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
# Default expansions handed to gyp; the $(...) forms are deferred to make
# and resolved by the Android build system at build time.
generator_default_variables = {
  'OS': 'android',
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.so',
  'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
  'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
  'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
  'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
  'LIB_DIR': '$(obj).$(TOOLSET)',
  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',  # This gets expanded by Python.
  'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',  # This gets expanded by Python.
  'RULE_INPUT_PATH': '$(RULE_SOURCES)',
  'RULE_INPUT_EXT': '$(suffix $<)',
  'RULE_INPUT_NAME': '$(notdir $<)',
  'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}

# Make supports multiple toolsets
generator_supports_multiple_toolsets = True

# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
    # Boolean to declare that this target does not want its name mangled.
    'android_unmangled_name',
    # Map of android build system variables to set.
    'aosp_build_settings',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []

# Appended to the top-level GypAndroid.mk (emitted verbatim into the file).
ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""

# Written at the top of every generated .mk file.
header = """\
# This file is generated by gyp; do not edit.
"""

# Map gyp target types to Android module classes.
MODULE_CLASSES = {
    'static_library': 'STATIC_LIBRARIES',
    'shared_library': 'SHARED_LIBRARIES',
    'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
  """Return True when *ext* is registered as a C++ extension ('cxx') in the
  make backend's compilable-extension table."""
  return 'cxx' == make.COMPILABLE_EXTENSIONS.get(ext)
def Sourceify(path):
  """Return *path* unchanged.

  The Android backend does not support options.generator_output, so source
  paths need no rewriting; this exists for interface parity with make.py.
  """
  return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module. Filled in as targets are written (see Write()).
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
  def __init__(self, android_top_dir):
    # Root of the Android tree; presumably used by path helpers defined
    # elsewhere in this class -- confirm against LocalPathify et al.
    self.android_top_dir = android_top_dir
  def Write(self, qualified_target, relative_target, base_path, output_filename,
            spec, configs, part_of_all, write_alias_target, sdk_version):
    """The main entry point: writes a .mk file for a single target.

    Arguments:
      qualified_target: target we're generating
      relative_target: qualified target name relative to the root
      base_path: path relative to source root we're building in, used to resolve
                 target-relative paths
      output_filename: output .mk file name to write
      spec, configs: gyp info
      part_of_all: flag indicating this target is part of 'all'
      write_alias_target: flag indicating whether to create short aliases for
                          this target
      sdk_version: what to emit for LOCAL_SDK_VERSION in output
    """
    gyp.common.EnsureDirExists(output_filename)
    self.fp = open(output_filename, 'w')
    self.fp.write(header)
    self.qualified_target = qualified_target
    self.relative_target = relative_target
    self.path = base_path
    self.target = spec['target_name']
    self.type = spec['type']
    self.toolset = spec['toolset']
    deps, link_deps = self.ComputeDeps(spec)
    # Some of the generation below can add extra output, sources, or
    # link dependencies. All of the out params of the functions that
    # follow use names like extra_foo.
    extra_outputs = []
    extra_sources = []
    self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
    self.android_module = self.ComputeAndroidModule(spec)
    (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
    self.output = self.output_binary = self.ComputeOutput(spec)
    # Standard header.
    self.WriteLn('include $(CLEAR_VARS)\n')
    # Module class and name.
    self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
    self.WriteLn('LOCAL_MODULE := ' + self.android_module)
    # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
    # The library module classes fail if the stem is set. ComputeOutputParts
    # makes sure that stem == modulename in these cases.
    if self.android_stem != self.android_module:
      self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
    self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
    if self.toolset == 'host':
      self.WriteLn('LOCAL_IS_HOST_MODULE := true')
      self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
    elif sdk_version > 0:
      self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
                   '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
      self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
    # Grab output directories; needed for Actions and Rules.
    if self.toolset == 'host':
      self.WriteLn('gyp_intermediate_dir := '
                   '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
    else:
      self.WriteLn('gyp_intermediate_dir := '
                   '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
    self.WriteLn('gyp_shared_intermediate_dir := '
                 '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
    self.WriteLn()
    # List files this target depends on so that actions/rules/copies/sources
    # can depend on the list.
    # TODO: doesn't pull in things through transitive link deps; needed?
    target_dependencies = [x[1] for x in deps if x[0] == 'path']
    self.WriteLn('# Make sure our deps are built first.')
    self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
                   local_pathify=True)
    # Actions must come first, since they can generate more OBJs for use below.
    if 'actions' in spec:
      self.WriteActions(spec['actions'], extra_sources, extra_outputs)
    # Rules must be early like actions.
    if 'rules' in spec:
      self.WriteRules(spec['rules'], extra_sources, extra_outputs)
    if 'copies' in spec:
      self.WriteCopies(spec['copies'], extra_outputs)
    # GYP generated outputs.
    self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
    # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
    # on both our dependency targets and our generated files.
    self.WriteLn('# Make sure our deps and generated files are built first.')
    self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
                 '$(GYP_GENERATED_OUTPUTS)')
    self.WriteLn()
    # Sources.
    if spec.get('sources', []) or extra_sources:
      self.WriteSources(spec, configs, extra_sources)
    self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
                     write_alias_target)
    # Update global list of target outputs, used in dependency tracking.
    target_outputs[qualified_target] = ('path', self.output_binary)
    # Update global list of link dependencies. Only libraries are linkable;
    # executables and other types are build-order-only dependencies.
    if self.type == 'static_library':
      target_link_deps[qualified_target] = ('static', self.android_module)
    elif self.type == 'shared_library':
      target_link_deps[qualified_target] = ('shared', self.android_module)
    self.fp.close()
    return self.android_module
  def WriteActions(self, actions, extra_sources, extra_outputs):
    """Write Makefile code for any 'actions' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   actions (used to make other pieces dependent on these
                   actions)
    """
    for action in actions:
      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                      action['action_name']))
      self.WriteLn('### Rules for action "%s":' % action['action_name'])
      inputs = action['inputs']
      outputs = action['outputs']
      # Build up a list of outputs.
      # Collect the output dirs we'll need.
      dirs = set()
      for out in outputs:
        # Outputs should land under a make variable (e.g. an intermediates
        # dir), never in the source tree.
        if not out.startswith('$'):
          print ('WARNING: Action for target "%s" writes output to local path '
                 '"%s".' % (self.target, out))
        dir = os.path.split(out)[0]
        if dir:
          dirs.add(dir)
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += outputs
      # Prepare the actual command.
      command = gyp.common.EncodePOSIXShellList(action['action'])
      if 'message' in action:
        quiet_cmd = 'Gyp action: %s ($@)' % action['message']
      else:
        quiet_cmd = 'Gyp action: %s ($@)' % name
      if len(dirs) > 0:
        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
      cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
      command = cd_action + command
      # The makefile rules are all relative to the top dir, but the gyp actions
      # are defined relative to their containing dir. This replaces the gyp_*
      # variables for the action rule with an absolute version so that the
      # output goes in the right place.
      # Only write the gyp_* rules for the "primary" output (:1);
      # it's superfluous for the "extra outputs", and this avoids accidentally
      # writing duplicate dummy rules for those outputs.
      main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
      self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
      self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
      self.WriteLn('%s: gyp_intermediate_dir := '
                   '$(abspath $(gyp_intermediate_dir))' % main_output)
      self.WriteLn('%s: gyp_shared_intermediate_dir := '
                   '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
      # Android's envsetup.sh adds a number of directories to the path including
      # the built host binary directory. This causes actions/rules invoked by
      # gyp to sometimes use these instead of system versions, e.g. bison.
      # The built host binaries may not be suitable, and can cause errors.
      # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
      # set by envsetup.
      self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
                   % main_output)
      # Don't allow spaces in input/output filenames, but make an exception for
      # filenames which start with '$(' since it's okay for there to be spaces
      # inside of make function/macro invocations.
      for input in inputs:
        if not input.startswith('$(') and ' ' in input:
          raise gyp.common.GypError(
              'Action input filename "%s" in target %s contains a space' %
              (input, self.target))
      for output in outputs:
        if not output.startswith('$(') and ' ' in output:
          raise gyp.common.GypError(
              'Action output filename "%s" in target %s contains a space' %
              (output, self.target))
      self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                   (main_output, ' '.join(map(self.LocalPathify, inputs))))
      self.WriteLn('\t@echo "%s"' % quiet_cmd)
      self.WriteLn('\t$(hide)%s\n' % command)
      for output in outputs[1:]:
        # Make each output depend on the main output, with an empty command
        # to force make to notice that the mtime has changed.
        self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
      extra_outputs += outputs
      self.WriteLn()
    self.WriteLn()
  def WriteRules(self, rules, extra_sources, extra_outputs):
    """Write Makefile code for any 'rules' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   rules (used to make other pieces dependent on these rules)
    """
    if len(rules) == 0:
      return
    for rule in rules:
      if len(rule.get('rule_sources', [])) == 0:
        continue
      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                      rule['rule_name']))
      self.WriteLn('\n### Generated for rule "%s":' % name)
      self.WriteLn('# "%s":' % rule)
      inputs = rule.get('inputs')
      # One make rule is emitted per rule source file, with RULE_INPUT_*
      # placeholders expanded per-source via ExpandInputRoot.
      for rule_source in rule.get('rule_sources', []):
        (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
        (rule_source_root, rule_source_ext) = \
            os.path.splitext(rule_source_basename)
        outputs = [self.ExpandInputRoot(out, rule_source_root,
                                        rule_source_dirname)
                   for out in rule['outputs']]
        dirs = set()
        for out in outputs:
          if not out.startswith('$'):
            print ('WARNING: Rule for target %s writes output to local path %s'
                   % (self.target, out))
          dir = os.path.dirname(out)
          if dir:
            dirs.add(dir)
        extra_outputs += outputs
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources.extend(outputs)
        components = []
        for component in rule['action']:
          component = self.ExpandInputRoot(component, rule_source_root,
                                           rule_source_dirname)
          if '$(RULE_SOURCES)' in component:
            component = component.replace('$(RULE_SOURCES)',
                                          rule_source)
          components.append(component)
        command = gyp.common.EncodePOSIXShellList(components)
        cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
        command = cd_action + command
        if dirs:
          command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
        # We set up a rule to build the first output, and then set up
        # a rule for each additional output to depend on the first.
        outputs = map(self.LocalPathify, outputs)
        main_output = outputs[0]
        self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
        self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
        self.WriteLn('%s: gyp_intermediate_dir := '
                     '$(abspath $(gyp_intermediate_dir))' % main_output)
        self.WriteLn('%s: gyp_shared_intermediate_dir := '
                     '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
        # See explanation in WriteActions.
        self.WriteLn('%s: export PATH := '
                     '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
        main_output_deps = self.LocalPathify(rule_source)
        if inputs:
          main_output_deps += ' '
          main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
        self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                     (main_output, main_output_deps))
        self.WriteLn('\t%s\n' % command)
        for output in outputs[1:]:
          # Make each output depend on the main output, with an empty command
          # to force make to notice that the mtime has changed.
          self.WriteLn('%s: %s ;' % (output, main_output))
      self.WriteLn()
    self.WriteLn()
def WriteCopies(self, copies, extra_outputs):
  """Write Makefile code for any 'copies' from the gyp input.

  extra_outputs: a list that will be filled in with any outputs of this action
                 (used to make other pieces dependent on this action)
  """
  self.WriteLn('### Generated for copy rule.')

  variable = make.StringToMakefileVariable(self.relative_target + '_copies')
  outputs = []
  for copy in copies:
    for path in copy['files']:
      # The Android build system does not allow generation of files into the
      # source tree. The destination should start with a variable, which will
      # typically be $(gyp_intermediate_dir) or
      # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
      # because some of the gyp tests depend on this.
      if not copy['destination'].startswith('$'):
        print ('WARNING: Copy rule for target %s writes output to '
               'local path %s' % (self.target, copy['destination']))

      # LocalPathify() calls normpath, stripping trailing slashes.
      path = Sourceify(self.LocalPathify(path))
      filename = os.path.split(path)[1]
      output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
                                                        filename)))

      # One copy rule per file, using the Android copy tool $(ACP) so
      # permissions/timestamps are preserved (-rpf).
      self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
                   (output, path))
      self.WriteLn('\t@echo Copying: $@')
      self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
      self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
      self.WriteLn()
      outputs.append(output)

  # Publish all copy outputs under one make variable so dependents can
  # reference the whole set as $(variable).
  self.WriteLn('%s = %s' % (variable,
                            ' '.join(map(make.QuoteSpaces, outputs))))
  extra_outputs.append('$(%s)' % variable)
  self.WriteLn()
def WriteSourceFlags(self, spec, configs):
  """Write out the flags and include paths used to compile source files for
  the current target.

  Args:
    spec, configs: input from gyp.
  """
  # Emit one variable set per configuration; the active configuration is
  # selected at make time via $(GYP_CONFIGURATION).
  for configname, config in sorted(configs.iteritems()):
    extracted_includes = []

    self.WriteLn('\n# Flags passed to both C and C++ files.')
    # -I entries embedded in cflags are split out and folded into the
    # include path list below.
    cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
        config.get('cflags', []) + config.get('cflags_c', []))
    extracted_includes.extend(includes_from_cflags)
    self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)

    self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
                   prefix='-D', quoter=make.EscapeCppDefine)

    self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
    includes = list(config.get('include_dirs', []))
    includes.extend(extracted_includes)
    includes = map(Sourceify, map(self.LocalPathify, includes))
    includes = self.NormalizeIncludePaths(includes)
    self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)

    self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
    self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)

  self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
               '$(MY_DEFS_$(GYP_CONFIGURATION))')
  # Undefine ANDROID for host modules
  # TODO: the source code should not use macro ANDROID to tell if it's host
  # or target module.
  if self.toolset == 'host':
    self.WriteLn('# Undefine ANDROID for host modules')
    self.WriteLn('LOCAL_CFLAGS += -UANDROID')
  self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
               '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
  self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
  # Android uses separate flags for assembly file invocations, but gyp expects
  # the same CFLAGS to be applied:
  self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
def WriteSources(self, spec, configs, extra_sources):
  """Write Makefile code for any 'sources' from the gyp input.
  These are source files necessary to build the current target.
  We need to handle shared_intermediate directory source files as
  a special case by copying them to the intermediate directory and
  treating them as a genereated sources. Otherwise the Android build
  rules won't pick them up.

  Args:
    spec, configs: input from gyp.
    extra_sources: Sources generated from Actions or Rules.
  """
  sources = filter(make.Compilable, spec.get('sources', []))
  # Non-compilable generated files are kept aside and re-added at the end so
  # the make rules still depend on them.
  generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
  extra_sources = filter(make.Compilable, extra_sources)

  # Determine and output the C++ extension used by these sources.
  # We simply find the first C++ file and use that extension.
  all_sources = sources + extra_sources
  local_cpp_extension = '.cpp'
  for source in all_sources:
    (root, ext) = os.path.splitext(source)
    if IsCPPExtension(ext):
      local_cpp_extension = ext
      break
  if local_cpp_extension != '.cpp':
    self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)

  # We need to move any non-generated sources that are coming from the
  # shared intermediate directory out of LOCAL_SRC_FILES and put them
  # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
  # that don't match our local_cpp_extension, since Android will only
  # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
  local_files = []
  for source in sources:
    (root, ext) = os.path.splitext(source)
    if '$(gyp_shared_intermediate_dir)' in source:
      extra_sources.append(source)
    elif '$(gyp_intermediate_dir)' in source:
      extra_sources.append(source)
    elif IsCPPExtension(ext) and ext != local_cpp_extension:
      extra_sources.append(source)
    else:
      local_files.append(os.path.normpath(os.path.join(self.path, source)))

  # For any generated source, if it is coming from the shared intermediate
  # directory then we add a Make rule to copy them to the local intermediate
  # directory first. This is because the Android LOCAL_GENERATED_SOURCES
  # must be in the local module intermediate directory for the compile rules
  # to work properly. If the file has the wrong C++ extension, then we add
  # a rule to copy that to intermediates and use the new version.
  final_generated_sources = []
  # If a source file gets copied, we still need to add the orginal source
  # directory as header search path, for GCC searches headers in the
  # directory that contains the source file by default.
  origin_src_dirs = []
  for source in extra_sources:
    local_file = source
    if not '$(gyp_intermediate_dir)/' in local_file:
      basename = os.path.basename(local_file)
      local_file = '$(gyp_intermediate_dir)/' + basename
    (root, ext) = os.path.splitext(local_file)
    if IsCPPExtension(ext) and ext != local_cpp_extension:
      # Rename to the chosen C++ extension so Android's single-extension
      # compile rules pick the file up.
      local_file = root + local_cpp_extension
    if local_file != source:
      # Emit the copy rule moving the file into the local intermediate dir.
      self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
      self.WriteLn('\tmkdir -p $(@D); cp $< $@')
      origin_src_dirs.append(os.path.dirname(source))
    final_generated_sources.append(local_file)

  # We add back in all of the non-compilable stuff to make sure that the
  # make rules have dependencies on them.
  final_generated_sources.extend(generated_not_sources)
  self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')

  origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
  origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
  self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')

  self.WriteList(local_files, 'LOCAL_SRC_FILES')

  # Write out the flags used to compile the source; this must be done last
  # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
  self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
  """Return the Android module name used for a gyp spec.

  We use the complete qualified target name to avoid collisions between
  duplicate targets in different directories. We also add a suffix to
  distinguish gyp-generated module names.
  """
  if int(spec.get('android_unmangled_name', 0)):
    assert self.type != 'shared_library' or self.target.startswith('lib')
    return self.target

  # Shared libraries must look like 'lib...' so the Android build system can
  # generate -l flags for them.
  prefix = 'lib_' if self.type == 'shared_library' else ''

  if spec['toolset'] == 'host':
    suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
  else:
    suffix = '_gyp'

  if self.path:
    middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
  else:
    middle = make.StringToMakefileVariable(self.target)

  return prefix + middle + suffix
def ComputeOutputParts(self, spec):
  """Return the 'output basename' of a gyp spec, split into filename + ext.

  Android libraries must be named the same thing as their module name,
  otherwise the linker can't find them, so product_name and so on must be
  ignored if we are building a library, and the "lib" prepending is
  not done for Android.
  """
  assert self.type != 'loadable_module' # TODO: not supported?

  target = spec['target_name']
  target_prefix = ''
  target_ext = ''
  if self.type in ('static_library', 'shared_library'):
    # Library names must match the Android module name exactly.
    target = self.ComputeAndroidModule(spec)
    target_ext = '.a' if self.type == 'static_library' else '.so'
  elif self.type == 'none':
    target_ext = '.stamp'
  elif self.type != 'executable':
    print ("ERROR: What output file should be generated?",
           "type", self.type, "target", target)

  # Non-library targets may still customize their output name/extension.
  if self.type not in ('static_library', 'shared_library'):
    target_prefix = spec.get('product_prefix', target_prefix)
    target = spec.get('product_name', target)
    product_ext = spec.get('product_extension')
    if product_ext:
      target_ext = '.' + product_ext

  return (target_prefix + target, target_ext)
def ComputeOutputBasename(self, spec):
  """Return the 'output basename' of a gyp spec.

  E.g., the loadable module 'foobar' in directory 'baz' will produce
  'libfoobar.so'
  """
  stem, ext = self.ComputeOutputParts(spec)
  return stem + ext
def ComputeOutput(self, spec):
  """Return the 'output' (full output path) of a gyp spec.

  E.g., the loadable module 'foobar' in directory 'baz' will produce
  '$(obj)/baz/libfoobar.so'
  """
  if self.type == 'executable':
    # We install host executables into shared_intermediate_dir so they can be
    # run by gyp rules that refer to PRODUCT_DIR.
    base_dir = '$(gyp_shared_intermediate_dir)'
  elif self.type == 'shared_library':
    if self.toolset == 'host':
      base_dir = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
    else:
      base_dir = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
  elif self.toolset == 'host':
    # Other targets just get built into their intermediate dir.
    base_dir = ('$(call intermediates-dir-for,%s,%s,true,,'
                '$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
                                             self.android_module))
  else:
    base_dir = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
                % (self.android_class, self.android_module))

  assert spec.get('product_dir') is None # TODO: not supported?
  return os.path.join(base_dir, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
  """ Normalize include_paths.
  Convert absolute paths to relative to the Android top directory.

  Args:
    include_paths: A list of unprocessed include paths.
  Returns:
    A list of normalized include paths.
  """
  normalized = []
  for path in include_paths:
    # Use startswith() rather than indexing path[0] so that an empty
    # include-path entry cannot raise IndexError.
    if path.startswith('/'):
      path = gyp.common.RelativePath(path, self.android_top_dir)
    normalized.append(path)
  return normalized
def ExtractIncludesFromCFlags(self, cflags):
  """Extract includes "-I..." out from cflags

  Args:
    cflags: A list of compiler flags, which may be mixed with "-I.."
  Returns:
    A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
  """
  # Partition the flags in two passes: include paths (with the -I prefix
  # stripped) and everything else, preserving the original ordering.
  include_paths = [flag[2:] for flag in cflags if flag.startswith('-I')]
  clean_cflags = [flag for flag in cflags if not flag.startswith('-I')]
  return (clean_cflags, include_paths)
def FilterLibraries(self, libraries):
  """Filter the 'libraries' key to separate things that shouldn't be ldflags.

  Library entries that look like filenames should be converted to android
  module names instead of being passed to the linker as flags.

  Args:
    libraries: the value of spec.get('libraries')
  Returns:
    A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
  """
  static_lib_modules = []
  dynamic_lib_modules = []
  ldflags = []
  for entry in libraries:
    # An entry can contain several whitespace-separated words.
    for lib in entry.split():
      # Drop the system libraries that the Android build system links in by
      # default.
      if lib in ('-lc', '-lstdc++', '-lm') or lib.endswith('libgcc.a'):
        continue
      static_match = re.search(r'([^/]+)\.a$', lib)
      if static_match:
        static_lib_modules.append(static_match.group(1))
        continue
      dynamic_match = re.search(r'([^/]+)\.so$', lib)
      if dynamic_match:
        dynamic_lib_modules.append(dynamic_match.group(1))
        continue
      if lib.startswith('-l'):
        ldflags.append(lib)
  return (static_lib_modules, dynamic_lib_modules, ldflags)
def ComputeDeps(self, spec):
  """Compute the dependencies of a gyp spec.

  Returns a tuple (deps, link_deps), where each is a list of
  filenames that will need to be put in front of make for either
  building (deps) or linking (link_deps).
  """
  dependencies = spec.get('dependencies', [])
  # Build deps: every dependency with a recorded (truthy) output.
  deps = [target_outputs[dep] for dep in dependencies if target_outputs[dep]]
  # Link deps: only dependencies that produced something linkable.
  link_deps = [target_link_deps[dep] for dep in dependencies
               if dep in target_link_deps]
  deps.extend(link_deps)
  return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteTargetFlags(self, spec, configs, link_deps):
  """Write Makefile code to specify the link flags and library dependencies.

  spec, configs: input from gyp.
  link_deps: link dependency list; see ComputeDeps()
  """
  # Libraries (i.e. -lfoo)
  # These must be included even for static libraries as some of them provide
  # implicit include paths through the build system.
  libraries = gyp.common.uniquer(spec.get('libraries', []))
  static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)

  if self.type != 'static_library':
    # Emit per-configuration LDFLAGS; the active one is picked via
    # $(GYP_CONFIGURATION) below.
    for configname, config in sorted(configs.iteritems()):
      ldflags = list(config.get('ldflags', []))
      self.WriteLn('')
      self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
    self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
    self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
                 '$(LOCAL_GYP_LIBS)')

  # Link dependencies (i.e. other gyp targets this target depends on)
  # These need not be included for static libraries as within the gyp build
  # we do not use the implicit include path mechanism.
  if self.type != 'static_library':
    static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
    shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
  else:
    static_link_deps = []
    shared_link_deps = []

  # Only write the lists if they are non-empty.
  if static_libs or static_link_deps:
    self.WriteLn('')
    self.WriteList(static_libs + static_link_deps,
                   'LOCAL_STATIC_LIBRARIES')
    self.WriteLn('# Enable grouping to fix circular references')
    self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
  if dynamic_libs or shared_link_deps:
    self.WriteLn('')
    self.WriteList(dynamic_libs + shared_link_deps,
                   'LOCAL_SHARED_LIBRARIES')
def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
                write_alias_target):
  """Write Makefile code to produce the final target of the gyp spec.

  spec, configs: input from gyp.
  deps, link_deps: dependency lists; see ComputeDeps()
  part_of_all: flag indicating this target is part of 'all'
  write_alias_target: flag indicating whether to create short aliases for this
                      target
  """
  self.WriteLn('### Rules for final target.')

  if self.type != 'none':
    self.WriteTargetFlags(spec, configs, link_deps)

  settings = spec.get('aosp_build_settings', {})
  if settings:
    # Pass raw AOSP build variables straight through to the generated .mk.
    self.WriteLn('### Set directly by aosp_build_settings.')
    for k, v in settings.iteritems():
      if isinstance(v, list):
        self.WriteList(v, k)
      else:
        self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
    self.WriteLn('')

  # Add to the set of targets which represent the gyp 'all' target. We use the
  # name 'gyp_all_modules' as the Android build system doesn't allow the use
  # of the Make target 'all' and because 'all_modules' is the equivalent of
  # the Make target 'all' on Android.
  if part_of_all and write_alias_target:
    self.WriteLn('# Add target alias to "gyp_all_modules" target.')
    self.WriteLn('.PHONY: gyp_all_modules')
    self.WriteLn('gyp_all_modules: %s' % self.android_module)
    self.WriteLn('')

  # Add an alias from the gyp target name to the Android module name. This
  # simplifies manual builds of the target, and is required by the test
  # framework.
  if self.target != self.android_module and write_alias_target:
    self.WriteLn('# Alias gyp target name.')
    self.WriteLn('.PHONY: %s' % self.target)
    self.WriteLn('%s: %s' % (self.target, self.android_module))
    self.WriteLn('')

  # Add the command to trigger build of the target type depending
  # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
  # NOTE: This has to come last!
  modifier = ''
  if self.toolset == 'host':
    modifier = 'HOST_'
  if self.type == 'static_library':
    self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
  elif self.type == 'shared_library':
    self.WriteLn('LOCAL_PRELINK_MODULE := false')
    self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
  elif self.type == 'executable':
    self.WriteLn('LOCAL_CXX_STL := libc++_static')
    # Executables are for build and test purposes only, so they're installed
    # to a directory that doesn't get included in the system image.
    self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
    self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
  else:
    # 'none' (and anything unrecognized) becomes an uninstallable stamp
    # module built through base_rules.mk.
    self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
    self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
    if self.toolset == 'target':
      self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
    else:
      self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
    self.WriteLn()
    self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
    self.WriteLn()
    # The stamp rule just touches its output so dependents see an mtime.
    self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
    self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
    self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
    self.WriteLn('\t$(hide) touch $@')
    self.WriteLn()
    self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
              quoter=make.QuoteIfNecessary, local_pathify=False):
  """Write a variable definition that is a list of values.

  E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
       foo = blaha blahb
  but in a pretty-printed style.
  """
  # An empty list still produces 'variable :=' so the variable is defined.
  values = ''
  if value_list:
    quoted = [quoter(prefix + item) for item in value_list]
    if local_pathify:
      quoted = [self.LocalPathify(item) for item in quoted]
    # One value per line, continued with backslashes for readability.
    values = ' \\\n\t' + ' \\\n\t'.join(quoted)
  self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteLn(self, text=''):
  # Write one line (text plus a trailing newline) to the output makefile.
  self.fp.write(text + '\n')
def LocalPathify(self, path):
  """Convert a subdirectory-relative path into a normalized path which starts
  with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
  Absolute paths, or paths that contain variables, are just normalized."""
  # Paths with make variables or absolute paths are not project files; just
  # normalize (which also trims trailing slashes).
  if os.path.isabs(path) or '$(' in path:
    return os.path.normpath(path)

  result = os.path.normpath(os.path.join('$(LOCAL_PATH)', self.path, path))
  # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
  # - i.e. that the resulting path is still inside the project tree. The
  # path may legitimately have ended up containing just $(LOCAL_PATH), though,
  # so we don't look for a slash.
  assert result.startswith('$(LOCAL_PATH)'), (
      'Path %s attempts to escape from gyp path %s !)' % (path, self.path))
  return result
def ExpandInputRoot(self, template, expansion, dirname):
  """Substitute %(INPUT_ROOT)s / %(INPUT_DIRNAME)s in a rule template.

  Templates without either placeholder are returned untouched (and not
  normalized), matching how rule strings pass through otherwise.
  """
  if '%(INPUT_ROOT)s' in template or '%(INPUT_DIRNAME)s' in template:
    expanded = template % {
        'INPUT_ROOT': expansion,
        'INPUT_DIRNAME': dirname,
    }
    return os.path.normpath(expanded)
  return template
def PerformBuild(data, configurations, params):
  """Run the Android build for 'gyp_all_modules' using the generated makefile.

  Invokes make in $ANDROID_BUILD_TOP with ONE_SHOT_MAKEFILE pointing at the
  generated GypAndroid.mk.  Raises subprocess.CalledProcessError if the build
  fails, and KeyError if ANDROID_BUILD_TOP is not set.
  """
  # The android backend only supports the default configuration.
  options = params['options']
  makefile = os.path.abspath(os.path.join(options.toplevel_dir,
                                          'GypAndroid.mk'))
  env = dict(os.environ)
  env['ONE_SHOT_MAKEFILE'] = makefile
  arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
  # Parenthesized print: valid under both Python 2 and 3, and consistent with
  # every other print in this file (the bare print statement was Py2-only).
  print('Building: %s' % arguments)
  subprocess.check_call(arguments, env=env)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the root GypAndroid.mk plus one .mk file
  per (target, toolset) and include them all from the root makefile.
  """
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  # NOTE(review): 'builddir_name' appears unused below — confirm before
  # removing.
  builddir_name = generator_flags.get('output_dir', 'out')
  limit_to_target_all = generator_flags.get('limit_to_target_all', False)
  write_alias_targets = generator_flags.get('write_alias_targets', True)
  sdk_version = generator_flags.get('aosp_sdk_version', 0)
  android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
  assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'

  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    assert not options.generator_output, (
        'The Android backend does not support options.generator_output.')
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file

  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  # NOTE(review): 'toolsets' appears unused in this function — confirm.
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'

  # NOTE(review): 'srcdir' appears unused — confirm.
  srcdir = '.'
  makefile_name = 'GypAndroid' + options.suffix + '.mk'
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  assert not options.generator_output, (
      'The Android backend does not support options.generator_output.')
  gyp.common.EnsureDirExists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(header)

  # We set LOCAL_PATH just once, here, to the top of the project tree. This
  # allows all the other paths we use to be relative to the Android.mk file,
  # as the Android build system expects.
  root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')

  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)

  build_files = set()
  include_list = set()
  android_modules = {}
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    relative_build_file = gyp.common.RelativePath(build_file,
                                                  options.toplevel_dir)
    build_files.add(relative_build_file)
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)

    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')

    spec = target_dicts[qualified_target]
    configs = spec['configurations']

    part_of_all = qualified_target in needed_targets
    if limit_to_target_all and not part_of_all:
      continue

    relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
                                                 toolset)
    # One writer (and one generated .mk) per qualified target.
    writer = AndroidMkWriter(android_top_dir)
    android_module = writer.Write(qualified_target, relative_target, base_path,
                                  output_file, spec, configs,
                                  part_of_all=part_of_all,
                                  write_alias_target=write_alias_targets,
                                  sdk_version=sdk_version)
    if android_module in android_modules:
      # Duplicate module names would silently shadow each other in the
      # Android build, so bail out early with a diagnostic.
      print ('ERROR: Android module names must be unique. The following '
             'targets both generate Android module name %s.\n %s\n %s' %
             (android_module, android_modules[android_module],
              qualified_target))
      return
    android_modules[android_module] = qualified_target

    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add(mkfile_rel_path)

  root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
  root_makefile.write('GYP_VAR_PREFIX ?=\n')
  root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
  root_makefile.write('GYP_HOST_MULTILIB ?= first\n')

  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include_file in sorted(include_list):
    root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
  root_makefile.write('\n')

  if write_alias_targets:
    root_makefile.write(ALL_MODULES_FOOTER)

  root_makefile.close()
| apache-2.0 |
redhat-openstack/manila | manila/tests/api/v1/test_cgsnapshots.py | 2 | 18976 | # Copyright 2015 Alex Meade
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import uuid
import mock
from oslo_config import cfg
import six
import webob
import manila.api.v1.cgsnapshots as cgs
from manila.common import constants
from manila import exception
from manila import test
from manila.tests.api import fakes
CONF = cfg.CONF
class CGSnapshotApiTest(test.TestCase):
def setUp(self):
    """Create the controller under test and an experimental v2.4 request."""
    super(CGSnapshotApiTest, self).setUp()
    self.controller = cgs.CGSnapshotController()
    # cgsnapshot APIs are experimental; requests must opt in and carry the
    # microversion that introduced them.
    self.api_version = '2.4'
    self.request = fakes.HTTPRequest.blank('/consistency-groups',
                                           version=self.api_version,
                                           experimental=True)
def _get_fake_cgsnapshot(self, **values):
    """Return (db_model_dict, expected_api_view) for a cgsnapshot.

    Keyword arguments override the default field values.
    """
    snap = {
        'id': 'fake_id',
        'user_id': 'fakeuser',
        'project_id': 'fakeproject',
        'status': constants.STATUS_CREATING,
        'name': None,
        'description': None,
        'consistency_group_id': None,
        'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
    }
    snap.update(**values)

    # The API detail view omits user_id and always carries a 'links'
    # element whose exact value the tests don't care about.
    expected_snap = copy.deepcopy(snap)
    del expected_snap['user_id']
    expected_snap['links'] = mock.ANY
    return snap, expected_snap
def _get_fake_simple_cgsnapshot(self, **values):
snap = {
'id': 'fake_id',
'name': None,
}
snap.update(**values)
expected_snap = copy.deepcopy(snap)
expected_snap['links'] = mock.ANY
return snap, expected_snap
def _get_fake_cgsnapshot_member(self, **values):
    """Return (db_member_dict, expected_api_view) for a cgsnapshot member.

    Keyword arguments override the default field values.
    """
    member = {
        'id': 'fake_id',
        'user_id': 'fakeuser',
        'project_id': 'fakeproject',
        'status': constants.STATUS_CREATING,
        'cgsnapshot_id': None,
        'share_proto': None,
        'share_type_id': None,
        'share_id': None,
        'size': None,
        'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
    }
    member.update(**values)

    # The API view hides user_id/status and exposes share_proto under the
    # name 'share_protocol'.
    expected_member = copy.deepcopy(member)
    del expected_member['user_id']
    del expected_member['status']
    expected_member['share_protocol'] = expected_member.pop('share_proto')
    return member, expected_member
def test_create_invalid_body(self):
    """A body without a 'cgsnapshot' element is rejected with 400."""
    malformed = {"not_cg_snapshot": {}}
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.request, malformed)
def test_create_no_consistency_group_id(self):
    """A 'cgsnapshot' body missing consistency_group_id must 400.

    The body key was previously misspelled 'cgnapshot', which made this
    test exercise the invalid-body path (already covered by
    test_create_invalid_body) instead of the missing-id path it is named
    for.
    """
    body = {"cgsnapshot": {}}
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.request, body)
def test_create(self):
    """A valid create request returns the expected cgsnapshot view."""
    fake_snap, expected_snap = self._get_fake_cgsnapshot()
    cg_id = six.text_type(uuid.uuid4())
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    body = {"cgsnapshot": {"consistency_group_id": cg_id}}
    context = self.request.environ['manila.context']

    res_dict = self.controller.create(self.request, body)

    self.controller.cg_api.create_cgsnapshot.assert_called_once_with(
        context, consistency_group_id=cg_id)
    self.assertEqual(expected_snap, res_dict['cgsnapshot'])
def test_create_cg_does_not_exist(self):
    """Creating a snapshot of a missing CG maps NotFound to 400."""
    cg_id = six.text_type(uuid.uuid4())
    not_found = exception.ConsistencyGroupNotFound(
        consistency_group_id=six.text_type(uuid.uuid4()))
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(side_effect=not_found))

    body = {"cgsnapshot": {"consistency_group_id": cg_id}}
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.request, body)
def test_create_cg_does_not_a_uuid(self):
    """A non-UUID consistency_group_id maps NotFound to 400."""
    not_found = exception.ConsistencyGroupNotFound(
        consistency_group_id='not_a_uuid')
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(side_effect=not_found))

    body = {"cgsnapshot": {"consistency_group_id": "not_a_uuid"}}
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.request, body)
def test_create_invalid_cg(self):
    """An InvalidConsistencyGroup from the API maps to 409 Conflict."""
    cg_id = six.text_type(uuid.uuid4())
    invalid = exception.InvalidConsistencyGroup(reason='bad_status')
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(side_effect=invalid))

    body = {"cgsnapshot": {"consistency_group_id": cg_id}}
    self.assertRaises(webob.exc.HTTPConflict, self.controller.create,
                      self.request, body)
def test_create_with_name(self):
    """The optional 'name' field is forwarded to the cg API."""
    snap_name = 'fake_name'
    fake_snap, expected_snap = self._get_fake_cgsnapshot(name=snap_name)
    cg_id = six.text_type(uuid.uuid4())
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    body = {"cgsnapshot": {"consistency_group_id": cg_id,
                           "name": snap_name}}
    context = self.request.environ['manila.context']

    res_dict = self.controller.create(self.request, body)

    self.controller.cg_api.create_cgsnapshot.assert_called_once_with(
        context, consistency_group_id=cg_id, name=snap_name)
    self.assertEqual(expected_snap, res_dict['cgsnapshot'])
def test_create_with_description(self):
    """The optional 'description' field is forwarded to the cg API."""
    snap_description = 'fake_description'
    fake_snap, expected_snap = self._get_fake_cgsnapshot(
        description=snap_description)
    cg_id = six.text_type(uuid.uuid4())
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    body = {"cgsnapshot": {"consistency_group_id": cg_id,
                           "description": snap_description}}
    context = self.request.environ['manila.context']

    res_dict = self.controller.create(self.request, body)

    self.controller.cg_api.create_cgsnapshot.assert_called_once_with(
        context, consistency_group_id=cg_id,
        description=snap_description)
    self.assertEqual(expected_snap, res_dict['cgsnapshot'])
def test_create_with_name_and_description(self):
    """Both optional fields are forwarded together to the cg API."""
    snap_name = 'fake_name'
    snap_description = 'fake_description'
    cg_id = six.text_type(uuid.uuid4())
    fake_snap, expected_snap = self._get_fake_cgsnapshot(
        description=snap_description, name=snap_name)
    self.mock_object(self.controller.cg_api, 'create_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    body = {"cgsnapshot": {"consistency_group_id": cg_id,
                           "description": snap_description,
                           "name": snap_name}}
    context = self.request.environ['manila.context']

    res_dict = self.controller.create(self.request, body)

    self.controller.cg_api.create_cgsnapshot.assert_called_once_with(
        context, consistency_group_id=cg_id, name=snap_name,
        description=snap_description)
    self.assertEqual(expected_snap, res_dict['cgsnapshot'])
def test_update_with_name_and_description(self):
    """Update forwards name/description and returns the updated view."""
    snap_name = 'fake_name'
    snap_description = 'fake_description'
    snap_id = six.text_type(uuid.uuid4())
    fake_snap, expected_snap = self._get_fake_cgsnapshot(
        description=snap_description, name=snap_name)
    self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    self.mock_object(self.controller.cg_api, 'update_cgsnapshot',
                     mock.Mock(return_value=fake_snap))
    body = {"cgsnapshot": {"description": snap_description,
                           "name": snap_name}}
    context = self.request.environ['manila.context']

    res_dict = self.controller.update(self.request, snap_id, body)

    self.controller.cg_api.update_cgsnapshot.assert_called_once_with(
        context, fake_snap,
        dict(name=snap_name, description=snap_description))
    self.assertEqual(expected_snap, res_dict['cgsnapshot'])
    def test_update_snapshot_not_found(self):
        """update() maps a NotFound from the API layer to HTTP 404."""
        body = {"cgsnapshot": {}}
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(side_effect=exception.NotFound))
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.update,
                          self.request, 'fake_id', body)
    def test_update_invalid_body(self):
        """update() rejects a body missing the 'cgsnapshot' key with 400."""
        body = {"not_cgsnapshot": {}}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.update,
                          self.request, 'fake_id', body)
    def test_update_invalid_body_invalid_field(self):
        """update() rejects unknown fields and names the offending key
        in the HTTP 400 error text."""
        body = {"cgsnapshot": {"unknown_field": ""}}
        exc = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.update,
                                self.request, 'fake_id', body)
        self.assertTrue('unknown_field' in six.text_type(exc))
    def test_update_invalid_body_readonly_field(self):
        """update() rejects read-only fields (created_at) and names the
        offending key in the HTTP 400 error text."""
        body = {"cgsnapshot": {"created_at": []}}
        exc = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.update,
                                self.request, 'fake_id', body)
        self.assertTrue('created_at' in six.text_type(exc))
    def test_list_index(self):
        """index() returns the summary view of every existing cgsnapshot."""
        fake_snap, expected_snap = self._get_fake_simple_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap]))
        res_dict = self.controller.index(self.request)
        self.assertEqual([expected_snap], res_dict['cgsnapshots'])
    def test_list_index_no_cgs(self):
        """index() returns an empty list when no cgsnapshots exist."""
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[]))
        res_dict = self.controller.index(self.request)
        self.assertEqual([], res_dict['cgsnapshots'])
    def test_list_index_with_limit(self):
        """index() honors ?limit=1: only the first snapshot is returned."""
        fake_snap, expected_snap = self._get_fake_simple_cgsnapshot()
        fake_snap2, expected_snap2 = self._get_fake_simple_cgsnapshot(
            id="fake_id2")
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap, fake_snap2]))
        req = fakes.HTTPRequest.blank('/cgsnapshots?limit=1',
                                      version=self.api_version,
                                      experimental=True)
        res_dict = self.controller.index(req)
        self.assertEqual(1, len(res_dict['cgsnapshots']))
        self.assertEqual([expected_snap], res_dict['cgsnapshots'])
    def test_list_index_with_limit_and_offset(self):
        """index() honors ?limit=1&offset=1: only the second snapshot is
        returned."""
        fake_snap, expected_snap = self._get_fake_simple_cgsnapshot()
        fake_snap2, expected_snap2 = self._get_fake_simple_cgsnapshot(
            id="fake_id2")
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap, fake_snap2]))
        req = fakes.HTTPRequest.blank('/cgsnapshots?limit=1&offset=1',
                                      version=self.api_version,
                                      experimental=True)
        res_dict = self.controller.index(req)
        self.assertEqual(1, len(res_dict['cgsnapshots']))
        self.assertEqual([expected_snap2], res_dict['cgsnapshots'])
    def test_list_detail(self):
        """detail() returns the detailed view of every cgsnapshot."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap]))
        res_dict = self.controller.detail(self.request)
        self.assertEqual([expected_snap], res_dict['cgsnapshots'])
    def test_list_detail_no_cgs(self):
        """detail() returns an empty list when no cgsnapshots exist."""
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[]))
        res_dict = self.controller.detail(self.request)
        self.assertEqual([], res_dict['cgsnapshots'])
    def test_list_detail_with_limit(self):
        """detail() honors ?limit=1: only the first snapshot is returned."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        fake_snap2, expected_snap2 = self._get_fake_cgsnapshot(
            id="fake_id2")
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap, fake_snap2]))
        req = fakes.HTTPRequest.blank('/cgsnapshots?limit=1',
                                      version=self.api_version,
                                      experimental=True)
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['cgsnapshots']))
        self.assertEqual([expected_snap], res_dict['cgsnapshots'])
    def test_list_detail_with_limit_and_offset(self):
        """detail() honors ?limit=1&offset=1: only the second snapshot is
        returned."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        fake_snap2, expected_snap2 = self._get_fake_cgsnapshot(
            id="fake_id2")
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshots',
                         mock.Mock(return_value=[fake_snap, fake_snap2]))
        req = fakes.HTTPRequest.blank('/cgsnapshots?limit=1&offset=1',
                                      version=self.api_version,
                                      experimental=True)
        res_dict = self.controller.detail(req)
        self.assertEqual(1, len(res_dict['cgsnapshots']))
        self.assertEqual([expected_snap2], res_dict['cgsnapshots'])
    def test_delete(self):
        """delete() returns 202 after delegating to cg_api.delete_cgsnapshot."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(return_value=fake_snap))
        self.mock_object(self.controller.cg_api, 'delete_cgsnapshot')
        res = self.controller.delete(self.request, fake_snap['id'])
        self.assertEqual(202, res.status_code)
    def test_delete_not_found(self):
        """delete() maps a NotFound from the API layer to HTTP 404."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(side_effect=exception.NotFound))
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          self.request, fake_snap['id'])
    def test_delete_in_conflicting_status(self):
        """delete() maps InvalidCGSnapshot (bad state) to HTTP 409."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(return_value=fake_snap))
        self.mock_object(self.controller.cg_api, 'delete_cgsnapshot',
                         mock.Mock(
                             side_effect=exception.InvalidCGSnapshot(
                                 reason='blah')))
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          self.request, fake_snap['id'])
    def test_show(self):
        """show() returns the detailed view of the requested cgsnapshot."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(return_value=fake_snap))
        res_dict = self.controller.show(self.request, fake_snap['id'])
        self.assertEqual(expected_snap, res_dict['cgsnapshot'])
    def test_show_cg_not_found(self):
        """show() maps a NotFound from the API layer to HTTP 404."""
        fake_snap, expected_snap = self._get_fake_cgsnapshot()
        self.mock_object(self.controller.cg_api, 'get_cgsnapshot',
                         mock.Mock(side_effect=exception.NotFound))
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          self.request, fake_snap['id'])
    def test_members_empty(self):
        """members() returns an empty list when a snapshot has no members."""
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshot_members',
                         mock.Mock(return_value=[]))
        res_dict = self.controller.members(self.request, 'fake_cg_id')
        self.assertEqual([], res_dict['cgsnapshot_members'])
    def test_members(self):
        """members() returns the view of every member of the snapshot."""
        fake_member, expected_member = self._get_fake_cgsnapshot_member()
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshot_members',
                         mock.Mock(return_value=[fake_member]))
        res_dict = self.controller.members(self.request, 'fake_cg_id')
        self.assertEqual([expected_member], res_dict['cgsnapshot_members'])
def test_members_with_limit(self):
fake_member, expected_member = self._get_fake_cgsnapshot_member()
fake_member2, expected_member2 = self._get_fake_cgsnapshot_member(
id="fake_id2")
self.mock_object(self.controller.cg_api, 'get_all_cgsnapshot_members',
mock.Mock(return_value=[fake_member, fake_member2]))
req = fakes.HTTPRequest.blank('/members?limit=1',
version=self.api_version,
experimental=True)
res_dict = self.controller.members(req, 'fake_cg_id')
self.assertEqual(1, len(res_dict['cgsnapshot_members']))
    def test_members_with_limit_and_offset(self):
        """members() honors ?limit=1&offset=1: only the second member is
        returned."""
        fake_member, expected_member = self._get_fake_cgsnapshot_member()
        fake_member2, expected_member2 = self._get_fake_cgsnapshot_member(
            id="fake_id2")
        self.mock_object(self.controller.cg_api, 'get_all_cgsnapshot_members',
                         mock.Mock(return_value=[fake_member, fake_member2]))
        req = fakes.HTTPRequest.blank('/members?limit=1&offset=1',
                                      version=self.api_version,
                                      experimental=True)
        res_dict = self.controller.members(req, 'fake_cg_id')
        self.assertEqual(1, len(res_dict['cgsnapshot_members']))
        self.assertEqual([expected_member2], res_dict['cgsnapshot_members'])
| apache-2.0 |
dharmabumstead/ansible | lib/ansible/modules/cloud/ovirt/ovirt_host_storage_facts.py | 73 | 5742 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_host_storage_facts
short_description: Retrieve facts about one or more oVirt/RHV HostStorages (applicable only for block storage)
author: "Daniel Erez (@derez)"
version_added: "2.4"
description:
- "Retrieve facts about one or more oVirt/RHV HostStorages (applicable only for block storage)."
options:
host:
description:
- "Host to get device list from."
required: true
iscsi:
description:
- "Dictionary with values for iSCSI storage type:"
- "C(address) - Address of the iSCSI storage server."
- "C(target) - The target IQN for the storage device."
- "C(username) - A CHAP user name for logging into a target."
- "C(password) - A CHAP password for logging into a target."
fcp:
description:
- "Dictionary with values for fibre channel storage type:"
- "C(address) - Address of the fibre channel storage server."
- "C(port) - Port of the fibre channel storage server."
- "C(lun_id) - LUN id."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about HostStorages with specified target and address:
- ovirt_host_storage_facts:
host: myhost
iscsi:
target: iqn.2016-08-09.domain-01:nickname
address: 10.34.63.204
- debug:
var: ovirt_host_storages
'''
RETURN = '''
ovirt_host_storages:
description: "List of dictionaries describing the HostStorage. HostStorage attribtues are mapped to dictionary keys,
all HostStorage attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/host_storage."
returned: On success.
type: list
'''
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
get_id_by_name,
)
def _login(host_service, iscsi):
    """Log the host into an iSCSI target.

    :param host_service: oVirt HostService to issue the login through.
    :param iscsi: dict with optional 'username', 'password', 'address'
        and 'target' keys; missing keys are passed as None.
    """
    details = otypes.IscsiDetails(
        username=iscsi.get('username'),
        password=iscsi.get('password'),
        address=iscsi.get('address'),
        target=iscsi.get('target'),
    )
    host_service.iscsi_login(iscsi=details)
def _get_storage_type(params):
    """Return which block-storage type the module parameters select.

    'iscsi' wins over 'fcp' when both are present; returns None when
    neither key holds a value.
    """
    return next(
        (sd_type for sd_type in ('iscsi', 'fcp')
         if params.get(sd_type) is not None),
        None,
    )
def main():
    """Module entry point: gather HostStorage facts from an oVirt host.

    Logs into the iSCSI target first when the 'iscsi' parameter is given,
    then lists the host's storage, filters it by the selected storage
    type, and exits with the result under ``ovirt_host_storages``.
    """
    argument_spec = ovirt_facts_full_argument_spec(
        host=dict(required=True),
        iscsi=dict(default=None, type='dict'),
        fcp=dict(default=None, type='dict'),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # Initialise before the try block so the finally clause below cannot
    # hit a NameError when create_connection() itself fails.
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)

        # Get Host
        hosts_service = connection.system_service().hosts_service()
        host_id = get_id_by_name(hosts_service, module.params['host'])
        storage_type = _get_storage_type(module.params)
        host_service = hosts_service.host_service(host_id)
        if storage_type == 'iscsi':
            # The host must be logged into the target before its LUNs
            # become visible in the storage listing.
            iscsi = module.params.get('iscsi')
            _login(host_service, iscsi)

        # Get LUNs exposed from the specified target
        host_storages = host_service.storage_service().list()
        # Default to the unfiltered list: previously this variable was
        # only assigned inside the iscsi/fcp branches, so running with
        # neither parameter raised NameError at exit_json below.
        filtered_host_storages = host_storages
        if storage_type == 'iscsi':
            filtered_host_storages = [host_storage for host_storage in host_storages
                                      if host_storage.type == otypes.StorageType.ISCSI]
            if 'target' in iscsi:
                filtered_host_storages = [host_storage for host_storage in filtered_host_storages
                                          if iscsi.get('target') == host_storage.logical_units[0].target]
        elif storage_type == 'fcp':
            filtered_host_storages = [host_storage for host_storage in host_storages
                                      if host_storage.type == otypes.StorageType.FCP]
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_host_storages=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in filtered_host_storages
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        if connection is not None:
            connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
| gpl-3.0 |
RJRetro/mame | 3rdparty/portmidi/pm_python/pyportmidi/midi.py | 90 | 19419 | """
Module for interacting with midi input and output.
The midi module can send output to midi devices, and get input
from midi devices. It can also list midi devices on the system.
Including real midi devices, and virtual ones.
It uses the portmidi library. Is portable to which ever platforms
portmidi supports (currently windows, OSX, and linux).
"""
import atexit
_init = False
_pypm = None
__all__ = [ "Input",
"MidiException",
"Output",
"get_count",
"get_default_input_id",
"get_default_output_id",
"get_device_info",
"init",
"quit",
"time",
]
__theclasses__ = ["Input", "Output"]
def init():
    """initialize the midi module
    pyportmidi.init(): return None
    Call the initialisation function before using the midi module.
    It is safe to call this more than once.
    """
    global _init, _pypm
    if _init:
        # Already initialised; repeated calls are a documented no-op.
        return
    import pyportmidi._pyportmidi
    _pypm = pyportmidi._pyportmidi
    _pypm.Initialize()
    _init = True
    # Make sure PortMidi is terminated even if the caller forgets quit().
    atexit.register(quit)
def quit():
    """uninitialize the midi module
    pyportmidi.quit(): return None
    Called automatically atexit if you don't call it.
    It is safe to call this function more than once.
    """
    global _init, _pypm
    if not _init:
        # Nothing to tear down; repeated calls are a documented no-op.
        return
    # TODO: find all Input and Output classes and close them first?
    _pypm.Terminate()
    _init = False
    del _pypm
def _check_init():
    # Guard used by the public API: fail fast with a clear RuntimeError
    # when init() has not been called yet, instead of letting callers hit
    # an AttributeError on the uninitialised _pypm module reference.
    if not _init:
        raise RuntimeError("pyportmidi not initialised.")
def get_count():
    """gets the number of devices.
    pyportmidi.get_count(): return num_devices
    Device ids range from 0 to get_count() -1
    """
    _check_init()
    # Thin wrapper around the PortMidi extension's device enumeration.
    return _pypm.CountDevices()
def get_default_input_id():
    """gets default input device number
    pyportmidi.get_default_input_id(): return default_id
    Return the default device ID or -1 if there are no devices.
    The result can be passed to the Input()/Ouput() class.
    On the PC, the user can specify a default device by
    setting an environment variable. For example, to use device #1.
        set PM_RECOMMENDED_INPUT_DEVICE=1
    The user should first determine the available device ID by using
    the supplied application "testin" or "testout".
    In general, the registry is a better place for this kind of info,
    and with USB devices that can come and go, using integers is not
    very reliable for device identification. Under Windows, if
    PM_RECOMMENDED_OUTPUT_DEVICE (or PM_RECOMMENDED_INPUT_DEVICE) is
    *NOT* found in the environment, then the default device is obtained
    by looking for a string in the registry under:
        HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Input_Device
    and HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Output_Device
    for a string. The number of the first device with a substring that
    matches the string exactly is returned. For example, if the string
    in the registry is "USB", and device 1 is named
    "In USB MidiSport 1x1", then that will be the default
    input because it contains the string "USB".
    In addition to the name, get_device_info() returns "interf", which
    is the interface name. (The "interface" is the underlying software
    system or API used by PortMidi to access devices. Examples are
    MMSystem, DirectX (not implemented), ALSA, OSS (not implemented), etc.)
    At present, the only Win32 interface is "MMSystem", the only Linux
    interface is "ALSA", and the only Max OS X interface is "CoreMIDI".
    To specify both the interface and the device name in the registry,
    separate the two with a comma and a space, e.g.:
        MMSystem, In USB MidiSport 1x1
    In this case, the string before the comma must be a substring of
    the "interf" string, and the string after the space must be a
    substring of the "name" name string in order to match the device.
    Note: in the current release, the default is simply the first device
    (the input or output device with the lowest PmDeviceID).
    """
    # Bug fix: every other public accessor (get_count,
    # get_default_output_id, get_device_info) guards with _check_init();
    # without it, calling this before init() raised an opaque
    # AttributeError/NameError on _pypm instead of the intended
    # RuntimeError.
    _check_init()
    return _pypm.GetDefaultInputDeviceID()
def get_default_output_id():
    """gets default output device number
    pyportmidi.get_default_output_id(): return default_id
    Return the default device ID or -1 if there are no devices.
    The result can be passed to the Input()/Ouput() class.
    On the PC, the user can specify a default device by setting an
    environment variable, e.g.:
        set PM_RECOMMENDED_OUTPUT_DEVICE=1
    or, under Windows, via a registry string under
    HKEY_LOCAL_MACHINE/SOFTWARE/PortMidi/Recommended_Output_Device.
    See get_default_input_id() for the full description of how the
    environment variable and registry lookup interact (the mechanism is
    identical for input and output).
    Note: in the current release, the default is simply the first device
    (the input or output device with the lowest PmDeviceID).
    """
    _check_init()
    return _pypm.GetDefaultOutputDeviceID()
def get_device_info(an_id):
    """ returns information about a midi device
    pyportmidi.get_device_info(an_id): return (interf, name, input, output, opened)
    interf - a text string describing the device interface, eg 'ALSA'.
    name - a text string for the name of the device, eg 'Midi Through Port-0'
    input - 0, or 1 if the device is an input device.
    output - 0, or 1 if the device is an output device.
    opened - 0, or 1 if the device is opened.
    If the id is out of range, the function returns None.
    """
    _check_init()
    # The extension returns None (not an exception) for out-of-range ids;
    # Input/Output __init__ rely on that to produce their error messages.
    return _pypm.GetDeviceInfo(an_id)
class Input(object):
    """Input is used to get midi input from midi devices.
    Input(device_id)
    Input(device_id, buffer_size)
    buffer_size -the number of input events to be buffered waiting to
      be read using Input.read()
    """
    def __init__(self, device_id, buffer_size=4096):
        """
        The buffer_size specifies the number of input events to be buffered
        waiting to be read using Input.read().
        """
        _check_init()

        if device_id == -1:
            # Bug fix: this message previously said "output"/"Output",
            # copy-pasted from the Output class.
            raise MidiException("Device id is -1, not a valid input id. -1 usually means there were no default Input devices.")

        try:
            r = get_device_info(device_id)
        except TypeError:
            raise TypeError("an integer is required")
        except OverflowError:
            raise OverflowError("long int too large to convert to int")

        # and now some nasty looking error checking, to provide nice error
        # messages to the kind, lovely, midi using people of whereever.
        if r:
            # Renamed from input/output to avoid shadowing the builtin.
            interf, name, is_input, is_output, opened = r
            if is_input:
                try:
                    self._input = _pypm.Input(device_id, buffer_size)
                except TypeError:
                    raise TypeError("an integer is required")
                self.device_id = device_id
            elif is_output:
                raise MidiException("Device id given is not a valid input id, it is an output id.")
            else:
                raise MidiException("Device id given is not a valid input id.")
        else:
            raise MidiException("Device id invalid, out of range.")

    def _check_open(self):
        # Internal guard: read()/poll() must not touch a closed stream.
        if self._input is None:
            raise MidiException("midi not open.")

    def close(self):
        """ closes a midi stream, flushing any pending buffers.
        Input.close(): return None
        PortMidi attempts to close open streams when the application
        exits -- this is particularly difficult under Windows.
        """
        _check_init()
        if not (self._input is None):
            self._input.Close()
        self._input = None

    def read(self, num_events):
        """reads num_events midi events from the buffer.
        Input.read(num_events): return midi_event_list
        Reads from the Input buffer and gives back midi events.
        [[[status,data1,data2,data3],timestamp],
         [[status,data1,data2,data3],timestamp],...]
        """
        _check_init()
        self._check_open()
        return self._input.Read(num_events)

    def poll(self):
        """returns true if there's data, or false if not.
        Input.poll(): return Bool
        raises a MidiException on error.
        """
        _check_init()
        self._check_open()

        r = self._input.Poll()
        if r == _pypm.TRUE:
            return True
        elif r == _pypm.FALSE:
            return False
        else:
            # Bug fix: GetErrorText lives on the _pypm extension module;
            # the bare name raised NameError instead of reporting the
            # actual PortMidi error.
            err_text = _pypm.GetErrorText(r)
            raise MidiException((r, err_text))
class Output(object):
    """Output is used to send midi to an output device
    Output(device_id)
    Output(device_id, latency = 0)
    Output(device_id, buffer_size = 4096)
    Output(device_id, latency, buffer_size)
    The buffer_size specifies the number of output events to be
    buffered waiting for output.  (In some cases -- see below --
    PortMidi does not buffer output at all and merely passes data
    to a lower-level API, in which case buffersize is ignored.)
    latency is the delay in milliseconds applied to timestamps to determine
    when the output should actually occur. (If latency is < 0, 0 is
    assumed.)
    If latency is zero, timestamps are ignored and all output is delivered
    immediately. If latency is greater than zero, output is delayed until
    the message timestamp plus the latency. (NOTE: time is measured
    relative to the time source indicated by time_proc. Timestamps are
    absolute, not relative delays or offsets.) In some cases, PortMidi
    can obtain better timing than your application by passing timestamps
    along to the device driver or hardware. Latency may also help you
    to synchronize midi data to audio data by matching midi latency to
    the audio buffer latency.
    """
    def __init__(self, device_id, latency = 0, buffer_size = 4096):
        """See the class docstring for the meaning of device_id, latency
        and buffer_size (the original text here duplicated it verbatim).
        """
        _check_init()
        # Set before any operation that can raise, so abort()/close()
        # behave sanely even if construction fails part-way.
        self._aborted = 0

        if device_id == -1:
            raise MidiException("Device id is -1, not a valid output id. -1 usually means there were no default Output devices.")

        try:
            r = get_device_info(device_id)
        except TypeError:
            raise TypeError("an integer is required")
        except OverflowError:
            raise OverflowError("long int too large to convert to int")

        # and now some nasty looking error checking, to provide nice error
        # messages to the kind, lovely, midi using people of whereever.
        if r:
            interf, name, input, output, opened = r
            if output:
                try:
                    self._output = _pypm.Output(device_id, latency)
                except TypeError:
                    raise TypeError("an integer is required")
                self.device_id = device_id
            elif input:
                raise MidiException("Device id given is not a valid output id, it is an input id.")
            else:
                raise MidiException("Device id given is not a valid output id.")
        else:
            raise MidiException("Device id invalid, out of range.")

    def _check_open(self):
        # Internal guard: refuse to write to a closed or aborted stream.
        if self._output is None:
            raise MidiException("midi not open.")
        if self._aborted:
            raise MidiException("midi aborted.")

    def close(self):
        """ closes a midi stream, flushing any pending buffers.
        Output.close(): return None
        PortMidi attempts to close open streams when the application
        exits -- this is particularly difficult under Windows.
        """
        _check_init()
        if not (self._output is None):
            self._output.Close()
        self._output = None

    def abort(self):
        """terminates outgoing messages immediately
        Output.abort(): return None
        The caller should immediately close the output port;
        this call may result in transmission of a partial midi message.
        There is no abort for Midi input because the user can simply
        ignore messages in the buffer and close an input device at
        any time.
        """
        _check_init()
        if self._output:
            self._output.Abort()
        # Remembered so that _check_open() rejects further writes.
        self._aborted = 1

    def write(self, data):
        """writes a list of midi data to the Output
        Output.write(data)
        writes series of MIDI information in the form of a list:
        write([[[status <,data1><,data2><,data3>],timestamp],
               [[status <,data1><,data2><,data3>],timestamp],...])
        <data> fields are optional
        example: choose program change 1 at time 20000 and
        send note 65 with velocity 100 500 ms later.
        write([[[0xc0,0,0],20000],[[0x90,60,100],20500]])
        notes:
          1. timestamps will be ignored if latency = 0.
          2. To get a note to play immediately, send MIDI info with
             timestamp read from function Time.
          3. understanding optional data fields:
             write([[[0xc0,0,0],20000]]) is equivalent to
             write([[[0xc0],20000]])
        Can send up to 1024 elements in your data list, otherwise an
        IndexError exception is raised.
        """
        _check_init()
        self._check_open()
        self._output.Write(data)

    def write_short(self, status, data1 = 0, data2 = 0):
        """write_short(status <, data1><, data2>)
        Output.write_short(status)
        Output.write_short(status, data1 = 0, data2 = 0)
        output MIDI information of 3 bytes or less.
        data fields are optional
        status byte could be:
        0xc0 = program change
        0x90 = note on
        etc.
        data bytes are optional and assumed 0 if omitted
        example: note 65 on with velocity 100
        write_short(0x90,65,100)
        """
        _check_init()
        self._check_open()
        self._output.WriteShort(status, data1, data2)

    def write_sys_ex(self, when, msg):
        """writes a timestamped system-exclusive midi message.
        Output.write_sys_ex(when, msg)
        msg - can be a *list* or a *string*
        when - a timestamp in miliseconds
        example:
          (assuming o is an onput MIDI stream)
          o.write_sys_ex(0,'\\xF0\\x7D\\x10\\x11\\x12\\x13\\xF7')
          is equivalent to
          o.write_sys_ex(pyportmidi.time(),
                         [0xF0,0x7D,0x10,0x11,0x12,0x13,0xF7])
        """
        _check_init()
        self._check_open()
        self._output.WriteSysEx(when, msg)

    def note_on(self, note, velocity=None, channel = 0):
        """turns a midi note on. Note must be off.
        Output.note_on(note, velocity=None, channel = 0)
        Turn a note on in the output stream. The note must already
        be off for this to work correctly.
        """
        if velocity is None:
            velocity = 0
        if not (0 <= channel <= 15):
            raise ValueError("Channel not between 0 and 15.")
        # 0x90 is the MIDI note-on status nibble; the low nibble of the
        # status byte carries the channel number.
        self.write_short(0x90+channel, note, velocity)

    def note_off(self, note, velocity=None, channel = 0):
        """turns a midi note off. Note must be on.
        Output.note_off(note, velocity=None, channel = 0)
        Turn a note off in the output stream. The note must already
        be on for this to work correctly.
        """
        if velocity is None:
            velocity = 0
        if not (0 <= channel <= 15):
            raise ValueError("Channel not between 0 and 15.")
        # 0x80 is the MIDI note-off status nibble.
        self.write_short(0x80 + channel, note, velocity)

    def set_instrument(self, instrument_id, channel = 0):
        """select an instrument, with a value between 0 and 127
        Output.set_instrument(instrument_id, channel = 0)
        """
        if not (0 <= instrument_id <= 127):
            raise ValueError("Undefined instrument id: %d" % instrument_id)
        if not (0 <= channel <= 15):
            raise ValueError("Channel not between 0 and 15.")
        # 0xc0 is the MIDI program-change status nibble.
        self.write_short(0xc0+channel, instrument_id)
def time():
    """returns the current time in ms of the PortMidi timer
    pyportmidi.time(): return time
    The time is reset to 0, when the module is inited.
    """
    # Bug fix: every other public accessor guards with _check_init();
    # without it, calling time() before init() failed with an opaque
    # NameError/AttributeError on _pypm instead of the intended
    # RuntimeError.
    _check_init()
    return _pypm.Time()
class MidiException(Exception):
    """MidiException(errno) that can be raised."""

    def __init__(self, value):
        # Expose the raw error payload (an errno, message, or an
        # (errno, text) tuple) to callers via the ``parameter`` attribute.
        self.parameter = value

    def __str__(self):
        # Render the stored payload with repr() so tuples and strings are
        # unambiguous in tracebacks.
        return repr(self.parameter)
| gpl-2.0 |
awni/tensorflow | tensorflow/contrib/skflow/python/skflow/ops/dnn_ops.py | 1 | 1777 | """TensorFlow ops for deep neural networks."""
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from .dropout_ops import dropout
def dnn(tensor_in, hidden_units, activation=tf.nn.relu, keep_prob=None):
    """Creates fully connected deep neural network subgraph.
    Args:
        tensor_in: tensor or placeholder for input features.
        hidden_units: list of counts of hidden units in each layer.
        activation: activation function between layers. Can be None.
        keep_prob: if not None, will add a dropout layer with given
                   probability.
    Returns:
        A tensor which would be a deep neural network.
    """
    with tf.variable_scope('dnn'):
        layer = tensor_in
        for layer_idx, num_units in enumerate(hidden_units):
            with tf.variable_scope('layer%d' % layer_idx):
                # Affine transform; the True argument adds a bias term.
                layer = tf.nn.rnn_cell.linear(layer, num_units, True)
                if activation:
                    layer = activation(layer)
                if keep_prob:
                    layer = dropout(layer, keep_prob)
        return layer
| apache-2.0 |
zeehio/META-SHARE | misc/tools/generateDS-2.7a/gends_user_methods.py | 32 | 6423 | #!/usr/bin/env python
# -*- mode: pymode; coding: latin1; -*-
import sys
import re
#
# You must include the following class definition at the top of
# your method specification file.
#
class MethodSpec(object):
    def __init__(self, name='', source='', class_names='',
            class_names_compiled=None):
        """MethodSpec -- A specification of a method.
        Member variables:
            name -- The method name
            source -- The source code for the method.  Must be
                indented to fit in a class definition.
            class_names -- A regular expression that must match the
                class names in which the method is to be inserted.
            class_names_compiled -- The compiled class names.
                generateDS.py will do this compile for you.
        """
        self.name = name
        self.source = source
        if class_names is None:
            # Bug fix: this previously assigned the tuple ('.*', ), which
            # re.compile() below cannot compile (TypeError).  Use the
            # equivalent match-anything pattern string instead.
            self.class_names = '.*'
        else:
            self.class_names = class_names
        if class_names_compiled is None:
            self.class_names_compiled = re.compile(self.class_names)
        else:
            self.class_names_compiled = class_names_compiled

    def get_name(self):
        return self.name

    def set_name(self, name):
        self.name = name

    def get_source(self):
        return self.source

    def set_source(self, source):
        self.source = source

    def get_class_names(self):
        return self.class_names

    def set_class_names(self, class_names):
        # Keep the compiled pattern in sync with the raw pattern.
        self.class_names = class_names
        self.class_names_compiled = re.compile(class_names)

    def get_class_names_compiled(self):
        return self.class_names_compiled

    def set_class_names_compiled(self, class_names_compiled):
        self.class_names_compiled = class_names_compiled

    def match_name(self, class_name):
        """Match against the name of the class currently being generated.
        If this method returns True, the method will be inserted in
        the generated class.
        """
        if self.class_names_compiled.search(class_name):
            return True
        else:
            return False

    def get_interpolated_source(self, values_dict):
        """Get the method source code, interpolating values from values_dict
        into it.  The source returned by this method is inserted into
        the generated class.
        """
        source = self.source % values_dict
        return source

    def show(self):
        # print-as-function form: identical output under Python 2 for
        # these single-argument calls, and valid under Python 3.
        print('specification:')
        print('    name: %s' % (self.name, ))
        print(self.source)
        print('    class_names: %s' % (self.class_names, ))
        print('    names pat  : %s' % (self.class_names_compiled.pattern, ))
#
# Provide one or more method specification such as the following.
# Notes:
# - Each generated class contains a class variable _member_data_items.
# This variable contains a list of instances of class _MemberSpec.
# See the definition of class _MemberSpec near the top of the
# generated superclass file and also section "User Methods" in
# the documentation, as well as the examples below.
#
# Replace the following method specifications with your own.
#
# Sample method specification #1
#
method1 = MethodSpec(name='walk_and_update',
source='''\
def walk_and_update(self):
members = %(class_name)s._member_data_items
for member in members:
obj1 = getattr(self, member.get_name())
if member.get_data_type() == 'xs:date':
newvalue = date_calcs.date_from_string(obj1)
setattr(self, member.get_name(), newvalue)
elif member.get_container():
for child in obj1:
if type(child) == types.InstanceType:
child.walk_and_update()
else:
obj1 = getattr(self, member.get_name())
if type(obj1) == types.InstanceType:
obj1.walk_and_update()
if %(class_name)s.superclass != None:
%(class_name)s.superclass.walk_and_update(self)
''',
# class_names=r'^Employee$|^[a-zA-Z]*Dependent$',
class_names=r'^.*$',
)
#
# Sample method specification #2
#
method2 = MethodSpec(name='walk_and_show',
source='''\
def walk_and_show(self, depth):
global counter
counter += 1
depth += 1
print '%%d. class: %(class_name)s depth: %%d' %% (counter, depth, )
members = %(class_name)s._member_data_items
for member in members:
s1 = member.get_name()
s2 = member.get_data_type()
s3 = '%%d' %% member.get_container()
obj1 = getattr(self, member.get_name())
if member.get_container():
s4 = '<container>'
else:
if type(obj1) != types.InstanceType:
s4 = '%%s' %% obj1
else:
s4 = '<instance>'
s5 = '%%s%%s%%s %%s' %% (s1.ljust(16), s2.ljust(16), s3.rjust(4), s4, )
print ' ', s5
for member in members:
if member.get_container():
for child in getattr(self, member.get_name()):
if type(child) == types.InstanceType:
child.walk_and_show(depth)
else:
obj1 = getattr(self, member.get_name())
if type(obj1) == types.InstanceType:
obj1.walk_and_show(depth)
''',
# Attach to all classes.
class_names=r'^.*$',
)
#
# Sample method specification #3
#
method3 = MethodSpec(name='set_up',
source='''\
def set_up(self):
global types, counter
import types as types_module
types = types_module
counter = 0
''',
# Attach only to the root class: people.
class_names=r'^people$',
)
#
# Sample method specification #4
#
# Spec for a simple bound-check predicate attached to Truck/Boat classes.
# Bug fix: the spec is named 'method4' but its source defined ``def
# method2`` (copy-paste from sample #2), so the generated class would gain
# a method whose name contradicts this specification.  The generated
# method is now named method4 to match.
method4 = MethodSpec(name='method4',
    source='''\
    def method4(self, max):
        if self.max > max:
            return False
        else:
            return True
    ''',
    class_names=r'^Truck$|^Boat$',
    )
#
# Provide a list of your method specifications.
# This list of specifications must be named METHOD_SPECS.
#
# generateDS.py requires this exact module-level name (see the comment
# block above); it consumes each spec when generating classes.
METHOD_SPECS = (
    method1,
    method2,
    method3,
    method4,
    )
def test():
    """Dump every registered method specification to stdout."""
    for method_spec in METHOD_SPECS:
        method_spec.show()
def main():
    """Command-line entry point: show all registered specs."""
    test()


if __name__ == '__main__':
    main()
| bsd-3-clause |
dcramer/django-compositepks | django/core/serializers/python.py | 1 | 3939 | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models
from django.utils.encoding import smart_unicode
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects.
    """
    internal_use_only = True

    def start_serialization(self):
        # Reset accumulated output for a fresh serialization run.
        self.objects = []
        self._current = None

    def end_serialization(self):
        pass

    def start_object(self, obj):
        # Begin collecting field values for one object.
        self._current = {}

    def end_object(self, obj):
        record = {
            "model": smart_unicode(obj._meta),
            "pk": smart_unicode(obj._get_pk_val(), strings_only=True),
            "fields": self._current,
        }
        self.objects.append(record)
        self._current = None

    def handle_field(self, obj, field):
        raw_value = getattr(obj, field.name)
        self._current[field.name] = smart_unicode(raw_value, strings_only=True)

    def handle_fk_field(self, obj, field):
        related = getattr(obj, field.name)
        if related is not None:
            # TODO: can we remove the field_name part?
            if field.rel.field_name in related._meta.pk.names:
                # Related to remote object via primary key
                related = related._get_pk_val()
            else:
                # Related to remote object via other field
                related = getattr(related, field.rel.field_name)
        self._current[field.name] = smart_unicode(related, strings_only=True)

    def handle_m2m_field(self, obj, field):
        if not field.creates_table:
            return
        pk_values = []
        for related in getattr(obj, field.name).iterator():
            pk_values.append(smart_unicode(related._get_pk_val(), strings_only=True))
        self._current[field.name] = pk_values

    def getvalue(self):
        return self.objects
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.

    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor.  Yields one DeserializedObject
    per input dict.
    """
    # Ensure all installed apps' models are loaded before lookups below.
    models.get_apps()
    for d in object_list:
        # Look up the model and starting build a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}
        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            if isinstance(field_value, str):
                # Decode bytestrings with the caller-supplied encoding
                # (falling back to the project default charset).
                field_value = smart_unicode(field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True)
            field = Model._meta.get_field(field_name)
            # Handle M2M relations: collected separately so the caller can
            # apply them after the instance is saved.
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                m2m_convert = field.rel.to._meta.pk.to_python
                m2m_data[field.name] = [m2m_convert(smart_unicode(pk)) for pk in field_value]
            # Handle FK fields: stored under attname (the "<name>_id" column).
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                if field_value is not None:
                    data[field.attname] = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
                else:
                    data[field.attname] = None
            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)
        yield base.DeserializedObject(Model(**data), m2m_data)
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.module_name" string.
    """
    model = None
    try:
        # NOTE(review): presumably get_model raises TypeError when the
        # identifier does not split into the expected parts -- verify.
        model = models.get_model(*model_identifier.split("."))
    except TypeError:
        pass
    if model is not None:
        return model
    raise base.DeserializationError(u"Invalid model identifier: '%s'" % model_identifier)
| bsd-3-clause |
onceuponatimeforever/oh-mainline | vendor/packages/celery/celery/execute/trace.py | 18 | 5440 | # -*- coding: utf-8 -*-
"""
celery.execute.trace
~~~~~~~~~~~~~~~~~~~~
This module defines how the task execution is traced:
errors are recorded, handlers are applied and so on.
:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import traceback
from .. import states, signals
from ..datastructures import ExceptionInfo
from ..exceptions import RetryTaskError
from ..registry import tasks
class TraceInfo(object):
    # Snapshot of one traced call: final state, return value (or the
    # exception) and a pre-formatted traceback string.

    def __init__(self, status=states.PENDING, retval=None, exc_info=None):
        self.status = status
        self.retval = retval
        self.exc_info = exc_info
        self.exc_type = None
        self.exc_value = None
        self.tb = None
        self.strtb = None
        if self.exc_info:
            # Unpack a sys.exc_info()-style triple and render the full
            # traceback once, for logging/reporting.
            self.exc_type, self.exc_value, self.tb = exc_info
            self.strtb = "\n".join(traceback.format_exception(*exc_info))

    @classmethod
    def trace(cls, fun, args, kwargs, propagate=False):
        """Trace the execution of a function, calling the appropriate callback
        if the function raises retry, a failure, or returns successfully.

        :keyword propagate: If true, errors will propagate to the caller.
        """
        try:
            return cls(states.SUCCESS, retval=fun(*args, **kwargs))
        except RetryTaskError, exc:
            # The task asked to be retried; recorded as RETRY, not FAILURE.
            return cls(states.RETRY, retval=exc, exc_info=sys.exc_info())
        except Exception, exc:
            if propagate:
                raise
            return cls(states.FAILURE, retval=exc, exc_info=sys.exc_info())
        except BaseException, exc:
            # SystemExit / KeyboardInterrupt etc. always propagate.
            raise
        except:  # pragma: no cover
            # For Python2.5 where raising strings are still allowed
            # (but deprecated)
            if propagate:
                raise
            return cls(states.FAILURE, retval=None, exc_info=sys.exc_info())
class TaskTrace(object):
    # Executes a single task request: fires the pre/post-run signals,
    # runs the task via TraceInfo.trace() and dispatches the result to the
    # per-state handler.

    def __init__(self, task_name, task_id, args, kwargs, task=None,
            request=None, propagate=None, **_):
        self.task_id = task_id
        self.task_name = task_name
        self.args = args
        self.kwargs = kwargs
        # Fall back to the global task registry when no instance is given.
        self.task = task or tasks[self.task_name]
        self.request = request or {}
        self.status = states.PENDING
        self.strtb = None
        self.propagate = propagate
        # Dispatch table: final state -> handler invoked by _trace().
        self._trace_handlers = {states.FAILURE: self.handle_failure,
                                states.RETRY: self.handle_retry,
                                states.SUCCESS: self.handle_success}

    def __call__(self):
        return self.execute()

    def execute(self):
        # Prime the task's request context before task_prerun fires, and
        # always clear it afterwards.
        self.task.request.update(self.request, args=self.args,
                                 called_directly=False, kwargs=self.kwargs)
        signals.task_prerun.send(sender=self.task, task_id=self.task_id,
                                 task=self.task, args=self.args,
                                 kwargs=self.kwargs)
        retval = self._trace()

        signals.task_postrun.send(sender=self.task, task_id=self.task_id,
                                  task=self.task, args=self.args,
                                  kwargs=self.kwargs, retval=retval)
        self.task.request.clear()
        return retval

    def _trace(self):
        # Run the task, record outcome, then invoke the state handler
        # followed by the unconditional after-return hook.
        trace = TraceInfo.trace(self.task, self.args, self.kwargs,
                                propagate=self.propagate)
        self.status = trace.status
        self.strtb = trace.strtb
        handler = self._trace_handlers[trace.status]
        r = handler(trace.retval, trace.exc_type, trace.tb, trace.strtb)
        self.handle_after_return(trace.status, trace.retval,
                                 trace.exc_type, trace.tb, trace.strtb,
                                 einfo=trace.exc_info)
        return r

    def handle_after_return(self, status, retval, type_, tb, strtb,
            einfo=None):
        # Wrap the raw exc_info for exception states before notifying
        # the task's after_return hook.
        if status in states.EXCEPTION_STATES:
            einfo = ExceptionInfo(einfo)
        self.task.after_return(status, retval, self.task_id,
                               self.args, self.kwargs, einfo)

    def handle_success(self, retval, *args):
        """Handle successful execution."""
        self.task.on_success(retval, self.task_id, self.args, self.kwargs)
        return retval

    def handle_retry(self, exc, type_, tb, strtb):
        """Handle retry exception."""
        # Create a simpler version of the RetryTaskError that stringifies
        # the original exception instead of including the exception instance.
        # This is for reporting the retry in logs, email etc, while
        # guaranteeing pickleability.
        message, orig_exc = exc.args
        expanded_msg = "%s: %s" % (message, str(orig_exc))
        einfo = ExceptionInfo((type_, type_(expanded_msg, None), tb))
        self.task.on_retry(exc, self.task_id, self.args, self.kwargs, einfo)
        return einfo

    def handle_failure(self, exc, type_, tb, strtb):
        """Handle exception."""
        einfo = ExceptionInfo((type_, exc, tb))
        self.task.on_failure(exc, self.task_id, self.args, self.kwargs, einfo)
        signals.task_failure.send(sender=self.task, task_id=self.task_id,
                                  exception=exc, args=self.args,
                                  kwargs=self.kwargs, traceback=tb,
                                  einfo=einfo)
        return einfo
| agpl-3.0 |
openiitbombayx/edx-platform | common/djangoapps/course_modes/migrations/0001_initial.py | 114 | 1860 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Initial South schema migration: creates the course_modes_coursemode
    # table.  Migrations are historical records; do not edit the schema
    # definitions below after they have been applied.

    def forwards(self, orm):
        # Adding model 'CourseMode'
        db.create_table('course_modes_coursemode', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('course_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
            ('mode_slug', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('mode_display_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('min_price', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('suggested_prices', self.gf('django.db.models.fields.CommaSeparatedIntegerField')(default='', max_length=255, blank=True)),
        ))
        db.send_create_signal('course_modes', ['CourseMode'])

    def backwards(self, orm):
        # Deleting model 'CourseMode'
        db.delete_table('course_modes_coursemode')

    # Frozen ORM definition used by South at migration time.
    models = {
        'course_modes.coursemode': {
            'Meta': {'object_name': 'CourseMode'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
        }
    }

    complete_apps = ['course_modes']
| agpl-3.0 |
charles1018/x-honami | Documentation/target/tcm_mod_builder.py | 4981 | 41422 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
# Shared module state, populated by command-line handling and by the
# protocol-specific include generators below.
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
# Target-side / initiator-side port naming prefixes ("lport"/"nport" for
# FC, "tport"/"iport" for SAS and iSCSI); set by the *_include builders.
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    """Print *msg* and abort the script with exit status 1.

    Uses print(msg), which is valid in both Python 2 and Python 3,
    instead of the py2-only ``print msg`` statement.
    """
    print(msg)
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    """Create the fabric module output directory.

    Returns 1 when the directory already exists, otherwise None.
    Aborts the script (via tcm_mod_err) if the directory cannot be
    created.

    Fixes: the original checked ``if ret:`` after os.mkdir(), but
    os.mkdir() returns None and signals failure by raising OSError, so
    that error branch was dead code; also converts the py2-only print
    statement to a py2/py3-compatible call.
    """
    if os.path.isdir(fabric_mod_dir_var):
        return 1
    print("Creating fabric_mod_dir: " + fabric_mod_dir_var)
    try:
        os.mkdir(fabric_mod_dir_var)
    except OSError:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    """Write <fabric_mod_name>_base.h for an FC fabric: the nacl/tpg/lport
    structs wrapping the generic TCM se_* types.

    Side effects: writes the header file and sets the module-level
    fabric_mod_port / fabric_mod_init_port globals to "lport"/"nport".
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        # NOTE(review): dead check -- open() raises on failure rather than
        # returning a falsy value.
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += " u64 nport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* FC lport target portal group tag for TCM */\n"
    buf += " u16 lport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += " struct " + fabric_mod_name + "_lport *lport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += " /* SCSI protocol the lport is providing */\n"
    buf += " u8 lport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += " u64 lport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
    buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += " struct se_wwn lport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    if ret:
        # NOTE(review): under Python 2 file.write() returns None so this
        # never fires; under Python 3 it returns a character count and
        # would wrongly abort -- confirm the intended interpreter.
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"
    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    """Write <fabric_mod_name>_base.h for a SAS fabric: the nacl/tpg/tport
    structs wrapping the generic TCM se_* types.

    Side effects: writes the header file and sets the module-level
    fabric_mod_port / fabric_mod_init_port globals to "tport"/"iport".
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        # NOTE(review): dead check -- open() raises on failure.
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += " u64 iport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* SAS port target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += " u64 tport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for SAS Target port */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    if ret:
        # NOTE(review): see tcm_mod_build_FC_include -- py2/py3
        # write()-return semantics differ; dead under py2.
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
    """Write <fabric_mod_name>_base.h for an iSCSI fabric: nacl/tpg/tport
    structs keyed on ASCII IQN names (no binary WWPN members).

    Side effects: writes the header file and sets the module-level
    fabric_mod_port / fabric_mod_init_port globals to "tport"/"iport".
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        # NOTE(review): dead check -- open() raises on failure.
        tcm_mod_err("Unable to open file: " + f)
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* ASCII formatted InitiatorName */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* iSCSI target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* ASCII formatted TargetName for IQN */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    if ret:
        # NOTE(review): see tcm_mod_build_FC_include -- dead under py2.
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
    """Dispatch to the protocol-specific *_base.h generator.

    proto_ident must be one of "FC", "SAS" or "iSCSI"; any other value
    prints a diagnostic and exits the script with status 1.

    Fix: the error-path py2-only print statement is converted to a
    py2/py3-compatible print(...) call.
    """
    if proto_ident == "FC":
        tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "SAS":
        tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "iSCSI":
        tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
    else:
        print("Unsupported proto_ident: " + proto_ident)
        sys.exit(1)
    return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Write <fabric_mod_name>_configfs.c: the configfs glue for the new
    fabric module (nodeacl/tpg/wwn make+drop callbacks, the
    target_core_fabric_ops table and module init/exit).

    Depends on fabric_mod_port / fabric_mod_init_port having been set by
    the protocol-specific *_include builder that ran before this.
    """
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        # NOTE(review): dead check -- open() raises on failure.
        tcm_mod_err("Unable to open file: " + f)
    # --- header includes and the module-level fabric configfs pointer ---
    buf = "#include <linux/module.h>\n"
    buf += "#include <linux/moduleparam.h>\n"
    buf += "#include <linux/version.h>\n"
    buf += "#include <generated/utsrelease.h>\n"
    buf += "#include <linux/utsname.h>\n"
    buf += "#include <linux/init.h>\n"
    buf += "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/configfs.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_fabric_configfs.h>\n"
    buf += "#include <target/target_core_configfs.h>\n"
    buf += "#include <target/configfs_macros.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
    buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
    # --- NodeACL make/drop callbacks ---
    buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
    buf += " struct se_portal_group *se_tpg,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
    # WWPN locals only exist for WWN-addressed protocols (FC/SAS).
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n"
    buf += " u32 nexus_depth;\n\n"
    buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n"
    buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
    buf += " if (!se_nacl_new)\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
    buf += " nexus_depth = 1;\n"
    buf += " /*\n"
    buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
    buf += " * when converting a NodeACL from demo mode -> explict\n"
    buf += " */\n"
    buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
    buf += " name, nexus_depth);\n"
    buf += " if (IS_ERR(se_nacl)) {\n"
    buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
    buf += " return se_nacl;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
    buf += " */\n"
    buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return se_nacl;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
    buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
    buf += " kfree(nacl);\n"
    buf += "}\n\n"
    # --- TPG make/drop callbacks ---
    buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
    buf += " struct se_wwn *wwn,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
    buf += " unsigned long tpgt;\n"
    buf += " int ret;\n\n"
    buf += " if (strstr(name, \"tpgt_\") != name)\n"
    buf += " return ERR_PTR(-EINVAL);\n"
    buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
    buf += " return ERR_PTR(-EINVAL);\n\n"
    buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
    buf += " if (!tpg) {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
    buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
    buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
    buf += " &tpg->se_tpg, (void *)tpg,\n"
    buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
    buf += " if (ret < 0) {\n"
    buf += " kfree(tpg);\n"
    buf += " return NULL;\n"
    buf += " }\n"
    buf += " return &tpg->se_tpg;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
    buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
    buf += " core_tpg_deregister(se_tpg);\n"
    buf += " kfree(tpg);\n"
    buf += "}\n\n"
    # --- WWN (lport/tport) make/drop callbacks ---
    buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n\n"
    buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n\n"
    buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
    buf += " if (!" + fabric_mod_port + ") {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
    buf += " kfree(" + fabric_mod_port + ");\n"
    buf += "}\n\n"
    # --- version attribute and the fabric ops table ---
    buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " char *page)\n"
    buf += "{\n"
    buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += "}\n\n"
    buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
    buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
    buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
    buf += " NULL,\n"
    buf += "};\n\n"
    buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
    buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
    buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
    buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
    buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
    buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
    buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
    buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
    buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
    buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
    buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
    buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
    buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
    buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
    buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
    buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
    buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
    buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
    buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
    buf += " .sess_get_initiator_sid = NULL,\n"
    buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
    buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
    buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
    buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
    buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
    buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
    buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
    buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
    buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
    buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
    buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
    buf += " /*\n"
    buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
    buf += " */\n"
    buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
    buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
    buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
    buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
    buf += " .fabric_post_link = NULL,\n"
    buf += " .fabric_pre_unlink = NULL,\n"
    buf += " .fabric_make_np = NULL,\n"
    buf += " .fabric_drop_np = NULL,\n"
    buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
    buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
    buf += "};\n\n"
    # --- configfs registration / deregistration and module init/exit ---
    buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
    buf += "{\n"
    buf += " struct target_fabric_configfs *fabric;\n"
    buf += " int ret;\n\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += " /*\n"
    buf += " * Register the top level struct config_item_type with TCM core\n"
    buf += " */\n"
    # fabric_mod_name[4:] strips the "tcm_" prefix for the configfs name.
    buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
    buf += " if (IS_ERR(fabric)) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
    buf += " return PTR_ERR(fabric);\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
    buf += " */\n"
    buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
    buf += " /*\n"
    buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
    buf += " */\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
    buf += " /*\n"
    buf += " * Register the fabric for use within TCM\n"
    buf += " */\n"
    buf += " ret = target_fabric_configfs_register(fabric);\n"
    buf += " if (ret < 0) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
    buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
    buf += " return ret;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup our local pointer to *fabric\n"
    buf += " */\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
    buf += "{\n"
    buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
    buf += " return;\n\n"
    buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += "};\n\n"
    buf += "static int __init " + fabric_mod_name + "_init(void)\n"
    buf += "{\n"
    buf += " int ret;\n\n"
    buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
    buf += " if (ret < 0)\n"
    buf += " return ret;\n\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
    buf += "{\n"
    buf += " " + fabric_mod_name + "_deregister_configfs();\n"
    buf += "};\n\n"
    buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
    buf += "MODULE_LICENSE(\"GPL\");\n"
    buf += "module_init(" + fabric_mod_name + "_init);\n"
    buf += "module_exit(" + fabric_mod_name + "_exit);\n"
    ret = p.write(buf)
    if ret:
        # NOTE(review): see tcm_mod_build_FC_include -- dead under py2.
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    return
def tcm_mod_scan_fabric_ops(tcm_dir):
    """Scan include/target/target_core_fabric.h for function-pointer
    declaration lines and collect them (rstripped) into the module-level
    ``fabric_ops`` list used later by tcm_mod_dump_fabric_ops().
    """
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"

    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    process_fo = 0;

    p = open(fabric_ops_api, 'r')

    line = p.readline()
    while line:
        # Skip the 'struct target_core_fabric_ops {' opener itself.
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue

        if process_fo == 0:
            # NOTE(review): process_fo flips to 1 on the first line that is
            # not the struct opener, so from then on the whole remainder of
            # the header is scanned, not just the struct body — confirm this
            # is intended against the upstream header layout.
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue

            fabric_ops.append(line.rstrip())
            continue

        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue

        fabric_ops.append(line.rstrip())

    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Emit <mod>_fabric.c (into ``buf``) and <mod>_fabric.h (into ``bufi``)
    containing stub implementations/prototypes for every entry previously
    collected in the global ``fabric_ops`` list by tcm_mod_scan_fabric_ops().

    ``proto_ident`` selects FC / SAS / iSCSI specific helpers in the
    generated switch statements.
    """
    buf = ""
    bufi = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
    print "Writing file: " + f

    p = open(f, 'w')
    # NOTE(review): open() raises on failure and never returns a falsy
    # object, so this guard is effectively dead code.
    if not p:
        tcm_mod_err("Unable to open file: " + f)

    fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
    print "Writing file: " + fi

    pi = open(fi, 'w')
    if not pi:
        tcm_mod_err("Unable to open file: " + fi)

    # Fixed preamble of the generated .c file.
    buf = "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/list.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n"
    buf += "#include <scsi/scsi.h>\n"
    buf += "#include <scsi/scsi_host.h>\n"
    buf += "#include <scsi/scsi_device.h>\n"
    buf += "#include <scsi/scsi_cmnd.h>\n"
    buf += "#include <scsi/libfc.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_configfs.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"

    buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += "	return 1;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"

    buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += "	return 0;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"

    # One stub per scanned function pointer.  NOTE(review): only the
    # get_fabric_name branch ends with 'continue'; a line matching several
    # patterns would emit several stubs — presumably the header makes the
    # patterns mutually exclusive, confirm upstream.
    total_fabric_ops = len(fabric_ops)
    i = 0
    while i < total_fabric_ops:
        fo = fabric_ops[i]
        i += 1
#       print "fabric_ops: " + fo

        if re.search('get_fabric_name', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
            buf += "{\n"
            buf += "	return \"" + fabric_mod_name[4:] + "\";\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
            continue

        if re.search('get_fabric_proto_ident', fo):
            buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += "	u8 proto_id;\n\n"
            buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += "	case SCSI_PROTOCOL_FCP:\n"
                buf += "	default:\n"
                buf += "		proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
                buf += "		break;\n"
            elif proto_ident == "SAS":
                buf += "	case SCSI_PROTOCOL_SAS:\n"
                buf += "	default:\n"
                buf += "		proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
                buf += "		break;\n"
            elif proto_ident == "iSCSI":
                buf += "	case SCSI_PROTOCOL_ISCSI:\n"
                buf += "	default:\n"
                buf += "		proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
                buf += "		break;\n"

            buf += "	}\n\n"
            buf += "	return proto_id;\n"
            buf += "}\n\n"
            bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"

        if re.search('get_wwn', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
            buf += "	return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"

        if re.search('get_tag', fo):
            buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	return tpg->" + fabric_mod_port + "_tpgt;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"

        if re.search('get_default_depth', fo):
            buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"

        if re.search('get_pr_transport_id\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
            buf += "	struct se_portal_group *se_tpg,\n"
            buf += "	struct se_node_acl *se_nacl,\n"
            buf += "	struct t10_pr_registration *pr_reg,\n"
            buf += "	int *format_code,\n"
            buf += "	unsigned char *buf)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += "	int ret = 0;\n\n"
            buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += "	case SCSI_PROTOCOL_FCP:\n"
                buf += "	default:\n"
                buf += "		ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code, buf);\n"
                buf += "		break;\n"
            elif proto_ident == "SAS":
                buf += "	case SCSI_PROTOCOL_SAS:\n"
                buf += "	default:\n"
                buf += "		ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code, buf);\n"
                buf += "		break;\n"
            elif proto_ident == "iSCSI":
                buf += "	case SCSI_PROTOCOL_ISCSI:\n"
                buf += "	default:\n"
                buf += "		ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code, buf);\n"
                buf += "		break;\n"

            buf += "	}\n\n"
            buf += "	return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
            bufi += "			struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += "			int *, unsigned char *);\n"

        if re.search('get_pr_transport_id_len\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
            buf += "	struct se_portal_group *se_tpg,\n"
            buf += "	struct se_node_acl *se_nacl,\n"
            buf += "	struct t10_pr_registration *pr_reg,\n"
            buf += "	int *format_code)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += "	int ret = 0;\n\n"
            buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += "	case SCSI_PROTOCOL_FCP:\n"
                buf += "	default:\n"
                buf += "		ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code);\n"
                buf += "		break;\n"
            elif proto_ident == "SAS":
                buf += "	case SCSI_PROTOCOL_SAS:\n"
                buf += "	default:\n"
                buf += "		ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code);\n"
                buf += "		break;\n"
            elif proto_ident == "iSCSI":
                buf += "	case SCSI_PROTOCOL_ISCSI:\n"
                buf += "	default:\n"
                buf += "		ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += "					format_code);\n"
                buf += "		break;\n"

            buf += "	}\n\n"
            buf += "	return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
            bufi += "			struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += "			int *);\n"

        if re.search('parse_pr_out_transport_id\)\(', fo):
            buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
            buf += "	struct se_portal_group *se_tpg,\n"
            buf += "	const char *buf,\n"
            buf += "	u32 *out_tid_len,\n"
            buf += "	char **port_nexus_ptr)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += "			struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += "	char *tid = NULL;\n\n"
            buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += "	case SCSI_PROTOCOL_FCP:\n"
                buf += "	default:\n"
                buf += "		tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += "					port_nexus_ptr);\n"
            elif proto_ident == "SAS":
                buf += "	case SCSI_PROTOCOL_SAS:\n"
                buf += "	default:\n"
                buf += "		tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += "					port_nexus_ptr);\n"
            elif proto_ident == "iSCSI":
                buf += "	case SCSI_PROTOCOL_ISCSI:\n"
                buf += "	default:\n"
                buf += "		tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += "					port_nexus_ptr);\n"

            buf += "	}\n\n"
            buf += "	return tid;\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
            bufi += "			const char *, u32 *, char **);\n"

        if re.search('alloc_fabric_acl\)\(', fo):
            buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_nacl *nacl;\n\n"
            buf += "	nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
            buf += "	if (!nacl) {\n"
            buf += "		printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
            buf += "		return NULL;\n"
            buf += "	}\n\n"
            buf += "	return &nacl->se_node_acl;\n"
            buf += "}\n\n"
            bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"

        if re.search('release_fabric_acl\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
            buf += "	struct se_portal_group *se_tpg,\n"
            buf += "	struct se_node_acl *se_nacl)\n"
            buf += "{\n"
            buf += "	struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
            buf += "			struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
            buf += "	kfree(nacl);\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
            bufi += "			struct se_node_acl *);\n"

        if re.search('tpg_get_inst_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += "	return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"

        if re.search('\*release_cmd\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"

        if re.search('shutdown_session\)\(', fo):
            buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"

        if re.search('close_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += "	return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"

        if re.search('stop_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
            buf += "{\n"
            buf += "	return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"

        if re.search('fall_back_to_erl0\)\(', fo):
            buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += "	return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"

        if re.search('sess_logged_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"

        if re.search('sess_get_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"

        if re.search('write_pending\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"

        if re.search('write_pending_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"

        if re.search('set_default_node_attributes\)\(', fo):
            buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
            buf += "{\n"
            buf += "	return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"

        if re.search('get_task_tag\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"

        if re.search('get_cmd_state\)\(', fo):
            buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"

        if re.search('queue_data_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"

        if re.search('queue_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"

        if re.search('queue_tm_rsp\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"

        if re.search('get_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"

        if re.search('set_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"

        if re.search('is_state_remove\)\(', fo):
            buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += "	return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"

    # NOTE(review): on Python 2, file.write() returns None so these checks
    # never fire; on Python 3 write() returns a character count and they
    # would misfire — dead/fragile error handling.
    ret = p.write(buf)
    if ret:
        tcm_mod_err("Unable to write f: " + f)

    p.close()

    ret = pi.write(bufi)
    if ret:
        tcm_mod_err("Unable to write fi: " + fi)

    pi.close()
    return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
    """Write the new fabric module's kbuild Makefile.

    Creates <fabric_mod_dir_var>/Makefile listing the module's object
    files and the obj-$(CONFIG_<NAME>) rule.

    Fixes over the original:
    - ``if not p:`` after open() was dead code (open() raises instead of
      returning a falsy object).
    - ``if ret:`` after write() misfires on Python 3, where write()
      returns the character count; writing errors now propagate as the
      usual IOError/OSError instead.
    """
    f = fabric_mod_dir_var + "/Makefile"
    print("Writing file: " + f)

    buf = fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
    buf += "	" + fabric_mod_name + "_configfs.o\n"
    buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"

    # Context manager guarantees the handle is closed even on error.
    with open(f, 'w') as p:
        p.write(buf)
    return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
    """Write the new fabric module's Kconfig fragment.

    Creates <fabric_mod_dir_var>/Kconfig with a tristate config entry
    depending on TARGET_CORE && CONFIGFS_FS.

    Fixes over the original:
    - dead ``if not p:`` check after open() removed (open() raises).
    - dead/Python-3-incorrect ``if ret:`` check after write() removed.
    """
    f = fabric_mod_dir_var + "/Kconfig"
    print("Writing file: " + f)

    buf = "config " + fabric_mod_name.upper() + "\n"
    buf += "	tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
    buf += "	depends on TARGET_CORE && CONFIGFS_FS\n"
    buf += "	default n\n"
    buf += "	---help---\n"
    buf += "	Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"

    with open(f, 'w') as p:
        p.write(buf)
    return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    """Append the new fabric module's obj-$(CONFIG_...) rule to
    drivers/target/Makefile under *tcm_dir*."""
    entry = "obj-$(CONFIG_%s) += %s/\n" % (fabric_mod_name.upper(),
                                           fabric_mod_name.lower())
    makefile_path = tcm_dir + "/drivers/target/Makefile"
    with open(makefile_path, 'a') as makefile:
        makefile.write(entry)
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    """Append a 'source' directive for the new fabric module to
    drivers/target/Kconfig under *tcm_dir*."""
    entry = "source \"drivers/target/%s/Kconfig\"\n" % fabric_mod_name.lower()
    kconfig_path = tcm_dir + "/drivers/target/Kconfig"
    with open(kconfig_path, 'a') as kconfig:
        kconfig.write(entry)
    return
def main(modname, proto_ident):
    """Drive the generation of a new TCM fabric module skeleton.

    modname     -- name of the fabric module to create.
    proto_ident -- one of "FC", "SAS" or "iSCSI"; anything else exits(1).

    Fixes over the original:
    - both interactive prompts were missing the space before "to"
      ("...add <name>to drivers/target/...").
    - the local variable ``input`` shadowed the builtin; renamed.
    """
    # proto_ident = "FC"
    # proto_ident = "SAS"
    # proto_ident = "iSCSI"
    tcm_dir = os.getcwd()
    tcm_dir += "/../../"
    print("tcm_dir: " + tcm_dir)
    fabric_mod_name = modname
    fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
    print("Set fabric_mod_name: " + fabric_mod_name)
    print("Set fabric_mod_dir: " + fabric_mod_dir)
    print("Using proto_ident: " + proto_ident)

    if proto_ident not in ("FC", "SAS", "iSCSI"):
        print("Unsupported proto_ident: " + proto_ident)
        sys.exit(1)

    ret = tcm_mod_create_module_subdir(fabric_mod_dir)
    if ret:
        print("tcm_mod_create_module_subdir() failed because module already exists!")
        sys.exit(1)

    tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_scan_fabric_ops(tcm_dir)
    tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)

    answer = raw_input("Would you like to add " + fabric_mod_name +
                       " to drivers/target/Makefile..? [yes,no]: ")
    if answer in ("yes", "y"):
        tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)

    answer = raw_input("Would you like to add " + fabric_mod_name +
                       " to drivers/target/Kconfig..? [yes,no]: ")
    if answer in ("yes", "y"):
        tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
    return
# Command-line entry point.  NOTE(review): argument parsing (and the
# mandatory-option check, which can call exit()) runs at import time,
# not just under the __main__ guard.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
                  action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
                  action='store', nargs=1, type='string')

(opts, args) = parser.parse_args()

# Both options are required; bail out with usage text if either is absent.
mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option is missing\n"
        parser.print_help()
        exit(-1)

if __name__ == "__main__":
    main(str(opts.modname), opts.protoident)
| gpl-2.0 |
harmy/kbengine | kbe/src/lib/python/Lib/idlelib/configHelpSourceEdit.py | 67 | 6725 | "Dialog to specify or edit the parameters for a user configured help source."
import os
import sys
from tkinter import *
import tkinter.messagebox as tkMessageBox
import tkinter.filedialog as tkFileDialog
class GetHelpSourceDialog(Toplevel):
    """Modal dialog that collects a Help-menu label and a help source
    (web URL or local file path).

    The constructor blocks until the dialog is dismissed.  On OK,
    ``self.result`` holds ``(menu_item, path)``; on Cancel it is None.

    Fixes over the original:
    - MenuOk/PathOk called ``.strip()`` and discarded the result, so
      whitespace-only entries passed validation; the stripped value is
      now actually used.
    - removed a duplicated ``entryMenu.focus_set()`` call.
    - locals no longer shadow the builtins ``dir`` and ``file``.
    """

    def __init__(self, parent, title, menuItem='', filePath=''):
        """Get menu entry and url/local file location for Additional Help.

        User selects a name for the Help resource and provides a web url
        or a local file as its source.  The user can enter a url or browse
        for the file.
        """
        Toplevel.__init__(self, parent)
        self.configure(borderwidth=5)
        self.resizable(height=FALSE, width=FALSE)
        self.title(title)
        self.transient(parent)
        self.grab_set()
        self.protocol("WM_DELETE_WINDOW", self.Cancel)
        self.parent = parent
        self.result = None
        self.CreateWidgets()
        self.menu.set(menuItem)
        self.path.set(filePath)
        self.withdraw()  # hide while setting geometry
        # needs to be done here so that the winfo_reqwidth is valid
        self.update_idletasks()
        # centre dialog over parent:
        self.geometry("+%d+%d" %
                      ((parent.winfo_rootx() + ((parent.winfo_width()/2)
                        - (self.winfo_reqwidth()/2)),
                        parent.winfo_rooty() + ((parent.winfo_height()/2)
                        - (self.winfo_reqheight()/2)))))
        self.deiconify()  # geometry set, unhide
        self.bind('<Return>', self.Ok)
        self.wait_window()

    def CreateWidgets(self):
        """Build the two labelled entries, the Browse button and OK/Cancel."""
        self.menu = StringVar(self)
        self.path = StringVar(self)
        self.fontSize = StringVar(self)  # NOTE(review): unused; kept for compatibility.
        self.frameMain = Frame(self, borderwidth=2, relief=GROOVE)
        self.frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
        labelMenu = Label(self.frameMain, anchor=W, justify=LEFT,
                          text='Menu Item:')
        self.entryMenu = Entry(self.frameMain, textvariable=self.menu,
                               width=30)
        self.entryMenu.focus_set()
        labelPath = Label(self.frameMain, anchor=W, justify=LEFT,
                          text='Help File Path: Enter URL or browse for file')
        self.entryPath = Entry(self.frameMain, textvariable=self.path,
                               width=40)
        labelMenu.pack(anchor=W, padx=5, pady=3)
        self.entryMenu.pack(anchor=W, padx=5, pady=3)
        labelPath.pack(anchor=W, padx=5, pady=3)
        self.entryPath.pack(anchor=W, padx=5, pady=3)
        browseButton = Button(self.frameMain, text='Browse', width=8,
                              command=self.browseFile)
        browseButton.pack(pady=3)
        frameButtons = Frame(self)
        frameButtons.pack(side=BOTTOM, fill=X)
        self.buttonOk = Button(frameButtons, text='OK',
                               width=8, default=ACTIVE, command=self.Ok)
        self.buttonOk.grid(row=0, column=0, padx=5, pady=5)
        self.buttonCancel = Button(frameButtons, text='Cancel',
                                   width=8, command=self.Cancel)
        self.buttonCancel.grid(row=0, column=1, padx=5, pady=5)

    def browseFile(self):
        """Open a file-selection dialog seeded from the current path
        (or a platform-dependent default documentation directory)."""
        filetypes = [
            ("HTML Files", "*.htm *.html", "TEXT"),
            ("PDF Files", "*.pdf", "TEXT"),
            ("Windows Help Files", "*.chm"),
            ("Text Files", "*.txt", "TEXT"),
            ("All Files", "*")]
        path = self.path.get()
        if path:
            start_dir, base = os.path.split(path)
        else:
            base = None
            if sys.platform[:3] == 'win':
                start_dir = os.path.join(os.path.dirname(sys.executable), 'Doc')
                if not os.path.isdir(start_dir):
                    start_dir = os.getcwd()
            else:
                start_dir = os.getcwd()
        opendialog = tkFileDialog.Open(parent=self, filetypes=filetypes)
        chosen = opendialog.show(initialdir=start_dir, initialfile=base)
        if chosen:
            self.path.set(chosen)

    def MenuOk(self):
        "Simple validity check for a sensible menu item name"
        menuOk = True
        # BUGFIX: strip() returns a new string; the original discarded it.
        menu = self.menu.get().strip()
        if not menu:
            tkMessageBox.showerror(title='Menu Item Error',
                                   message='No menu item specified',
                                   parent=self)
            self.entryMenu.focus_set()
            menuOk = False
        elif len(menu) > 30:
            tkMessageBox.showerror(title='Menu Item Error',
                                   message='Menu item too long:'
                                           '\nLimit 30 characters.',
                                   parent=self)
            self.entryMenu.focus_set()
            menuOk = False
        return menuOk

    def PathOk(self):
        "Simple validity check for menu file path"
        pathOk = True
        # BUGFIX: strip() returns a new string; the original discarded it.
        path = self.path.get().strip()
        if not path:  # no path specified
            tkMessageBox.showerror(title='File Path Error',
                                   message='No help file path specified.',
                                   parent=self)
            self.entryPath.focus_set()
            pathOk = False
        elif path.startswith(('www.', 'http')):
            # Web addresses are accepted as-is.
            pass
        else:
            if path[:5] == 'file:':
                path = path[5:]
            if not os.path.exists(path):
                tkMessageBox.showerror(title='File Path Error',
                                       message='Help file path does not exist.',
                                       parent=self)
                self.entryPath.focus_set()
                pathOk = False
        return pathOk

    def Ok(self, event=None):
        """Validate both fields; on success store (menu, path) and close."""
        if self.MenuOk() and self.PathOk():
            self.result = (self.menu.get().strip(),
                           self.path.get().strip())
            if sys.platform == 'darwin':
                path = self.result[1]
                if path.startswith(('www', 'file:', 'http:')):
                    pass
                else:
                    # Mac Safari insists on using the URI form for local files
                    self.result = list(self.result)
                    self.result[1] = "file://" + path
            self.destroy()

    def Cancel(self, event=None):
        """Discard input and close the dialog."""
        self.result = None
        self.destroy()
if __name__ == '__main__':
    # Manual smoke test: show a button that opens the dialog and prints
    # its result to stdout.
    root = Tk()

    def run():
        keySeq = ''
        dlg = GetHelpSourceDialog(root, 'Get Help Source')
        print(dlg.result)

    Button(root, text='Dialog', command=run).pack()
    root.mainloop()
| lgpl-3.0 |
schlueter/ansible | test/units/executor/test_task_executor.py | 33 | 18675 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.task_executor import TaskExecutor, remove_omit
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import action_loader, lookup_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from units.mock.loader import DictDataLoader
class TestTaskExecutor(unittest.TestCase):
    def setUp(self):
        # No shared fixture state; each test builds its own mocks.
        pass
    def tearDown(self):
        # Nothing to clean up; mocks are local to each test.
        pass
def test_task_executor_init(self):
fake_loader = DictDataLoader({})
mock_host = MagicMock()
mock_task = MagicMock()
mock_play_context = MagicMock()
mock_shared_loader = MagicMock()
new_stdin = None
job_vars = dict()
mock_queue = MagicMock()
te = TaskExecutor(
host=mock_host,
task=mock_task,
job_vars=job_vars,
play_context=mock_play_context,
new_stdin=new_stdin,
loader=fake_loader,
shared_loader_obj=mock_shared_loader,
rslt_q=mock_queue,
)
    def test_task_executor_run(self):
        """run() handles no-loop, empty-loop, multi-item-loop and a loop
        whose item evaluation raises AnsibleError (which must surface as a
        'failed' result, not an exception)."""
        fake_loader = DictDataLoader({})

        mock_host = MagicMock()

        mock_task = MagicMock()
        mock_task._role._role_path = '/path/to/role/foo'

        mock_play_context = MagicMock()

        mock_shared_loader = MagicMock()
        mock_queue = MagicMock()

        new_stdin = None
        job_vars = dict()

        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            rslt_q=mock_queue,
        )

        # No loop items: _execute runs once.
        te._get_loop_items = MagicMock(return_value=None)
        te._execute = MagicMock(return_value=dict())
        res = te.run()

        # Empty loop.
        te._get_loop_items = MagicMock(return_value=[])
        res = te.run()

        # Three items delegated to _run_loop.
        te._get_loop_items = MagicMock(return_value=['a', 'b', 'c'])
        te._run_loop = MagicMock(return_value=[dict(item='a', changed=True), dict(item='b', failed=True), dict(item='c')])
        res = te.run()

        # Loop-item evaluation raising must produce a failed result dict.
        te._get_loop_items = MagicMock(side_effect=AnsibleError(""))
        res = te.run()
        self.assertIn("failed", res)
    def test_task_executor_get_loop_items(self):
        """_get_loop_items resolves a with_items loop through the real
        lookup_loader and returns the literal item list."""
        fake_loader = DictDataLoader({})

        mock_host = MagicMock()

        mock_task = MagicMock()
        mock_task.loop_with = 'items'
        mock_task.loop = ['a', 'b', 'c']

        mock_play_context = MagicMock()

        # Real lookup loader so the 'items' lookup plugin actually runs.
        mock_shared_loader = MagicMock()
        mock_shared_loader.lookup_loader = lookup_loader

        new_stdin = None
        job_vars = dict()
        mock_queue = MagicMock()

        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            rslt_q=mock_queue,
        )

        items = te._get_loop_items()
        self.assertEqual(items, ['a', 'b', 'c'])
    def test_task_executor_run_loop(self):
        """_run_loop executes once per item and collects one result dict
        per item (task.copy() is stubbed so each iteration gets a fresh
        mock task)."""
        items = ['a', 'b', 'c']

        fake_loader = DictDataLoader({})

        mock_host = MagicMock()

        def _copy(exclude_parent=False, exclude_tasks=False):
            # Stand-in for Task.copy(): a fresh mock per loop iteration.
            new_item = MagicMock()
            return new_item

        mock_task = MagicMock()
        mock_task.copy.side_effect = _copy

        mock_play_context = MagicMock()

        mock_shared_loader = MagicMock()
        mock_queue = MagicMock()

        new_stdin = None
        job_vars = dict()

        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            rslt_q=mock_queue,
        )

        def _execute(variables):
            # Echo back the loop variable so results can be correlated.
            return dict(item=variables.get('item'))

        te._squash_items = MagicMock(return_value=items)
        te._execute = MagicMock(side_effect=_execute)

        res = te._run_loop(items)
        self.assertEqual(len(res), 3)
    def test_task_executor_squash_items(self):
        """Exhaustive behavior spec for _squash_items: which package-manager
        loops are merged into a single list-valued invocation, and which are
        left untouched.  The conditional stub filters out item 'b' so the
        squashed result is ['a', 'c'] where squashing applies."""
        items = ['a', 'b', 'c']

        fake_loader = DictDataLoader({})

        mock_host = MagicMock()

        loop_var = 'item'

        def _evaluate_conditional(templar, variables):
            # Pretend item 'b' fails its when-condition.
            item = variables.get(loop_var)
            if item == 'b':
                return False
            return True

        mock_task = MagicMock()
        mock_task.evaluate_conditional.side_effect = _evaluate_conditional

        mock_play_context = MagicMock()

        mock_shared_loader = None
        mock_queue = MagicMock()

        new_stdin = None
        job_vars = dict(pkg_mgr='yum')

        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=mock_shared_loader,
            rslt_q=mock_queue,
        )

        # No replacement
        mock_task.action = 'yum'
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        self.assertIsInstance(mock_task.args, MagicMock)

        # Non-squashable action: untouched even with a templated name.
        mock_task.action = 'foo'
        mock_task.args = {'name': '{{item}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        self.assertEqual(mock_task.args, {'name': '{{item}}'})

        # Static name: nothing to squash.
        mock_task.action = 'yum'
        mock_task.args = {'name': 'static'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        self.assertEqual(mock_task.args, {'name': 'static'})

        # Templated name that does not reference the loop var.
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{pkg_mgr}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        self.assertEqual(mock_task.args, {'name': '{{pkg_mgr}}'})

        # Action template that cannot be resolved: untouched.
        mock_task.action = '{{unknown}}'
        mock_task.args = {'name': '{{item}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, ['a', 'b', 'c'])
        self.assertEqual(mock_task.args, {'name': '{{item}}'})

        # Could do something like this to recover from bad deps in a package
        job_vars = dict(pkg_mgr='yum', packages=['a', 'b'])
        items = ['absent', 'latest']
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{ packages }}', 'state': '{{ item }}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{ packages }}', 'state': '{{ item }}'})

        # Maybe should raise an error in this case.  The user would have to specify:
        # - yum: name="{{ packages[item] }}"
        #   with_items:
        #     - ['a', 'b']
        #     - ['foo', 'bar']
        # you can't use a list as a dict key so that would probably throw
        # an error later.  If so, we can throw it now instead.
        # Squashing in this case would not be intuitive as the user is being
        # explicit in using each list entry as a key.
        job_vars = dict(pkg_mgr='yum', packages={"a": "foo", "b": "bar", "foo": "baz", "bar": "quux"})
        items = [['a', 'b'], ['foo', 'bar']]
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{ packages[item] }}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{ packages[item] }}'})

        # Replaces
        items = ['a', 'b', 'c']
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{item}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, [['a', 'c']])
        self.assertEqual(mock_task.args, {'name': ['a', 'c']})

        mock_task.action = '{{pkg_mgr}}'
        mock_task.args = {'name': '{{item}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        self.assertEqual(new_items, [['a', 'c']])
        self.assertEqual(mock_task.args, {'name': ['a', 'c']})

        # New loop_var
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{a_loop_var_item}}'}
        mock_task.loop_control = {'loop_var': 'a_loop_var_item'}
        loop_var = 'a_loop_var_item'
        new_items = te._squash_items(items=items, loop_var='a_loop_var_item', variables=job_vars)
        self.assertEqual(new_items, [['a', 'c']])
        self.assertEqual(mock_task.args, {'name': ['a', 'c']})
        loop_var = 'item'

        #
        # These are presently not optimized but could be in the future.
        # Expected output if they were optimized is given as a comment
        # Please move these to a different section if they are optimized
        #

        # Squashing lists
        job_vars = dict(pkg_mgr='yum')
        items = [['a', 'b'], ['foo', 'bar']]
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{ item }}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        # self.assertEqual(new_items, [['a', 'b', 'foo', 'bar']])
        # self.assertEqual(mock_task.args, {'name': ['a', 'b', 'foo', 'bar']})
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{ item }}'})

        # Retrieving from a dict
        items = ['a', 'b', 'foo']
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{ packages[item] }}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        # self.assertEqual(new_items, [['foo', 'baz']])
        # self.assertEqual(mock_task.args, {'name': ['foo', 'baz']})
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{ packages[item] }}'})

        # Another way to retrieve from a dict
        job_vars = dict(pkg_mgr='yum')
        items = [{'package': 'foo'}, {'package': 'bar'}]
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{ item["package"] }}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        # self.assertEqual(new_items, [['foo', 'bar']])
        # self.assertEqual(mock_task.args, {'name': ['foo', 'bar']})
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{ item["package"] }}'})

        items = [
            dict(name='a', state='present'),
            dict(name='b', state='present'),
            dict(name='c', state='present'),
        ]
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{item.name}}', 'state': '{{item.state}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        # self.assertEqual(new_items, [dict(name=['a', 'b', 'c'], state='present')])
        # self.assertEqual(mock_task.args, {'name': ['a', 'b', 'c'], 'state': 'present'})
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})

        items = [
            dict(name='a', state='present'),
            dict(name='b', state='present'),
            dict(name='c', state='absent'),
        ]
        mock_task.action = 'yum'
        mock_task.args = {'name': '{{item.name}}', 'state': '{{item.state}}'}
        new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
        # self.assertEqual(new_items, [dict(name=['a', 'b'], state='present'),
        #                              dict(name='c', state='absent')])
        # self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
        self.assertEqual(new_items, items)
        self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
    def test_task_executor_execute(self):
        """Smoke-test TaskExecutor._execute() across several task configurations.

        The task/play-context/connection objects are all MagicMocks; the same
        executor instance is re-run after mutating the mock task, so the
        statement order below is load-bearing.
        """
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        mock_task.args = dict()
        mock_task.retries = 0
        mock_task.delay = -1
        mock_task.register = 'foo'
        mock_task.until = None
        mock_task.changed_when = None
        mock_task.failed_when = None
        mock_task.post_validate.return_value = None
        # mock_task.async_val cannot be left unset, because on Python 3 MagicMock()
        # > 0 raises a TypeError There are two reasons for using the value 1
        # here: on Python 2 comparing MagicMock() > 0 returns True, and the
        # other reason is that if I specify 0 here, the test fails. ;)
        mock_task.async_val = 1
        mock_task.poll = 0
        mock_play_context = MagicMock()
        mock_play_context.post_validate.return_value = None
        mock_play_context.update_vars.return_value = None
        mock_connection = MagicMock()
        mock_connection.set_host_overrides.return_value = None
        mock_connection._connect.return_value = None
        mock_action = MagicMock()
        mock_queue = MagicMock()
        shared_loader = None
        new_stdin = None
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=shared_loader,
            rslt_q=mock_queue,
        )
        # Bypass real connection and action-plugin resolution.
        te._get_connection = MagicMock(return_value=mock_connection)
        te._get_action_handler = MagicMock(return_value=mock_action)
        mock_action.run.return_value = dict(ansible_facts=dict())
        # 1) plain run, no conditionals set
        res = te._execute()
        # 2) with a changed_when expression
        mock_task.changed_when = MagicMock(return_value=AnsibleUnicode("1 == 1"))
        res = te._execute()
        # 3) with a failed_when expression (changed_when cleared first)
        mock_task.changed_when = None
        mock_task.failed_when = MagicMock(return_value=AnsibleUnicode("1 == 1"))
        res = te._execute()
        # 4) task skipped because its conditional evaluates to False
        mock_task.failed_when = None
        mock_task.evaluate_conditional.return_value = False
        res = te._execute()
        # 5) an 'include' task with raw params takes a different code path
        mock_task.evaluate_conditional.return_value = True
        mock_task.args = dict(_raw_params='foo.yml', a='foo', b='bar')
        mock_task.action = 'include'
        res = te._execute()
    def test_task_executor_poll_async_result(self):
        """Exercise TaskExecutor._poll_async_result() with failing and succeeding
        async-status results, by patching the action loader to return canned
        async_status actions.
        """
        fake_loader = DictDataLoader({})
        mock_host = MagicMock()
        mock_task = MagicMock()
        # Tiny async/poll intervals keep the polling loop fast.
        mock_task.async_val = 0.1
        mock_task.poll = 0.05
        mock_play_context = MagicMock()
        mock_connection = MagicMock()
        mock_action = MagicMock()
        mock_queue = MagicMock()
        shared_loader = MagicMock()
        shared_loader.action_loader = action_loader
        new_stdin = None
        job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
        te = TaskExecutor(
            host=mock_host,
            task=mock_task,
            job_vars=job_vars,
            play_context=mock_play_context,
            new_stdin=new_stdin,
            loader=fake_loader,
            shared_loader_obj=shared_loader,
            rslt_q=mock_queue,
        )
        te._connection = MagicMock()
        def _get(*args, **kwargs):
            # async_status stand-in that never reports completion
            mock_action = MagicMock()
            mock_action.run.return_value = dict(stdout='')
            return mock_action
        # testing with some bad values in the result passed to poll async,
        # and with a bad value returned from the mock action
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(), templar=mock_templar)
            self.assertIn('failed', res)
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertIn('failed', res)
        def _get(*args, **kwargs):
            # async_status stand-in that reports immediate completion
            mock_action = MagicMock()
            mock_action.run.return_value = dict(finished=1)
            return mock_action
        # now testing with good values
        with patch.object(action_loader, 'get', _get):
            mock_templar = MagicMock()
            res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
            self.assertEqual(res, dict(finished=1))
def test_recursive_remove_omit(self):
omit_token = 'POPCORN'
data = {
'foo': 'bar',
'baz': 1,
'qux': ['one', 'two', 'three'],
'subdict': {
'remove': 'POPCORN',
'keep': 'not_popcorn',
'subsubdict': {
'remove': 'POPCORN',
'keep': 'not_popcorn',
},
'a_list': ['POPCORN'],
},
'a_list': ['POPCORN'],
}
expected = {
'foo': 'bar',
'baz': 1,
'qux': ['one', 'two', 'three'],
'subdict': {
'keep': 'not_popcorn',
'subsubdict': {
'keep': 'not_popcorn',
},
'a_list': ['POPCORN'],
},
'a_list': ['POPCORN'],
}
self.assertEqual(remove_omit(data, omit_token), expected)
| gpl-3.0 |
cfelton/myhdl | myhdl/test/bugs/test_issue_117.py | 3 | 1623 | from __future__ import absolute_import
import myhdl
from myhdl import *
from myhdl.conversion import analyze
def issue_117(clk, sdi, pdo, sel, const=False):
    """
    Shift/delay register that splices a constant into the concatenation.

    Regression design for MyHDL issue #117: ``concat()`` with a bool or
    intbv constant operand must survive VHDL/Verilog conversion.  ``const``
    may be a bool (1 bit wide) or an intbv (len(const) bits wide).

    NOTE: MyHDL converts this generator by analyzing its Python source, so
    the body must stay within the convertible subset.
    """
    assert isinstance(const, (bool, intbv))
    delay_reg = Signal(intbv(0)[8:])
    rlen = len(pdo)
    # A bare bool contributes one bit to the concat; an intbv its full width.
    plen = 1 if isinstance(const, bool) else len(const)
    @always(clk.posedge)
    def rtl():
        # sel picks where the constant is spliced into the shifted word:
        # 0 -> MSB side, 1 -> between register slice and serial input,
        # 2 -> LSB side, after the serial input.
        if sel == 0:
            delay_reg.next = concat(const, delay_reg[rlen-plen-1:1], sdi)
        elif sel == 1:
            delay_reg.next = concat(delay_reg[rlen-1:plen+1], const, sdi)
        elif sel == 2:
            delay_reg.next = concat(delay_reg[rlen-1:plen+1], sdi, const)
        pdo.next = delay_reg
    return rtl
def test_issue_117_1():
    """Conversion analysis must succeed when const is a plain bool value."""
    clk = Signal(bool(0))
    sdi = Signal(bool(0))
    pdo = Signal(intbv(0)[8:])
    sel = Signal(intbv(0, min=0, max=3))
    toVHDL.name = 'issue_117_1'
    toVerilog.name = 'issue_117_1'
    assert analyze(issue_117, clk, sdi, pdo, sel, const=bool(0)) == 0
def test_issue_117_2():
    """Conversion analysis must succeed when const is the literal False."""
    clk = Signal(bool(0))
    sdi = Signal(bool(0))
    pdo = Signal(intbv(0)[8:])
    sel = Signal(intbv(0, min=0, max=3))
    toVHDL.name = 'issue_117_2'
    toVerilog.name = 'issue_117_2'
    assert analyze(issue_117, clk, sdi, pdo, sel, const=False) == 0
def test_issue_117_3():
    """Conversion analysis must succeed when const is a 1-bit intbv."""
    clk = Signal(bool(0))
    sdi = Signal(bool(0))
    pdo = Signal(intbv(0)[8:])
    sel = Signal(intbv(0, min=0, max=3))
    toVHDL.name = 'issue_117_3'
    toVerilog.name = 'issue_117_3'
    assert analyze(issue_117, clk, sdi, pdo, sel, const=intbv(0)[1:]) == 0
if __name__ == '__main__':
    # Standalone run: point the analyzer at the Verilog simulator and run
    # the first variant only; the others are exercised by the test runner.
    analyze.simulator='vlog'
    test_issue_117_1()
| lgpl-2.1 |
doismellburning/django | tests/view_tests/tests/test_static.py | 2 | 5445 | from __future__ import unicode_literals
import mimetypes
from os import path
import unittest
from django.conf.urls.static import static
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class StaticTests(SimpleTestCase):
    """Tests django views in django/views/static.py"""
    # All requests go to '/<prefix>/<filename>'; subclasses may serve the
    # same files through a different URL configuration.
    prefix = 'site_media'
    def test_serve(self):
        "The static view can serve static media"
        media_files = ['file.txt', 'file.txt.gz']
        for filename in media_files:
            response = self.client.get('/%s/%s' % (self.prefix, filename))
            # b''.join(response) consumes the streaming response content.
            response_content = b''.join(response)
            file_path = path.join(media_dir, filename)
            with open(file_path, 'rb') as fp:
                self.assertEqual(fp.read(), response_content)
            self.assertEqual(len(response_content), int(response['Content-Length']))
            # guess_type()[1] is the encoding ('gzip' for .gz, None otherwise).
            self.assertEqual(mimetypes.guess_type(file_path)[1], response.get('Content-Encoding', None))
    def test_chunked(self):
        "The static view should stream files in chunks to avoid large memory usage"
        response = self.client.get('/%s/%s' % (self.prefix, 'long-line.txt'))
        first_chunk = next(response.streaming_content)
        self.assertEqual(len(first_chunk), FileResponse.block_size)
        second_chunk = next(response.streaming_content)
        # strip() to prevent OS line endings from causing differences
        self.assertEqual(len(second_chunk.strip()), 1449)
    def test_unknown_mime_type(self):
        # Files with an unrecognized extension fall back to octet-stream.
        response = self.client.get('/%s/file.unknown' % self.prefix)
        self.assertEqual('application/octet-stream', response['Content-Type'])
        response.close()
    def test_copes_with_empty_path_component(self):
        # A doubled slash in the URL must not break file resolution.
        file_name = 'file.txt'
        response = self.client.get('/%s//%s' % (self.prefix, file_name))
        response_content = b''.join(response)
        with open(path.join(media_dir, file_name), 'rb') as fp:
            self.assertEqual(fp.read(), response_content)
    def test_is_modified_since(self):
        # If-Modified-Since in the distant past: full content is served.
        file_name = 'file.txt'
        response = self.client.get('/%s/%s' % (self.prefix, file_name),
                                   HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT')
        response_content = b''.join(response)
        with open(path.join(media_dir, file_name), 'rb') as fp:
            self.assertEqual(fp.read(), response_content)
    def test_not_modified_since(self):
        file_name = 'file.txt'
        response = self.client.get(
            '/%s/%s' % (self.prefix, file_name),
            HTTP_IF_MODIFIED_SINCE='Mon, 18 Jan 2038 05:14:07 GMT'
            # This is 24h before max Unix time. Remember to fix Django and
            # update this test well before 2038 :)
        )
        self.assertIsInstance(response, HttpResponseNotModified)
    def test_invalid_if_modified_since(self):
        """Handle bogus If-Modified-Since values gracefully
        Assume that a file is modified since an invalid timestamp as per RFC
        2616, section 14.25.
        """
        file_name = 'file.txt'
        invalid_date = 'Mon, 28 May 999999999999 28:25:26 GMT'
        response = self.client.get('/%s/%s' % (self.prefix, file_name),
                                   HTTP_IF_MODIFIED_SINCE=invalid_date)
        response_content = b''.join(response)
        with open(path.join(media_dir, file_name), 'rb') as fp:
            self.assertEqual(fp.read(), response_content)
        self.assertEqual(len(response_content), int(response['Content-Length']))
    def test_invalid_if_modified_since2(self):
        """Handle even more bogus If-Modified-Since values gracefully
        Assume that a file is modified since an invalid timestamp as per RFC
        2616, section 14.25.
        """
        file_name = 'file.txt'
        invalid_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT'
        response = self.client.get('/%s/%s' % (self.prefix, file_name),
                                   HTTP_IF_MODIFIED_SINCE=invalid_date)
        response_content = b''.join(response)
        with open(path.join(media_dir, file_name), 'rb') as fp:
            self.assertEqual(fp.read(), response_content)
        self.assertEqual(len(response_content), int(response['Content-Length']))
    def test_404(self):
        # Missing files return a plain 404.
        response = self.client.get('/%s/non_existing_resource' % self.prefix)
        self.assertEqual(404, response.status_code)
class StaticHelperTest(StaticTests):
    """
    Re-run every StaticTests case against URL patterns installed through the
    ``static()`` URL-helper rather than the hand-written ones.
    """
    def setUp(self):
        super(StaticHelperTest, self).setUp()
        # Snapshot the module-level urlpatterns so tearDown can restore them.
        self._old_views_urlpatterns = list(urls.urlpatterns)
        urls.urlpatterns += static('/media/', document_root=media_dir)
    def tearDown(self):
        super(StaticHelperTest, self).tearDown()
        # Undo the patterns appended in setUp.
        urls.urlpatterns = self._old_views_urlpatterns
class StaticUtilsTests(unittest.TestCase):
    def test_was_modified_since_fp(self):
        """
        A floating-point mtime must not make was_modified_since() report a
        modification (regression test for #18675).
        """
        fractional_mtime = 1343416141.107817
        if_modified_since = http_date(fractional_mtime)
        modified = was_modified_since(if_modified_since, fractional_mtime)
        self.assertFalse(modified)
| bsd-3-clause |
kursitet/edx-platform | lms/djangoapps/courseware/tests/test_model_data.py | 30 | 19792 | """
Test for lms courseware app, module data (runtime data storage for XBlocks)
"""
import json
from mock import Mock, patch
from nose.plugins.attrib import attr
from functools import partial
from courseware.model_data import DjangoKeyValueStore, FieldDataCache, InvalidScopeError
from courseware.models import StudentModule, XModuleUserStateSummaryField
from courseware.models import XModuleStudentInfoField, XModuleStudentPrefsField
from student.tests.factories import UserFactory
from courseware.tests.factories import StudentModuleFactory as cmfStudentModuleFactory, location, course_id
from courseware.tests.factories import UserStateSummaryFactory
from courseware.tests.factories import StudentPrefsFactory, StudentInfoFactory
from xblock.fields import Scope, BlockScope, ScopeIds
from xblock.exceptions import KeyValueMultiSaveError
from xblock.core import XBlock
from django.test import TestCase
from django.db import DatabaseError
def mock_field(scope, name):
    """Build a Mock standing in for an XBlock field with the given scope and name."""
    fake = Mock()
    fake.scope = scope
    # Assigned after construction on purpose: Mock(name=...) configures the
    # mock's repr name instead of setting a ``name`` attribute.
    fake.name = name
    return fake
def mock_descriptor(fields=None):
    """
    Build a Mock XBlock descriptor exposing *fields* (default: no fields).

    The original signature used a mutable default (``fields=[]``); because the
    returned mock stores a reference to that list, the single shared default
    object could leak state between calls.  ``None`` plus a fresh list per
    call is behaviorally equivalent for all existing callers.
    """
    if fields is None:
        fields = []
    descriptor = Mock(entry_point=XBlock.entry_point)
    descriptor.scope_ids = ScopeIds('user1', 'mock_problem', location('def_id'), location('usage_id'))
    descriptor.module_class.fields.values.return_value = fields
    descriptor.fields.values.return_value = fields
    descriptor.module_class.__name__ = 'MockProblemModule'
    return descriptor
# The user ids here are 1 because we make a student in the setUp functions, and
# they get an id of 1. There's an assertion in setUp to ensure that assumption
# is still true.
# Each partial pre-binds (scope, user_id, block_scope_id) of DjangoKeyValueStore.Key;
# tests then supply only the field name, e.g. ``user_state_key('a_field')``.
user_state_summary_key = partial(DjangoKeyValueStore.Key, Scope.user_state_summary, None, location('usage_id'))
settings_key = partial(DjangoKeyValueStore.Key, Scope.settings, None, location('usage_id'))
user_state_key = partial(DjangoKeyValueStore.Key, Scope.user_state, 1, location('usage_id'))
prefs_key = partial(DjangoKeyValueStore.Key, Scope.preferences, 1, 'mock_problem')
user_info_key = partial(DjangoKeyValueStore.Key, Scope.user_info, 1, None)
class StudentModuleFactory(cmfStudentModuleFactory):
    # Factory specialized to the fixed module location and course id used
    # throughout this test module.
    module_state_key = location('usage_id')
    course_id = course_id
@attr('shard_1')
class TestInvalidScopes(TestCase):
    """Every KVS operation must raise InvalidScopeError for scopes the Django
    key-value store does not support (definition/type/all block scopes)."""
    def setUp(self):
        super(TestInvalidScopes, self).setUp()
        self.user = UserFactory.create(username='user')
        self.field_data_cache = FieldDataCache([mock_descriptor([mock_field(Scope.user_state, 'a_field')])], course_id, self.user)
        self.kvs = DjangoKeyValueStore(self.field_data_cache)
    def test_invalid_scopes(self):
        for scope in (Scope(user=True, block=BlockScope.DEFINITION),
                      Scope(user=False, block=BlockScope.TYPE),
                      Scope(user=False, block=BlockScope.ALL)):
            key = DjangoKeyValueStore.Key(scope, None, None, 'field')
            # Every public KVS entry point must reject the scope uniformly.
            self.assertRaises(InvalidScopeError, self.kvs.get, key)
            self.assertRaises(InvalidScopeError, self.kvs.set, key, 'value')
            self.assertRaises(InvalidScopeError, self.kvs.delete, key)
            self.assertRaises(InvalidScopeError, self.kvs.has, key)
            self.assertRaises(InvalidScopeError, self.kvs.set_many, {key: 'value'})
@attr('shard_1')
class OtherUserFailureTestMixin(object):
    """
    Mixin class to add test cases for failures when a user trying to use the kvs is not
    the one that instantiated the kvs.
    Doing a mixin rather than modifying StorageTestBase (below) because some scopes don't fail in this case, because
    they aren't bound to a particular user
    assumes that this is mixed into a class that defines other_key_factory and existing_field_name
    """
    def test_other_user_kvs_get_failure(self):
        """
        Test for assert failure when a user who didn't create the kvs tries to read from it
        """
        with self.assertRaises(AssertionError):
            self.kvs.get(self.other_key_factory(self.existing_field_name))
    def test_other_user_kvs_set_failure(self):
        """
        Test for assert failure when a user who didn't create the kvs tries to write to it
        """
        with self.assertRaises(AssertionError):
            self.kvs.set(self.other_key_factory(self.existing_field_name), "new_value")
@attr('shard_1')
class TestStudentModuleStorage(OtherUserFailureTestMixin, TestCase):
    """Tests for user_state storage via StudentModule"""
    # NOTE: the assertNumQueries() counts below encode the exact DB access
    # pattern of the key-value store; they must be updated in lock-step with
    # any storage-layer change.
    other_key_factory = partial(DjangoKeyValueStore.Key, Scope.user_state, 2, location('usage_id'))  # user_id=2, not 1
    existing_field_name = "a_field"
    def setUp(self):
        super(TestStudentModuleStorage, self).setUp()
        student_module = StudentModuleFactory(state=json.dumps({'a_field': 'a_value', 'b_field': 'b_value'}))
        self.user = student_module.student
        self.assertEqual(self.user.id, 1)   # check our assumption hard-coded in the key functions above.
        # There should be only one query to load a single descriptor with a single user_state field
        with self.assertNumQueries(1):
            self.field_data_cache = FieldDataCache(
                [mock_descriptor([mock_field(Scope.user_state, 'a_field')])], course_id, self.user
            )
        self.kvs = DjangoKeyValueStore(self.field_data_cache)
    def test_get_existing_field(self):
        "Test that getting an existing field in an existing StudentModule works"
        # This should only read from the cache, not the database
        with self.assertNumQueries(0):
            self.assertEquals('a_value', self.kvs.get(user_state_key('a_field')))
    def test_get_missing_field(self):
        "Test that getting a missing field from an existing StudentModule raises a KeyError"
        # This should only read from the cache, not the database
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.get, user_state_key('not_a_field'))
    def test_set_existing_field(self):
        "Test that setting an existing user_state field changes the value"
        # We are updating a problem, so we write to courseware_studentmodulehistory
        # as well as courseware_studentmodule. We also need to read the database
        # to discover if something other than the DjangoXBlockUserStateClient
        # has written to the StudentModule (such as UserStateCache setting the score
        # on the StudentModule).
        with self.assertNumQueries(3):
            self.kvs.set(user_state_key('a_field'), 'new_value')
        self.assertEquals(1, StudentModule.objects.all().count())
        self.assertEquals({'b_field': 'b_value', 'a_field': 'new_value'}, json.loads(StudentModule.objects.all()[0].state))
    def test_set_missing_field(self):
        "Test that setting a new user_state field changes the value"
        # We are updating a problem, so we write to courseware_studentmodulehistory
        # as well as courseware_studentmodule. We also need to read the database
        # to discover if something other than the DjangoXBlockUserStateClient
        # has written to the StudentModule (such as UserStateCache setting the score
        # on the StudentModule).
        with self.assertNumQueries(3):
            self.kvs.set(user_state_key('not_a_field'), 'new_value')
        self.assertEquals(1, StudentModule.objects.all().count())
        self.assertEquals({'b_field': 'b_value', 'a_field': 'a_value', 'not_a_field': 'new_value'}, json.loads(StudentModule.objects.all()[0].state))
    def test_delete_existing_field(self):
        "Test that deleting an existing field removes it from the StudentModule"
        # We are updating a problem, so we write to courseware_studentmodulehistory
        # as well as courseware_studentmodule. We also need to read the database
        # to discover if something other than the DjangoXBlockUserStateClient
        # has written to the StudentModule (such as UserStateCache setting the score
        # on the StudentModule).
        with self.assertNumQueries(3):
            self.kvs.delete(user_state_key('a_field'))
        self.assertEquals(1, StudentModule.objects.all().count())
        self.assertRaises(KeyError, self.kvs.get, user_state_key('not_a_field'))
    def test_delete_missing_field(self):
        "Test that deleting a missing field from an existing StudentModule raises a KeyError"
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.delete, user_state_key('not_a_field'))
        self.assertEquals(1, StudentModule.objects.all().count())
        self.assertEquals({'b_field': 'b_value', 'a_field': 'a_value'}, json.loads(StudentModule.objects.all()[0].state))
    def test_has_existing_field(self):
        "Test that `has` returns True for existing fields in StudentModules"
        with self.assertNumQueries(0):
            self.assertTrue(self.kvs.has(user_state_key('a_field')))
    def test_has_missing_field(self):
        "Test that `has` returns False for missing fields in StudentModule"
        with self.assertNumQueries(0):
            self.assertFalse(self.kvs.has(user_state_key('not_a_field')))
    def construct_kv_dict(self):
        """Construct a kv_dict that can be passed to set_many"""
        key1 = user_state_key('field_a')
        key2 = user_state_key('field_b')
        new_value = 'new value'
        newer_value = 'newer value'
        return {key1: new_value, key2: newer_value}
    def test_set_many(self):
        "Test setting many fields that are scoped to Scope.user_state"
        kv_dict = self.construct_kv_dict()
        # Scope.user_state is stored in a single row in the database, so we only
        # need to send a single update to that table.
        # We also are updating a problem, so we write to courseware student module history
        # We also need to read the database to discover if something other than the
        # DjangoXBlockUserStateClient has written to the StudentModule (such as
        # UserStateCache setting the score on the StudentModule).
        with self.assertNumQueries(3):
            self.kvs.set_many(kv_dict)
        for key in kv_dict:
            self.assertEquals(self.kvs.get(key), kv_dict[key])
    def test_set_many_failure(self):
        "Test failures when setting many fields that are scoped to Scope.user_state"
        kv_dict = self.construct_kv_dict()
        # because we're patching the underlying save, we need to ensure the
        # fields are in the cache
        for key in kv_dict:
            self.kvs.set(key, 'test_value')
        with patch('django.db.models.Model.save', side_effect=DatabaseError):
            with self.assertRaises(KeyValueMultiSaveError) as exception_context:
                self.kvs.set_many(kv_dict)
        # user_state writes all fields in one row, so a failed save means
        # nothing at all was persisted.
        self.assertEquals(exception_context.exception.saved_field_names, [])
@attr('shard_1')
class TestMissingStudentModule(TestCase):
    """KVS behavior when no StudentModule row exists yet for the user/module."""
    def setUp(self):
        super(TestMissingStudentModule, self).setUp()
        self.user = UserFactory.create(username='user')
        self.assertEqual(self.user.id, 1)   # check our assumption hard-coded in the key functions above.
        # The descriptor has no fields, so FDC shouldn't send any queries
        with self.assertNumQueries(0):
            self.field_data_cache = FieldDataCache([mock_descriptor()], course_id, self.user)
        self.kvs = DjangoKeyValueStore(self.field_data_cache)
    def test_get_field_from_missing_student_module(self):
        "Test that getting a field from a missing StudentModule raises a KeyError"
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.get, user_state_key('a_field'))
    def test_set_field_in_missing_student_module(self):
        "Test that setting a field in a missing StudentModule creates the student module"
        self.assertEquals(0, len(self.field_data_cache))
        self.assertEquals(0, StudentModule.objects.all().count())
        # We are updating a problem, so we write to courseware_studentmodulehistory
        # as well as courseware_studentmodule. We also need to read the database
        # to discover if something other than the DjangoXBlockUserStateClient
        # has written to the StudentModule (such as UserStateCache setting the score
        # on the StudentModule).
        with self.assertNumQueries(5):
            self.kvs.set(user_state_key('a_field'), 'a_value')
        self.assertEquals(1, sum(len(cache) for cache in self.field_data_cache.cache.values()))
        self.assertEquals(1, StudentModule.objects.all().count())
        student_module = StudentModule.objects.all()[0]
        self.assertEquals({'a_field': 'a_value'}, json.loads(student_module.state))
        self.assertEquals(self.user, student_module.student)
        self.assertEquals(location('usage_id').replace(run=None), student_module.module_state_key)
        self.assertEquals(course_id, student_module.course_id)
    def test_delete_field_from_missing_student_module(self):
        "Test that deleting a field from a missing StudentModule raises a KeyError"
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.delete, user_state_key('a_field'))
    def test_has_field_for_missing_student_module(self):
        "Test that `has` returns False for missing StudentModules"
        with self.assertNumQueries(0):
            self.assertFalse(self.kvs.has(user_state_key('a_field')))
@attr('shard_1')
class StorageTestBase(object):
    """
    A base class for that gets subclassed when testing each of the scopes.
    """
    # Disable pylint warnings that arise because of the way the child classes call
    # this base class -- pylint's static analysis can't keep up with it.
    # pylint: disable=no-member, not-callable
    # Subclasses must provide all four of these class attributes.
    factory = None
    scope = None
    key_factory = None
    storage_class = None
    def setUp(self):
        # NOTE(review): unlike the other fixtures in this module, this setUp
        # does not call super().setUp() -- confirm that is intentional.
        field_storage = self.factory.create()
        if hasattr(field_storage, 'student'):
            self.user = field_storage.student
        else:
            self.user = UserFactory.create()
        self.mock_descriptor = mock_descriptor([
            mock_field(self.scope, 'existing_field'),
            mock_field(self.scope, 'other_existing_field')])
        # Each field is stored as a separate row in the table,
        # but we can query them in a single query
        with self.assertNumQueries(1):
            self.field_data_cache = FieldDataCache([self.mock_descriptor], course_id, self.user)
        self.kvs = DjangoKeyValueStore(self.field_data_cache)
    def test_set_and_get_existing_field(self):
        with self.assertNumQueries(1):
            self.kvs.set(self.key_factory('existing_field'), 'test_value')
        with self.assertNumQueries(0):
            self.assertEquals('test_value', self.kvs.get(self.key_factory('existing_field')))
    def test_get_existing_field(self):
        "Test that getting an existing field in an existing Storage Field works"
        with self.assertNumQueries(0):
            self.assertEquals('old_value', self.kvs.get(self.key_factory('existing_field')))
    def test_get_missing_field(self):
        "Test that getting a missing field from an existing Storage Field raises a KeyError"
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.get, self.key_factory('missing_field'))
    def test_set_existing_field(self):
        "Test that setting an existing field changes the value"
        with self.assertNumQueries(1):
            self.kvs.set(self.key_factory('existing_field'), 'new_value')
        self.assertEquals(1, self.storage_class.objects.all().count())
        self.assertEquals('new_value', json.loads(self.storage_class.objects.all()[0].value))
    def test_set_missing_field(self):
        "Test that setting a new field changes the value"
        with self.assertNumQueries(1):
            self.kvs.set(self.key_factory('missing_field'), 'new_value')
        self.assertEquals(2, self.storage_class.objects.all().count())
        self.assertEquals('old_value', json.loads(self.storage_class.objects.get(field_name='existing_field').value))
        self.assertEquals('new_value', json.loads(self.storage_class.objects.get(field_name='missing_field').value))
    def test_delete_existing_field(self):
        "Test that deleting an existing field removes it"
        with self.assertNumQueries(1):
            self.kvs.delete(self.key_factory('existing_field'))
        self.assertEquals(0, self.storage_class.objects.all().count())
    def test_delete_missing_field(self):
        "Test that deleting a missing field from an existing Storage Field raises a KeyError"
        with self.assertNumQueries(0):
            self.assertRaises(KeyError, self.kvs.delete, self.key_factory('missing_field'))
        self.assertEquals(1, self.storage_class.objects.all().count())
    def test_has_existing_field(self):
        "Test that `has` returns True for an existing Storage Field"
        with self.assertNumQueries(0):
            self.assertTrue(self.kvs.has(self.key_factory('existing_field')))
    def test_has_missing_field(self):
        "Test that `has` return False for an existing Storage Field"
        with self.assertNumQueries(0):
            self.assertFalse(self.kvs.has(self.key_factory('missing_field')))
    def construct_kv_dict(self):
        """Construct a kv_dict that can be passed to set_many"""
        key1 = self.key_factory('existing_field')
        key2 = self.key_factory('other_existing_field')
        new_value = 'new value'
        newer_value = 'newer value'
        return {key1: new_value, key2: newer_value}
    def test_set_many(self):
        """Test that setting many regular fields at the same time works"""
        kv_dict = self.construct_kv_dict()
        # Each field is a separate row in the database, hence
        # a separate query
        with self.assertNumQueries(len(kv_dict)):
            self.kvs.set_many(kv_dict)
        for key in kv_dict:
            self.assertEquals(self.kvs.get(key), kv_dict[key])
    def test_set_many_failure(self):
        """Test that setting many regular fields with a DB error """
        kv_dict = self.construct_kv_dict()
        for key in kv_dict:
            with self.assertNumQueries(1):
                self.kvs.set(key, 'test value')
        # First save succeeds, second raises; the error must report which
        # fields were persisted before the failure.
        with patch('django.db.models.Model.save', side_effect=[None, DatabaseError]):
            with self.assertRaises(KeyValueMultiSaveError) as exception_context:
                self.kvs.set_many(kv_dict)
        exception = exception_context.exception
        self.assertEquals(exception.saved_field_names, ['existing_field', 'other_existing_field'])
class TestUserStateSummaryStorage(StorageTestBase, TestCase):
    """Tests for UserStateSummaryStorage"""
    # Concrete StorageTestBase parameterization for Scope.user_state_summary.
    factory = UserStateSummaryFactory
    scope = Scope.user_state_summary
    key_factory = user_state_summary_key
    storage_class = XModuleUserStateSummaryField
class TestStudentPrefsStorage(OtherUserFailureTestMixin, StorageTestBase, TestCase):
    """Tests for StudentPrefStorage"""
    # Concrete StorageTestBase parameterization for Scope.preferences; this
    # scope is user-bound, so the other-user failure mixin applies.
    factory = StudentPrefsFactory
    scope = Scope.preferences
    key_factory = prefs_key
    storage_class = XModuleStudentPrefsField
    other_key_factory = partial(DjangoKeyValueStore.Key, Scope.preferences, 2, 'mock_problem')  # user_id=2, not 1
    existing_field_name = "existing_field"
class TestStudentInfoStorage(OtherUserFailureTestMixin, StorageTestBase, TestCase):
    """Tests for StudentInfoStorage"""
    # Concrete StorageTestBase parameterization for Scope.user_info; this
    # scope is user-bound, so the other-user failure mixin applies.
    factory = StudentInfoFactory
    scope = Scope.user_info
    key_factory = user_info_key
    storage_class = XModuleStudentInfoField
    other_key_factory = partial(DjangoKeyValueStore.Key, Scope.user_info, 2, 'mock_problem')  # user_id=2, not 1
    existing_field_name = "existing_field"
| agpl-3.0 |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/debian/headphones/apps/headphones/lib/unidecode/x06a.py | 252 | 4674 | data = (
'Di ', # 0x00
'Zhuang ', # 0x01
'Le ', # 0x02
'Lang ', # 0x03
'Chen ', # 0x04
'Cong ', # 0x05
'Li ', # 0x06
'Xiu ', # 0x07
'Qing ', # 0x08
'Shuang ', # 0x09
'Fan ', # 0x0a
'Tong ', # 0x0b
'Guan ', # 0x0c
'Ji ', # 0x0d
'Suo ', # 0x0e
'Lei ', # 0x0f
'Lu ', # 0x10
'Liang ', # 0x11
'Mi ', # 0x12
'Lou ', # 0x13
'Chao ', # 0x14
'Su ', # 0x15
'Ke ', # 0x16
'Shu ', # 0x17
'Tang ', # 0x18
'Biao ', # 0x19
'Lu ', # 0x1a
'Jiu ', # 0x1b
'Shu ', # 0x1c
'Zha ', # 0x1d
'Shu ', # 0x1e
'Zhang ', # 0x1f
'Men ', # 0x20
'Mo ', # 0x21
'Niao ', # 0x22
'Yang ', # 0x23
'Tiao ', # 0x24
'Peng ', # 0x25
'Zhu ', # 0x26
'Sha ', # 0x27
'Xi ', # 0x28
'Quan ', # 0x29
'Heng ', # 0x2a
'Jian ', # 0x2b
'Cong ', # 0x2c
'[?] ', # 0x2d
'Hokuso ', # 0x2e
'Qiang ', # 0x2f
'Tara ', # 0x30
'Ying ', # 0x31
'Er ', # 0x32
'Xin ', # 0x33
'Zhi ', # 0x34
'Qiao ', # 0x35
'Zui ', # 0x36
'Cong ', # 0x37
'Pu ', # 0x38
'Shu ', # 0x39
'Hua ', # 0x3a
'Kui ', # 0x3b
'Zhen ', # 0x3c
'Zun ', # 0x3d
'Yue ', # 0x3e
'Zhan ', # 0x3f
'Xi ', # 0x40
'Xun ', # 0x41
'Dian ', # 0x42
'Fa ', # 0x43
'Gan ', # 0x44
'Mo ', # 0x45
'Wu ', # 0x46
'Qiao ', # 0x47
'Nao ', # 0x48
'Lin ', # 0x49
'Liu ', # 0x4a
'Qiao ', # 0x4b
'Xian ', # 0x4c
'Run ', # 0x4d
'Fan ', # 0x4e
'Zhan ', # 0x4f
'Tuo ', # 0x50
'Lao ', # 0x51
'Yun ', # 0x52
'Shun ', # 0x53
'Tui ', # 0x54
'Cheng ', # 0x55
'Tang ', # 0x56
'Meng ', # 0x57
'Ju ', # 0x58
'Cheng ', # 0x59
'Su ', # 0x5a
'Jue ', # 0x5b
'Jue ', # 0x5c
'Tan ', # 0x5d
'Hui ', # 0x5e
'Ji ', # 0x5f
'Nuo ', # 0x60
'Xiang ', # 0x61
'Tuo ', # 0x62
'Ning ', # 0x63
'Rui ', # 0x64
'Zhu ', # 0x65
'Chuang ', # 0x66
'Zeng ', # 0x67
'Fen ', # 0x68
'Qiong ', # 0x69
'Ran ', # 0x6a
'Heng ', # 0x6b
'Cen ', # 0x6c
'Gu ', # 0x6d
'Liu ', # 0x6e
'Lao ', # 0x6f
'Gao ', # 0x70
'Chu ', # 0x71
'Zusa ', # 0x72
'Nude ', # 0x73
'Ca ', # 0x74
'San ', # 0x75
'Ji ', # 0x76
'Dou ', # 0x77
'Shou ', # 0x78
'Lu ', # 0x79
'[?] ', # 0x7a
'[?] ', # 0x7b
'Yuan ', # 0x7c
'Ta ', # 0x7d
'Shu ', # 0x7e
'Jiang ', # 0x7f
'Tan ', # 0x80
'Lin ', # 0x81
'Nong ', # 0x82
'Yin ', # 0x83
'Xi ', # 0x84
'Sui ', # 0x85
'Shan ', # 0x86
'Zui ', # 0x87
'Xuan ', # 0x88
'Cheng ', # 0x89
'Gan ', # 0x8a
'Ju ', # 0x8b
'Zui ', # 0x8c
'Yi ', # 0x8d
'Qin ', # 0x8e
'Pu ', # 0x8f
'Yan ', # 0x90
'Lei ', # 0x91
'Feng ', # 0x92
'Hui ', # 0x93
'Dang ', # 0x94
'Ji ', # 0x95
'Sui ', # 0x96
'Bo ', # 0x97
'Bi ', # 0x98
'Ding ', # 0x99
'Chu ', # 0x9a
'Zhua ', # 0x9b
'Kuai ', # 0x9c
'Ji ', # 0x9d
'Jie ', # 0x9e
'Jia ', # 0x9f
'Qing ', # 0xa0
'Zhe ', # 0xa1
'Jian ', # 0xa2
'Qiang ', # 0xa3
'Dao ', # 0xa4
'Yi ', # 0xa5
'Biao ', # 0xa6
'Song ', # 0xa7
'She ', # 0xa8
'Lin ', # 0xa9
'Kunugi ', # 0xaa
'Cha ', # 0xab
'Meng ', # 0xac
'Yin ', # 0xad
'Tao ', # 0xae
'Tai ', # 0xaf
'Mian ', # 0xb0
'Qi ', # 0xb1
'Toan ', # 0xb2
'Bin ', # 0xb3
'Huo ', # 0xb4
'Ji ', # 0xb5
'Qian ', # 0xb6
'Mi ', # 0xb7
'Ning ', # 0xb8
'Yi ', # 0xb9
'Gao ', # 0xba
'Jian ', # 0xbb
'Yin ', # 0xbc
'Er ', # 0xbd
'Qing ', # 0xbe
'Yan ', # 0xbf
'Qi ', # 0xc0
'Mi ', # 0xc1
'Zhao ', # 0xc2
'Gui ', # 0xc3
'Chun ', # 0xc4
'Ji ', # 0xc5
'Kui ', # 0xc6
'Po ', # 0xc7
'Deng ', # 0xc8
'Chu ', # 0xc9
'[?] ', # 0xca
'Mian ', # 0xcb
'You ', # 0xcc
'Zhi ', # 0xcd
'Guang ', # 0xce
'Qian ', # 0xcf
'Lei ', # 0xd0
'Lei ', # 0xd1
'Sa ', # 0xd2
'Lu ', # 0xd3
'Li ', # 0xd4
'Cuan ', # 0xd5
'Lu ', # 0xd6
'Mie ', # 0xd7
'Hui ', # 0xd8
'Ou ', # 0xd9
'Lu ', # 0xda
'Jie ', # 0xdb
'Gao ', # 0xdc
'Du ', # 0xdd
'Yuan ', # 0xde
'Li ', # 0xdf
'Fei ', # 0xe0
'Zhuo ', # 0xe1
'Sou ', # 0xe2
'Lian ', # 0xe3
'Tamo ', # 0xe4
'Chu ', # 0xe5
'[?] ', # 0xe6
'Zhu ', # 0xe7
'Lu ', # 0xe8
'Yan ', # 0xe9
'Li ', # 0xea
'Zhu ', # 0xeb
'Chen ', # 0xec
'Jie ', # 0xed
'E ', # 0xee
'Su ', # 0xef
'Huai ', # 0xf0
'Nie ', # 0xf1
'Yu ', # 0xf2
'Long ', # 0xf3
'Lai ', # 0xf4
'[?] ', # 0xf5
'Xian ', # 0xf6
'Kwi ', # 0xf7
'Ju ', # 0xf8
'Xiao ', # 0xf9
'Ling ', # 0xfa
'Ying ', # 0xfb
'Jian ', # 0xfc
'Yin ', # 0xfd
'You ', # 0xfe
'Ying ', # 0xff
)
| gpl-2.0 |
LevinJ/CodeSamples | lcm/src/exlcm/Image.py | 1 | 2454 | """LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
try:
import cStringIO.StringIO as BytesIO
except ImportError:
from io import BytesIO
import struct
import exlcm.HEADER
class Image(object):
    """LCM message type `exlcm.Image`: a nested HEADER plus a raw image
    buffer of `nHeight` rows of `nWidth` bytes each.

    Auto-generated by lcm-gen ("DO NOT MODIFY BY HAND") -- the wire format
    is big-endian and each encoded message is prefixed with an 8-byte type
    fingerprint.
    """
    __slots__ = ["header", "nWidth", "nHeight", "gbImageData"]
    __typenames__ = ["exlcm.HEADER", "int32_t", "int32_t", "byte"]
    # gbImageData is a 2-D byte array of shape [nHeight][nWidth]
    __dimensions__ = [None, None, None, ["nHeight", "nWidth"]]

    def __init__(self):
        # Nested HEADER message (frame metadata lives there)
        self.header = exlcm.HEADER()
        self.nWidth = 0
        self.nHeight = 0
        # List of nHeight rows, each a bytes-like object of nWidth bytes
        self.gbImageData = []

    def encode(self):
        """Serialize to bytes: 8-byte type fingerprint, then the payload."""
        buf = BytesIO()
        buf.write(Image._get_packed_fingerprint())
        self._encode_one(buf)
        return buf.getvalue()

    def _encode_one(self, buf):
        # The nested message must be the exact generated HEADER type
        assert self.header._get_packed_fingerprint() == exlcm.HEADER._get_packed_fingerprint()
        self.header._encode_one(buf)
        # Dimensions first (big-endian int32 pair), then the row data;
        # each row is truncated to nWidth bytes
        buf.write(struct.pack(">ii", self.nWidth, self.nHeight))
        for i0 in range(self.nHeight):
            buf.write(bytearray(self.gbImageData[i0][:self.nWidth]))

    def decode(data):
        """Deserialize from bytes or a file-like object.

        Raises ValueError when the leading fingerprint does not match.
        """
        if hasattr(data, 'read'):
            buf = data
        else:
            buf = BytesIO(data)
        if buf.read(8) != Image._get_packed_fingerprint():
            raise ValueError("Decode error")
        return Image._decode_one(buf)
    decode = staticmethod(decode)

    def _decode_one(buf):
        self = Image()
        self.header = exlcm.HEADER._decode_one(buf)
        self.nWidth, self.nHeight = struct.unpack(">ii", buf.read(8))
        self.gbImageData = []
        # Each row is read back as a raw bytes object of nWidth bytes
        for i0 in range(self.nHeight):
            self.gbImageData.append(buf.read(self.nWidth))
        return self
    _decode_one = staticmethod(_decode_one)

    def _get_hash_recursive(parents):
        # Guard against infinite recursion on (theoretical) cyclic types
        if Image in parents: return 0
        newparents = parents + [Image]
        tmphash = (0x88f959495096bc7d+ exlcm.HEADER._get_hash_recursive(newparents)) & 0xffffffffffffffff
        # Rotate left by one bit, staying within 64 bits
        tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
        return tmphash
    _get_hash_recursive = staticmethod(_get_hash_recursive)
    # Cached 8-byte packed fingerprint (computed lazily below)
    _packed_fingerprint = None

    def _get_packed_fingerprint():
        if Image._packed_fingerprint is None:
            Image._packed_fingerprint = struct.pack(">Q", Image._get_hash_recursive([]))
        return Image._packed_fingerprint
    _get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
| gpl-2.0 |
StevenCHowell/code_sas_modeling | sas_modeling/file_io.py | 1 | 2144 | from __future__ import absolute_import, division, print_function
import errno
import glob
import logging
import os

import numpy as np
def mkdir_p(path):
    """Create `path` and any missing parent directories.

    Mirrors `mkdir -p`: an already-existing directory is not an error.
    Adapted from http://stackoverflow.com/questions/600268/
    """
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        already_there = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_there:
            raise
def get_files(pr_dir, iq_dir):
    """Collect the P(r) and I(Q) data files from the two directories.

    Parameters
    ----------
    pr_dir : str -- directory containing ``*.pr`` files
    iq_dir : str -- directory containing ``*.iq`` files

    Returns
    -------
    (pr_files, iq_files) : two sorted lists of file paths.

    A count mismatch between the two sets is logged as a warning but does
    not abort -- callers may still want the partial data.
    """
    pr_files = sorted(glob.glob(os.path.join(pr_dir, '*.pr')))
    iq_files = sorted(glob.glob(os.path.join(iq_dir, '*.iq')))
    n_iq = len(iq_files)
    n_pr = len(pr_files)
    if n_iq != n_pr:
        # bug fix: `logging` was used here without ever being imported,
        # so a mismatch raised NameError instead of warning
        logging.warning('mismatch number of files, n_iq: {}, n_pr: {}'.format(
            n_iq, n_pr))
    print('found {} P(r) and {} I(Q) files'.format(n_pr, n_iq))
    return pr_files, iq_files
def load_pr(pr_files):
    """Load P(r) curves from comma-separated integer files.

    Each file holds rows of ``r, P(r)`` integers.  The second column of
    every file is packed into one 2-D array, zero-padded on the right to
    the length of the longest curve.

    Returns
    -------
    r : ndarray of ints, 0..r_max-1 (the common r-grid)
    pr_data : int ndarray of shape (n_files, r_max)
    """
    columns = []
    lengths = []
    for fname in pr_files:
        table = np.loadtxt(fname, delimiter=',', dtype=int)
        columns.append(table[:, 1])
        lengths.append(len(table))
    longest = max(lengths)
    r = np.arange(longest)
    pr_data = np.zeros([len(pr_files), longest], dtype=int)
    for row, values in enumerate(columns):
        # shorter curves stay zero-padded past their own length
        pr_data[row, :len(values)] = values
    return r, pr_data
def load_iq(iq_files):
    """Load I(Q) curves, keeping only points with Q <= 0.18 1/A.

    The first file defines the Q-mask and the reference grid; every other
    file must share the same Q-grid and the same I(0) normalization
    (checked with ``assert``, so these checks vanish under ``python -O``).

    Returns
    -------
    q : ndarray of the retained Q values
    iq_data : ndarray of shape (n_files, n_q) with the intensities
    """
    iq_data = []
    q_max = 0.18  # only use data up to 0.18 1/A
    # load in the first data set to setup the q-mask
    first_data = np.loadtxt(iq_files[0])
    q_mask = first_data[:, 0] <= q_max
    first_data = first_data[q_mask]
    iq_data.append(first_data[:, 1])
    # load in the rest of the data
    for iq_file in iq_files[1:]:
        x_data = np.loadtxt(iq_file)
        x_data = x_data[q_mask]
        assert np.allclose(x_data[0, 1], first_data[0, 1]), 'ERROR: data not normalized to I(0)'
        assert np.allclose(x_data[:, 0], first_data[:, 0]), 'ERROR: data not on same Q-grid'
        iq_data.append(x_data[:, 1])
    iq_data = np.array(iq_data)
    # bug fix: q was previously taken from `x_data`, which is undefined
    # when only a single file is given (NameError); the reference grid
    # from the first file is the correct source in every case
    q = first_data[:, 0]
    return q, iq_data
grepman/detective.io | app/detective/individual.py | 3 | 49728 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from app.detective import graph
from app.detective.neomatch import Neomatch
from app.detective.sustainability import dummy_model_to_ressource
from app.detective.utils import import_class, get_model_topic, \
get_leafs_and_edges, get_topic_from_request, \
iterate_model_fields, topic_cache, \
download_url, \
get_image, is_local
from app.detective.topics.common.models import FieldSource
from app.detective.topics.common.user import UserNestedResource
from app.detective.models import Topic
from app.detective.exceptions import UnavailableImage, NotAnImage, OversizedFile
from app.detective.paginator import resource_paginator
from django import forms
from django.conf import settings
from django.conf.urls import url
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.core.files.storage import default_storage
from django.db.models.query import QuerySet
from django.http import Http404
from neo4jrestclient import client
from neo4django.db import connection
from neo4django.db.models import NodeModel
from neo4django.db.models.relationships import MultipleNodes
from tastypie import fields
from tastypie.authentication import Authentication, SessionAuthentication, BasicAuthentication, MultiAuthentication
from tastypie.authorization import DjangoAuthorization
from tastypie.constants import ALL
from tastypie.exceptions import Unauthorized
from tastypie.resources import ModelResource
from tastypie.serializers import Serializer
from tastypie.utils import trailing_slash
from easy_thumbnails.exceptions import InvalidImageFormatError
from easy_thumbnails.files import get_thumbnailer
import json
import re
import logging
import bleach
import os
logger = logging.getLogger(__name__)
class IndividualAuthorization(DjangoAuthorization):
    """Authorization policy for individual (graph node) resources.

    Reads require the topic to be public or the user to hold the topic's
    ``contribute_read`` permission; writes always require the matching
    ``contribute_*`` permission.  Bulk deletion is forbidden outright.
    """
    def get_topic_from_bundle(self, bundle):
        # Topic attached to the request by middleware, when available
        topic = get_topic_from_request(bundle.request)
        if topic == None:
            # NOTE(review): the trailing `.public` makes this a boolean,
            # yet callers go on to read `topic.public` again -- looks like
            # a bug; presumably `.public` should be dropped here. Confirm.
            topic = Topic.objects.get(ontology_as_mod=get_model_topic(bundle.obj)).public
        return topic

    def check_contribution_permission(self, bundle, operation):
        # Staff always pass; everyone else needs the per-topic permission
        # "<app_label>.contribute_<operation>"
        authorized = False
        user = bundle.request.user
        app_label = bundle.request.current_topic.app_label()
        if user:
            perm_name = "%s.contribute_%s" % (app_label, operation)
            authorized = user.is_staff or user.has_perm(perm_name)
        return authorized

    def read_detail(self, object_list, bundle):
        topic = self.get_topic_from_bundle(bundle)
        if not topic.public and not self.check_contribution_permission(bundle, 'read'):
            raise Unauthorized("Sorry, only staff or contributors can read resource.")
        return True

    def read_list(self, object_list, bundle):
        topic = self.get_topic_from_bundle(bundle)
        if not topic.public and not self.check_contribution_permission(bundle, 'read'):
            raise Unauthorized("Sorry, only staff or contributors can read resource.")
        return object_list

    def create_detail(self, object_list, bundle):
        if not self.check_contribution_permission(bundle, 'add'):
            raise Unauthorized("Sorry, only staff or contributors can create resource.")
        # check if user can add regarding to his plan
        topic = get_topic_from_request(bundle.request)
        owner_profile = topic.author.detectiveprofileuser
        # nodes_max() of -1 appears to mean "unlimited" -- TODO confirm
        if owner_profile.nodes_max() > -1 and owner_profile.nodes_count()[topic.slug] >= owner_profile.nodes_max():
            raise Unauthorized("Sorry, you have to upgrade your plan.")
        return True

    def update_detail(self, object_list, bundle):
        if not self.check_contribution_permission(bundle, 'change'):
            raise Unauthorized("Sorry, only staff or contributors can update resource.")
        return True

    def delete_detail(self, object_list, bundle):
        if not self.check_contribution_permission(bundle, 'delete'):
            raise Unauthorized("Sorry, only staff or contributors can delete resource.")
        return True

    def delete_list(self, object_list, bundle):
        # Bulk deletion is never allowed
        return False
class IndividualMeta:
    """Shared tastypie Meta options inherited by every individual resource."""
    list_allowed_methods = ['get', 'post', 'put']
    detail_allowed_methods = ['get', 'post', 'delete', 'put', 'patch']
    # Echo the (possibly modified) object back in write responses
    always_return_data = True
    authorization = IndividualAuthorization()
    # Plain Authentication() first: anonymous access is possible and the
    # authorization class decides what anonymous users may actually do
    authentication = MultiAuthentication(Authentication(), BasicAuthentication(), SessionAuthentication())
    filtering = {'name': ALL}
    ordering = {'name': ALL}
    serializer = Serializer(formats=['json', 'jsonp'])
class FieldSourceResource(ModelResource):
    """Read-only-ish embedded resource for per-field source references.

    Embedded (full=True) inside IndividualResource.field_sources.
    """
    class Meta:
        queryset = FieldSource.objects.all()
        # NOTE(review): 'auth/user' looks copy-pasted from a user resource
        # and may collide with the real auth/user endpoint -- verify; it is
        # kept unchanged here because it is part of the public API surface.
        resource_name = 'auth/user'
        excludes = ['individual',]

    def dehydrate(self, bundle):
        # Strip the resource_uri so the embedded payload only carries the
        # source data itself
        del bundle.data["resource_uri"]
        return bundle
class IndividualResource(ModelResource):
field_sources = fields.ToManyField(
    FieldSourceResource,
    # Sources are looked up by the owning individual's node id
    attribute=lambda bundle: FieldSource.objects.filter(individual=bundle.obj.id),
    full=True,
    null=True,
    use_in='detail'
)

def __init__(self, api_name=None):
    """Set up the resource: custom paginator and per-field detail detection."""
    super(IndividualResource, self).__init__(api_name)
    # Pass the current instance of the resource to the paginator
    self._meta.paginator_class = resource_paginator(self)
    # By default, tastypie detects detail mode globally: it means that
    # even into an embeded resource (through a relationship), Tastypie will
    # serialize it as if we are in it's detail view.
    # We overide 'use_in' for every field with the value "detail"
    for field_name, field_object in self.fields.items():
        if field_object.use_in == 'detail':
            # We use a custom method
            field_object.use_in = self.use_in
def prepend_urls(self):
    """Extra endpoints mounted before tastypie's defaults: search, mine,
    patch (+ per-field sources), authors, bulk upload, graph and
    relationship traversal."""
    params = (self._meta.resource_name, trailing_slash())
    return [
        url(r"^(?P<resource_name>%s)/search%s$" % params, self.wrap_view('get_search'), name="api_get_search"),
        url(r"^(?P<resource_name>%s)/mine%s$" % params, self.wrap_view('get_mine'), name="api_get_mine"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/patch%s$" % params, self.wrap_view('get_patch'), name="api_get_patch"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/patch/sources%s$" % params, self.wrap_view('get_patch_source'), name="api_get_create_source"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/patch/sources/(?P<source_pk>[0-9]*)%s$" % params, self.wrap_view('get_patch_source'), name="api_get_patch_source"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/authors%s$" % params, self.wrap_view('get_authors'), name="api_get_authors"),
        url(r"^(?P<resource_name>%s)/bulk_upload%s$" % params, self.wrap_view('bulk_upload'), name="api_bulk_upload"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/graph%s$" % params, self.wrap_view('get_graph'), name="api_get_graph"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/relationships%s$" % params, self.wrap_view('get_relationships'), name="api_get_relationships"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/relationships/(?P<field>\w[\w-]*)%s$" % params, self.wrap_view('get_relationships'), name="api_get_relationships_field"),
        url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/relationships/(?P<field>\w[\w-]*)/(?P<end>\w[\w-]*)%s$" % params, self.wrap_view('get_relationships'), name="api_get_relationships_field_end"),
    ]
def apply_sorting(self, obj_list, options=None):
    """Apply sorting, silently dropping an `order_by` key that does not
    match any resource field instead of letting the query fail.

    Bug fix: descending sort keys ("-name") are now recognised -- the
    leading '-' is stripped before checking against self.fields, so valid
    descending sorts are no longer discarded.
    """
    options_copy = options.copy()
    if "order_by" in options:
        # Tastypie prefixes descending sort keys with '-'
        field_name = options["order_by"].lstrip('-')
        if field_name not in self.fields:
            # remove invalid order_by key
            options_copy.pop("order_by", None)
    return super(IndividualResource, self).apply_sorting(obj_list, options_copy)
def build_schema(self):
    """Augment tastypie's default schema with the model's description and
    scope (both optional class attributes on the model)."""
    base_schema = super(IndividualResource, self).build_schema()
    model_class = self._meta.queryset.model
    merged = {
        "description": getattr(model_class, "_description", None),
        "scope": getattr(model_class, "_scope", None),
    }
    # On key collisions the real schema entries win, as before
    merged.update(base_schema)
    return merged
def get_queryset(self):
    """Return this resource's queryset, failing loudly when it is missing
    or not an actual QuerySet."""
    queryset = getattr(self._meta, "queryset", None)
    if isinstance(queryset, QuerySet):
        return queryset
    raise Exception("The given resource must define a queryset.")
def get_model(self):
    """Shortcut for the model class behind this resource's queryset."""
    queryset = self.get_queryset()
    return queryset.model
def get_topic(self, bundle=None):
    """Resolve the Topic this resource belongs to.

    Prefers the topic a middleware attached to the request (when a bundle
    is given); falls back to looking it up from the model's ontology
    module name.
    """
    model = self.get_model()
    topic = None
    # Bundle given
    if bundle != None:
        # The topic may be set by a middleware
        topic = get_topic_from_request(bundle.request)
    # No topic found
    if topic == None:
        # We found the topic according to the current model
        topic = Topic.objects.get(ontology_as_mod=get_model_topic(model))
    return topic
@property
def topic(self):
    # Convenience accessor; note this ignores any request-bound topic
    # because no bundle is passed
    return self.get_topic()
def get_model_fields(self, name=None, model=None):
    """Return every field of `model` (default: this resource's model), or
    only the field called `name` (None when no such field exists)."""
    if model is None:
        model = self.get_model()
    all_fields = model._meta.fields
    if name is None:
        # Find fields of the queryset's model
        return all_fields
    named = [f for f in all_fields if f.name == name]
    return named[0] if named else None
def get_model_field(self, name, model=None):
    """Return the model field called `name`, or None.

    Keeps the original semantics: when several fields share the name, the
    last one wins.
    """
    if model is None:
        model = self.get_model()
    matches = [f for f in self.get_model_fields(model=model) if f.name == name]
    return matches[-1] if matches else None
def need_to_many_field(self, field):
    """True for genuine relationship fields; reverse `*_set` accessors are
    excluded."""
    is_relationship = isinstance(field, MultipleNodes)
    return is_relationship and not field.name.endswith("_set")
# TODO: Find another way!
def dummy_class_to_ressource(self, klass):
    # Delegates to the sustainability helper that maps a model class to
    # its (dummy) resource path string
    return dummy_model_to_ressource(klass)
def get_to_many_field(self, field, full=False):
    """Build a tastypie ToManyField for a relationship model field.

    `field.target_model` may be either a class or a dotted-path string;
    strings are resolved with import_class.  Returns None when the target
    cannot be mapped to a resource (unresolved dummy resource).
    """
    # idiom fix: isinstance instead of a direct `type(...) == str` check,
    # which rejects str subclasses
    if isinstance(field.target_model, str):
        target_model = import_class(field.target_model)
    else:
        target_model = field.target_model
    resource = self.dummy_class_to_ressource(target_model)
    # Do not create a relationship with an empty resource (not resolved)
    if not resource:
        return None
    return fields.ToManyField(resource, field.name, full=full, null=True, use_in='detail')
def generate_to_many_fields(self, full=False):
    """Register a tastypie ToManyField on this resource for every
    relationship field of the model."""
    want_full = bool(full)
    for model_field in self.get_model_fields():
        # Only genuine relationships get a tastypie field
        if not self.need_to_many_field(model_field):
            continue
        rel_field = self.get_to_many_field(model_field, full=want_full)
        # Unresolved target resources are skipped
        if rel_field:
            self.fields[model_field.name] = rel_field
def _build_reverse_url(self, name, args=None, kwargs=None):
    """Namespace-aware reverse().

    This ModelResource respects Django namespaces, mirroring
    tastypie.resources.NamespacedModelResource / NamespacedApi.
    """
    namespaced_name = "{0}:{1}".format(self._meta.urlconf_namespace, name)
    return reverse(namespaced_name, args=args, kwargs=kwargs)
def use_in(self, bundle=None):
    """Detail-mode predicate: True only when this resource's own URI is
    the one being requested.

    Installed as the per-field `use_in` in __init__ so embedded resources
    are not serialized as if they were the detail view.
    """
    # Use in detail
    return self.get_resource_uri(bundle) == bundle.request.path
def dehydrate(self, bundle):
    """Post-process the serialized bundle: apply topic "transform" rules,
    resolve/thumbnail the image field, and normalize tuples to lists.

    NOTE(review): the image branch saves the node as a side effect of a
    GET request (caching resolved/broken image URLs on the node itself).
    """
    # Get the request from the bundle
    request = bundle.request
    # Show additional field following the model's rules
    rules = bundle.request.current_topic.get_rules().model( self.get_model() )
    # Get the output transformation for this model
    transform = rules.get("transform")
    # This is just a string
    # For complex formating use http://docs.python.org/2/library/string.html#formatspec
    if type(transform) is str:
        transform = transform.format(**bundle.data)
    # We can also receive a function
    elif callable(transform):
        transform = transform(bundle.data)
    bundle.data["_transform"] = transform or getattr(bundle.data, 'name', None)
    # Fields computed during the loop (cannot mutate bundle.data while
    # iterating it)
    to_add = dict()
    for field in bundle.data:
        # Image field
        if field == 'image' and self.use_in(bundle):
            # Get thumbnails
            try:
                url_or_path = bundle.data[field]
                # Remove host for local url
                if is_local(request, url_or_path or ''):
                    # By removing the host, we'll force django
                    # to read the file instead of downloading it
                    url_or_path = url_or_path.split( request.get_host() )[1]
                    url_or_path = url_or_path.replace(settings.MEDIA_URL, '/')
                try:
                    # Use a file instance and download external only
                    # on detail view (to avoid heavy loading)
                    image = get_image(url_or_path, download_external=self.use_in(bundle))
                # The given url is not a valid image
                except (NotAnImage, OversizedFile):
                    # Save the new URL to avoid reloading it
                    setattr(bundle.obj, field, None)
                    bundle.obj.save()
                    continue
                # The image might be temporary unvailable
                except UnavailableImage: continue
                # Skip none value
                if image is None: continue
                # Build the media url using the request
                media_url = request.build_absolute_uri(settings.MEDIA_URL)
                # Extract public name
                public_name = lambda i: os.path.join(media_url, i.replace(settings.MEDIA_ROOT, '').strip('/') )
                # Return the public url
                bundle.data[field] = public_name(image.name)
                # The image url changed...
                if getattr(bundle.obj, field) != public_name(image.name):
                    # Save the new URL to avoid reloading it
                    setattr(bundle.obj, field, public_name(image.name))
                    bundle.obj.save()
                # Create thumbnailer with the file
                thumbnailer = get_thumbnailer(image.name)
                to_add[field + '_thumbnail'] = {
                    key : public_name(thumbnailer.get_thumbnail({
                        'size': size,
                        'crop': True
                    }).name)
                    for key, size in settings.THUMBNAIL_SIZES.items()
                }
            except InvalidImageFormatError:
                to_add[field + '_thumbnail'] = ''
            # Ignore missing image error
            except IOError:
                to_add[field] = ''
                # Removes unusable image
                setattr(bundle.obj, field, None)
                bundle.obj.save()
        # Convert tuple to array for better serialization
        if type( getattr(bundle.obj, field, None) ) is tuple:
            bundle.data[field] = list( getattr(bundle.obj, field) )
        # Get the output transformation for this field
        transform = rules.field(field).get("transform", None)
        # This is just a string
        # For complex formating use http://docs.python.org/2/library/string.html#formatspec
        if type(transform) is str:
            bundle.data[field] = transform.format(**bundle.data)
        # We can also receive a function
        elif callable(transform):
            bundle.data[field] = transform(bundle.data, field)
    # Merge the deferred additions now that iteration is over
    for key in to_add.keys():
        bundle.data[key] = to_add[key]
    return bundle
def get_model_node(self):
    """Return the graph node that represents this resource's model type."""
    # Wraps the graph method with the model of this ressource
    return graph.get_model_node( self.get_model() )
def obj_create(self, bundle, **kwargs):
    """Create a node directly through the graph REST API.

    Only the "name" field is honoured at creation time; the current user
    is recorded as author and the node is linked to its model-type node
    via an <<INSTANCE>> relationship.
    """
    # Feed request object with the bundle
    request = bundle.request
    # Since we are not using the native save method
    # we need to check autorization here
    self.authorized_create_detail(self.get_object_list(request), bundle)
    # The only field allowed during creation is "name"
    data = dict(name=bundle.data.get("name", None), _author=[request.user.id])
    data = self.validate(data)
    # Model class
    model = self.get_model()
    # Find the node associate to this model
    model_node = self.get_model_node()
    # Start a transaction to batch insert values
    with connection.transaction(commit=False) as tx:
        # Create a brand new node
        node = connection.nodes.create(**data)
        # Instanciate its type
        connection.relationships.create(model_node, "<<INSTANCE>>", node)
        # Commit the transaction
        tx.commit()
    # Create an object to build the bundle
    obj = node.properties
    obj["id"] = node.id
    # update the cache
    topic_cache.incr_version(request.current_topic)
    # Return a new bundle
    return self.build_bundle(obj=model._neo4j_instance(node), data=obj, request=request)
def obj_get(self, **kwargs):
    """Fetch a node by primary key and wrap it in this resource's model.

    Raises Http404 when the node does not exist in the graph.
    """
    node_id = kwargs["pk"]
    # Current model
    model = self.get_model()
    try:
        # Get the node's data using the rest API
        node = connection.nodes.get(node_id)
    except client.NotFoundError:
        raise Http404("Not found.")
    # Create a model istance from the node
    return model._neo4j_instance(node)
def get_detail(self, request, **kwargs):
    """Detail endpoint: fetch the node, dehydrate it, resolve its
    relationships (nested=True) and serialize the response."""
    basic_bundle = self.build_bundle(request=request)
    kwargs["bundle"] = basic_bundle
    obj = self.obj_get(**kwargs)
    bundle = self.build_bundle(obj=obj, request=request)
    bundle = self.full_dehydrate(bundle)
    # nested=True: embedded relationship nodes are fully retrieved
    bundle = self.alter_detail_data_to_serialize(request, bundle, True)
    return self.create_response(request, bundle)
def alter_detail_data_to_serialize(self, request, bundle, nested=False):
    """Manually resolve relationship fields into the serialized payload.

    With nested=False the relationship fields hold plain node ids; with
    nested=True every related node is fetched (in a single Cypher query)
    and embedded as a validated dict.  Rule-declared Neomatch searches are
    appended as extra keys.
    """
    model = self.get_model()
    # Get relationships fields
    fields = [ f for f in model._meta.fields if f.get_internal_type() == 'Relationship']
    node_rels = bundle.obj.node.relationships.all()
    # If the nested parameter is True, this set
    node_to_retreive = set()
    # Resolve relationships manualy
    for field in fields:
        # Get relationships for this fields
        field_rels = [ rel for rel in node_rels[:] if rel.type == field._type]
        # Filter relationships to keep only the well oriented relationships
        # get the related field informations
        related_field = [f for f in iterate_model_fields(model) if "rel_type" in f and f["rel_type"] == field._type and "name" in f and f["name"] == field._BoundRelationship__attname]
        if related_field:
            # Note (edouard): check some assertions in case I forgot something
            assert len(related_field) == 1, related_field
            assert related_field[0]["direction"]
            # choose the end point to check
            end_point_side = "start" if related_field[0]["direction"] == "out" else "end"
            # filter the relationship
            field_rels = [rel for rel in field_rels if getattr(rel, end_point_side).id == bundle.obj.id]
        # Get node ids for those relationships
        field_oposites = [ graph.opposite(rel, bundle.obj.id) for rel in field_rels ]
        # Save the list into properities
        bundle.data[field.name] = field_oposites
        # Nested mode to true: we need to retreive every node
        if nested: node_to_retreive = set(list(node_to_retreive) + field_oposites)
    # There is node to extract for the graph
    if len(node_to_retreive):
        # Build the query to get all node in one request
        query = "start n=node(%s) RETURN ID(n), n" % ",".join(map(str, node_to_retreive))
        # Get all nodes as raw values to avoid unintended request to the graph
        nodes = connection.query(query, returns=(int, dict))
        # Helper lambda to retreive a node
        retreive_node = lambda idx: next(n[1]["data"] for n in nodes if n[0] == idx)
        # Populate the relationships field with there node instance
        for field in fields:
            # Retreive the list of ids
            for i, idx in enumerate(bundle.data[field.name]):
                rel_node = retreive_node(idx)
                # Save the id which is not a node property
                rel_node["id"] = idx
                # Update value
                bundle.data[field.name][i] = self.validate(rel_node, field.target_model, allow_missing=True)
    # Show additional field following the model's rules
    rules = request.current_topic.get_rules().model(self.get_model()).all()
    # All additional relationships
    for key in rules:
        # Filter rules to keep only Neomatch instance.
        # Neomatch is a class to create programmaticly a search related to
        # this node.
        if isinstance(rules[key], Neomatch):
            bundle.data[key] = rules[key].query(bundle.obj.id)
    return bundle
def get_search(self, request, **kwargs):
    """Search endpoint: case-insensitive substring match on `name`.

    The query string is sanitized by stripping characters that could be
    abused in downstream query construction.

    Bug fix: the previous pattern "\"|'|`|;|:|{|}|\\|(|\\|)|\\|"
    mis-escaped the parentheses -- `(` opened a regex group instead of
    being stripped, so '(' and ')' survived sanitization.  A character
    class removes every blacklisted character reliably.
    """
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)
    query = request.GET.get('q', '').lower()
    query = re.sub(r"[\"'`;:{}()|]", '', query).strip()
    limit = int( request.GET.get('limit', 20))
    # Do the query.
    results = self._meta.queryset.filter(name__icontains=query)
    # For retro compatibility we use the django paginator
    paginator = resource_paginator(self)(request.GET, results, resource_uri=self.get_resource_uri(), limit=limit, collection_name=self._meta.collection_name)
    to_be_serialized = paginator.page()
    # Dehydrate the bundles in preparation for serialization.
    bundles = []
    for obj in to_be_serialized[self._meta.collection_name]:
        bundle = self.build_bundle(obj=obj, request=request)
        bundles.append(self.full_dehydrate(bundle, for_list=True))
    to_be_serialized[self._meta.collection_name] = bundles
    to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
    return self.create_response(request, to_be_serialized)
def get_mine(self, request, **kwargs):
    """List endpoint restricted to entities authored by the current user.

    Anonymous users get an empty, zero-count response instead of an error.
    """
    self.method_check(request, allowed=['get'])
    self.throttle_check(request)
    limit = int(request.GET.get('limit', 20))
    if request.user.id is None:
        # Anonymous: short-circuit with an empty page
        return self.create_response(request, {
            'objects': [],
            'meta': {
                'author': request.user,
                'limit': limit,
                'total_count': 0
            }
        })
    else:
        # Do the query.
        results = self._meta.queryset.filter(_author__contains=request.user.id)
        # For retro compatibility we use the django paginator
        paginator = resource_paginator(self)(request.GET, results, resource_uri=self.get_resource_uri(), limit=limit, collection_name=self._meta.collection_name)
        to_be_serialized = paginator.page()
        # Dehydrate the bundles in preparation for serialization.
        bundles = []
        for obj in to_be_serialized[self._meta.collection_name]:
            bundle = self.build_bundle(obj=obj, request=request)
            bundles.append(self.full_dehydrate(bundle, for_list=True))
        to_be_serialized[self._meta.collection_name] = bundles
        to_be_serialized = self.alter_list_data_to_serialize(request, to_be_serialized)
        return self.create_response(request, to_be_serialized)
def convert(self, properties, model=None):
    """Coerce `properties` until the whole dict passes validate().

    Mutates `properties` in place (offending keys are converted via
    convert_field, or deleted when unconvertible) and also returns it.
    NOTE(review): termination relies on convert_field always producing a
    valid value or None -- a value that keeps failing would loop forever.
    """
    if model is None: model = self.get_model()
    validate = False
    # Iterate until the whole properties object validates
    while not validate:
        try:
            self.validate(properties, model=model)
            validate = True
        except ValidationError as e:
            # Convert each key
            for key in e.message_dict.keys():
                value = self.convert_field(key, properties[key], model=model)
                # Skip unconvertible values
                if value is None: del properties[key]
                # Save the value
                else: properties[key] = value
    return properties
def convert_field(self, name, value, model=None):
    """Coerce `value` into the type expected by the model field `name`.

    Returns the converted value, or None when conversion is impossible
    (callers treat None as "drop this property").
    """
    if model is None:
        model = self.get_model()
    # Locate the field, its internal type and its form-cleaning machinery
    field = self.get_model_field(name, model)
    internal_type = field._property.get_internal_type()
    form_field = field._property.formfield()
    try:
        if internal_type == 'BooleanField':
            return bool(value)
        if internal_type == 'CharField':
            return str(value)
        if internal_type == 'DateTimeField':
            return forms.DateTimeField().clean(value)
        # Any other type: fall back to the form field's own cleaning
        return form_field.clean(value)
    except (ValueError, TypeError, ValidationError):
        # Failed conversions collapse to None
        return None
def validate(self, data, model=None, allow_missing=False):
    """Validate and clean a dict of node properties against the model.

    With allow_missing=True invalid values are skipped instead of raising.
    Returns only the keys that matched a model field and validated.
    """
    if model is None: model = self.get_model()
    cleaned_data = {}
    for field_name in data:
        field = self.get_model_field(field_name, model)
        if field is not None:
            # Boolean field must be validate manually
            if field.get_internal_type() == 'BooleanField':
                if type(data[field_name]) is not bool:
                    raise ValidationError({field_name: 'Must be a boolean value'})
                    # NOTE(review): everything below this unconditional
                    # raise appears unreachable -- the allow_missing skip
                    # path for booleans seems dead; verify against history
                    if not allow_missing:
                        raise ValidationError({field_name: 'Must be a boolean value'})
                    # Skip this field
                    else: continue
                cleaned_data[field_name] = data[field_name]
            # DateTime field must be validate manually
            elif field.get_internal_type() == 'DateTimeField':
                # Create a native datetimefield
                formfield = forms.DateTimeField(input_formats=settings.DATETIME_FORMATS, required=False)
                try:
                    # Validate and clean the data
                    cleaned_data[field_name] = formfield.clean(data[field_name])
                except ValidationError as e:
                    # Raise the same error the field name as key
                    if not allow_missing: raise ValidationError({field_name: 'Must be a valid date/time'})
                    # Skip this field
                    else: continue
            # Only literal values have a _property attribute
            elif hasattr(field, "_property"):
                try:
                    try:
                        # Get a single field validator
                        formfield = field._property.formfield()
                        # Validate and clean data
                        cleaned_data[field_name] = formfield.clean(data[field_name])
                    except TypeError:
                        validators = getattr(field._property, "validators", [])
                        # This field has several validators
                        for validator in field._property.validators:
                            # Process validation with every validator
                            validator(data[field_name])
                        # @warning: this will validate the data for
                        # array of values but not clean them
                        cleaned_data[field_name] = data[field_name]
                except ValidationError as e:
                    # Raise the same error the field name as key
                    if not allow_missing: raise ValidationError({field_name: e.messages})
            # The given value is a relationship
            elif hasattr(field, "target_model") and type(data[field_name]) is list:
                # The validation method will collect targets ID
                cleaned_data[field_name] = []
                # Relationships can be added using to ways:
                # * a list of numeric id
                # * a list of objects containing an id key (final formatspec)
                for rel in data[field_name]:
                    # Common error message
                    error = "Bad relationship value"
                    # Evaluate the relation as a string:
                    if type(rel) is str:
                        # it must be a numeric value
                        if rel.isnumeric():
                            # We can add the value to the list.
                            # We take care of casting it to integer.
                            cleaned_data[field_name].append( int(rel) )
                        elif not allow_missing:
                            raise ValidationError({field_name: error})
                    # This is an integer, we're just passing
                    elif type(rel) is int:
                        # We can add the value to the list
                        cleaned_data[field_name].append(rel)
                    # This is an object
                    elif type(rel) is dict:
                        # The given object as no ID
                        if "id" not in rel:
                            raise ValidationError({field_name: error})
                        # NOTE(review): dicts with an id are only appended
                        # when allow_missing is False -- looks inverted;
                        # confirm intended behavior
                        elif not allow_missing:
                            # Add and cast the value
                            cleaned_data[field_name].append( int(rel["id"]) )
            # Treat id
            elif field.name == "id": cleaned_data[field_name] = int(data[field_name])
    return cleaned_data
def obj_delete(self, bundle, **kwargs):
    """Delete the node, then bump the topic cache version so cached lists
    reflect the removal."""
    super(IndividualResource, self).obj_delete(bundle, **kwargs)
    # update the cache
    topic_cache.incr_version(bundle.request.current_topic)
def get_patch(self, request, **kwargs):
    """Partial-update endpoint (POSTed JSON body) for a single node.

    Relationship fields receive a full list of target ids (missing ones
    are unlinked, new ones linked, in batched transactions); literal
    fields are set, rich-text values are bleached, image URLs are
    downloaded and re-hosted, and empty values delete the property.
    """
    pk = kwargs["pk"]
    # This should be a POST request
    self.method_check(request, allowed=['post'])
    self.throttle_check(request)
    # User must be authentication
    self.is_authenticated(request)
    bundle = self.build_bundle(request=request)
    # User allowed to update this model
    self.authorized_update_detail(self.get_object_list(bundle.request), bundle)
    # Get the node's data using the rest API
    try: node = connection.nodes.get(pk)
    # Node not found
    except client.NotFoundError: raise Http404("Not found.")
    # Load every relationship only when we need to update a relationship
    node_rels = None
    # Parse only body string
    body = json.loads(request.body) if type(request.body) is str else request.body
    # Copy data to allow dictionary resizing
    data = body.copy()
    # Received per-field sources
    if "field_sources" in data:
        # field_sources must not be treated here, see patch_source method
        field_sources = data.pop("field_sources")
    # Validate data.
    # If it fails, it will raise a ValidationError
    data = self.validate(data)
    # Get author list (or a new array if )
    author_list = node.properties.get("_author", [])
    # This is the first time the current user edit this node
    if int(request.user.id) not in author_list:
        # Add the author to the author list
        data["_author"] = author_list + [request.user.id]
    # @TODO check that 'node' is an instance of 'model'
    # Set new values to the node
    for field_name in data:
        field = self.get_model_field(field_name)
        field_value = data[field_name]
        # The value can be a list of ID for relationship
        # NOTE(review): `is 'Relationship'` compares string identity, not
        # equality -- works only through CPython interning; should be ==
        if field.get_internal_type() is 'Relationship':
            # Pluck id from the list
            # NOTE(review): `is not int(pk)` is also an identity compare;
            # for ids outside the small-int cache self-links may slip
            # through -- should presumably be !=
            field_ids = [ value for value in field_value if value is not int(pk) ]
            # Prefetch all relationship
            if node_rels is None: node_rels = node.relationships.all()
            # Get relationship name
            rel_type = self.get_model_field(field_name)._type
            # We don't want to add this relation twice so we extract
            # every node connected to the current one through this type
            # of relationship. "existing_rels_id" will contain the ids of
            # every node related to this one.
            existing_rels = [ rel for rel in node_rels if rel.type == rel_type ]
            existing_rels_id = [ graph.opposite(rel, pk) for rel in existing_rels ]
            # Get every ids from "field_ids" that ain't not in
            # the list of existing relationship "existing_rel_id".
            new_rels_id = set(field_ids).difference(existing_rels_id)
            # Get every ids from "existing_rels_id" that ain't no more
            # in the new list of relationships "field_ids".
            old_rels_id = set(existing_rels_id).difference(field_ids)
            # Start a transaction to batch import values
            with connection.transaction(commit=False) as tx:
                # Convert ids or related node to *node* instances
                new_rels_node = [ connection.nodes.get(idx) for idx in new_rels_id ]
                # Convert ids or unrelated node to *relationships* instances
                old_rels = []
                # Convert ids list into relationship instances
                for idx in old_rels_id:
                    # Find the relationship that match with this id
                    matches = [ rel for rel in existing_rels if graph.connected(rel, idx) ]
                    # Merge the list of relationships
                    old_rels = old_rels + matches
                # Commit change when every field was treated
                tx.commit()
            # Start a transaction to batch insert/delete values
            with connection.transaction(commit=False) as tx:
                # Then create the new relationships (using nodes instances)
                # Outcoming relationship
                if field.direction == 'out':
                    [ connection.relationships.create(node, rel_type, n) for n in new_rels_node ]
                # Incoming relationship
                elif field.direction == 'in':
                    [ connection.relationships.create(n, rel_type, node) for n in new_rels_node ]
                # Then delete the old relationships (using relationships instance)
                [ rel.delete() for rel in old_rels ]
                # Commit change when every field was treated
                tx.commit()
        # Or a literal value
        # (integer, date, url, email, etc)
        else:
            # Current model
            model = self.get_model()
            # Fields
            fields = { x['name'] : x for x in iterate_model_fields(model) }
            # Remove the values
            if field_value in [None, '']:
                if field_name == 'image' and fields[field_name]['type'] == 'URLField':
                    self.remove_node_file(node, field_name, True)
                # The field may not exists (yet)
                try:
                    node.delete(field_name)
                # It's OK, it just means we don't have to remove it
                except client.NotFoundError: pass
            # We simply update the node property
            # (the value is already validated)
            else:
                if field_name in fields:
                    # Rich-text fields are sanitized before storage
                    if 'is_rich' in fields[field_name]['rules'] and fields[field_name]['rules']['is_rich']:
                        data[field_name] = field_value = bleach.clean(field_value,
                            tags=("br", "blockquote", "ul", "ol",
                                "li", "b", "i", "u", "a", "p", "div", "span"),
                            attributes={
                                '*': ("class",),
                                'a': ("href", "target")
                            })
                    if field_name == 'image' and fields[field_name]['type'] == 'URLField':
                        self.remove_node_file(node, field_name, True)
                        try:
                            # Download the image
                            image_file = download_url(data[field_name])
                            # New Image path
                            path = os.path.join(settings.UPLOAD_ROOT, image_file.name)
                            # Removed the media root
                            path = path.replace(settings.MEDIA_ROOT, "")
                            # Store the image
                            path = default_storage.save(path, image_file)
                            host = settings.MEDIA_URL
                            # The path must start with host name
                            if not host.startswith("http"):
                                # If not, we append the request URL
                                # because if means that we are using a local path
                                host = request.build_absolute_uri(settings.MEDIA_URL)
                            # Join the path to the file and the MEDIA_URL
                            path = "/".join([ host.strip("/"), path.strip("/") ])
                            # Save the value
                            data[field_name] = field_value = path
                        except UnavailableImage:
                            data[field_name] = field_value = ""
                        except NotAnImage:
                            data[field_name] = field_value = ""
                        except OversizedFile:
                            data[field_name] = field_value = ""
                node.set(field_name, field_value)
    # update the cache
    topic_cache.incr_version(request.current_topic)
    # And returns cleaned data
    return self.create_response(request, data)
def get_patch_source(self, request, **kwargs):
        """
        Create, update or delete the field source attached to an individual.

        POST with a non-empty ``reference`` creates or updates the source
        node; POST with an empty ``reference`` — or a DELETE request —
        removes it. Responds with the serialized source, or None when it
        was deleted.
        """
        # Remove the source node along with every relationship attached to it.
        def delete_source(source_id):
            node = connection.nodes.get(source_id)
            rels = node.relationships.all()
            [ rel.delete() for rel in rels ]
            node.delete()
            return None
        # Only the `reference` property can be updated on an existing source.
        def update_source(individual, source_id, data):
            src_node = connection.nodes.get(source_id)
            src_node['reference'] = data['reference']
            return data
        # Create a new FieldSource node bound to the given individual.
        def create_source(individual, data):
            res = {}
            # took from neo4django.db.base.NodeModel._save_node_model
            type_hier_props = [{'app_label': t._meta.app_label,
                                'model_name': t.__name__} for t in FieldSource._concrete_type_chain()]
            type_hier_props = list(reversed(type_hier_props))
            # get all the names of all types, including abstract, for indexing
            type_names_to_index = [t._type_name() for t in FieldSource.mro()
                                   if (issubclass(t, NodeModel) and t is not NodeModel)]
            create_groovy = '''
        node = Neo4Django.createNodeWithTypes(types)
        Neo4Django.indexNodeAsTypes(node, indexName, typesToIndex)
        node.field = field
        node.individual = individual
        node.reference = reference
        results = node
            '''
            data.update({'individual': individual.id})
            # Run the creation server-side in a single gremlin transaction.
            node = connection.gremlin_tx(create_groovy, types=type_hier_props,
                                         indexName=FieldSource.index_name(),
                                         typesToIndex=type_names_to_index, **data)
            res['id'] = node.id
            res.update(data)
            # remove added individual
            if res.get('individual'):
                del res['individual']
            return res
        pk = kwargs["pk"]
        individual = None
        source_id = kwargs.get('source_pk')
        # Only POST and DELETE make sense for this endpoint
        self.method_check(request, allowed=['post', 'delete'])
        self.throttle_check(request)
        # User must be authenticated
        self.is_authenticated(request)
        bundle = self.build_bundle(request=request)
        # User allowed to update this model
        self.authorized_update_detail(self.get_object_list(bundle.request), bundle)
        # Get the node's data using the rest API
        try: individual = connection.nodes.get(pk)
        # Node not found
        except client.NotFoundError: raise Http404("Not found.")
        source = None
        if request.method == 'POST':
            body = json.loads(request.body)
            data = body.copy()
            if source_id is not None:
                # An empty reference means the existing source must be removed
                if data.get('reference') in ['', None]:
                    source = delete_source(source_id)
                else:
                    source = update_source(individual, source_id, data)
            else:
                source = create_source(individual, data)
        elif request.method == 'DELETE':
            delete_source(source_id)
        return self.create_response(request, source)
def get_authors(self, request, **kwargs):
        """
        Return the serialized list of users recorded as authors of a node.

        Only staff members and the topic's owner may see the list; any
        other requester receives an empty result set.
        """
        node_pk = kwargs["pk"]
        # Only GET is allowed; also enforce throttling and authentication.
        self.method_check(request, allowed=['get'])
        self.throttle_check(request)
        self.is_authenticated(request)
        bundle = self.build_bundle(request=request)
        # Authors are serialized with the nested user resource.
        user_resource = UserNestedResource()
        # Staff may always look; everybody else must own the current topic.
        if not request.user.is_staff and request.user.id != self.get_topic(bundle).author.id:
            # Hide the authors behind an empty list.
            return user_resource.create_response(request, [])
        # Fetch the node through the REST API; unknown ids become a 404.
        try:
            node = connection.nodes.get(node_pk)
        except client.NotFoundError:
            raise Http404("Not found.")
        # Resolve the stored author ids against the relational database.
        author_ids = node.properties.get("_author", [])
        authors = User.objects.filter(id__in=author_ids).select_related("profile")
        # Dehydrate every author with the nested resource before responding.
        author_bundles = [user_resource.build_bundle(obj=author, request=request)
                          for author in authors]
        payload = [user_resource.full_dehydrate(b) for b in author_bundles]
        return user_resource.create_response(request, payload)
def get_relationships(self, request, **kwargs):
        """
        Return relationship information for a node.

        Without a ``field`` kwarg, respond with the ids of every
        relationship attached to the node. With a ``field`` (and
        optionally an ``end`` node id), respond with the matching
        relationship — including its properties when the topic's rules
        declare a "through" model for that field.
        """
        # Extract node id from given node uri
        def node_id(uri) : return re.search(r'(\d+)$', uri).group(1)
        # Get the end of the given relationship
        def rel_from(rel, side): return node_id(rel.__dict__["_dic"][side])
        # Is the given relation connected to the given uri
        def connected(rel, idx): return rel_from(rel, "end") == idx or rel_from(rel, "start") == idx
        self.method_check(request, allowed=['get'])
        self.throttle_check(request)
        pk = kwargs['pk']
        node = connection.nodes.get(pk)
        # Only the relationships for a given field
        if "field" in kwargs:
            field = self.get_model_fields(kwargs["field"])
            # Unknown type (typo "Unkown" fixed in the user-facing message)
            if field is None: raise Http404("Unknown relationship field.")
            reltype = getattr(field, "rel_type", None)
            # Not a relationship
            if reltype is None: raise Exception("The given field is not a relationship.")
            rels = node.relationships.all(types=[reltype])
            # We want to filter the relationships with an other node
            if "end" in kwargs:
                end = kwargs["end"]
                # Then filter the relations
                ids = [ rel.id for rel in rels if connected(rel, end) ]
                if len(ids):
                    # Show additional field following the model's rules
                    rules = request.current_topic.get_rules()
                    # Model that manages properties for this relationship type
                    through = rules.model( self.get_model() ).field(kwargs["field"]).get("through")
                    if through:
                        # Get the properties for this relationship
                        try:
                            properties = through.objects.get(_relationship=ids[0])
                        except through.DoesNotExist:
                            # No properties yet: return the bare relationship
                            endnodes = [ int(pk), int(end) ]
                            return self.create_response(request, {
                                "_relationship": ids[0],
                                "_endnodes": endnodes
                            })
                        else:
                            # Get the module for this model
                            module = self.dummy_class_to_ressource(through)
                            # Instantiate the resource
                            resource = import_class(module)()
                            # Create a bundle with this resource
                            bundle = resource.build_bundle(obj=properties, request=request)
                            bundle = resource.full_dehydrate(bundle, for_list=True)
                            # We ask for relationship properties
                            return resource.create_response(request, bundle)
                else:
                    # No relationship
                    return self.create_response(request, { "_relationship": None })
        # All relationship
        else:
            rels = node.relationships.all()
            # Only returns IDS
            ids = [ rel.id for rel in rels ]
            return self.create_response(request, ids)
def get_graph(self, request, **kwargs):
        """
        Return the subgraph around a node as ``leafs`` and ``edges``.

        The traversal depth is read from the ``depth`` GET parameter and
        defaults to 2.
        """
        self.method_check(request, allowed=['get'])
        self.throttle_check(request)
        # `in request.GET` is the idiomatic membership test (no .keys() call)
        depth = int(request.GET['depth']) if 'depth' in request.GET else 2
        topic = Topic.objects.get(ontology_as_mod=get_model_topic(self.get_model()))
        leafs, edges = get_leafs_and_edges(
            topic=topic,
            depth=depth,
            root_node=kwargs['pk'])
        self.log_throttled_access(request)
        return self.create_response(request, {'leafs': leafs, 'edges': edges})
def remove_node_file(self, node, field_name, thumbnails=False):
        """
        Delete the file referenced by ``node.<field_name>`` from storage.

        When ``thumbnails`` is True, also delete every generated thumbnail
        (one per size in ``settings.THUMBNAIL_SIZES``). Deletion is
        best-effort: missing fields, missing files and storage errors are
        silently ignored.
        """
        try:
            file_name = os.path.join(settings.MEDIA_ROOT, node.get(field_name).strip('/'))
            default_storage.delete(file_name)
            if thumbnails:
                # Thumbnail names look like "<file>.<WxH>_q85_crop.<ext>",
                # with "jpeg" normalized to "jpg" by the thumbnailer.
                extension = file_name.split('.')[-1].lower().replace('jpeg', 'jpg')
                suffixes = [".{0}x{1}_q85_crop.".format(size[0], size[1]) for size in settings.THUMBNAIL_SIZES.values()]
                for suffix in suffixes:
                    full_file_name = "{0}{1}{2}".format(file_name, suffix, extension)
                    if default_storage.exists(full_file_name):
                        default_storage.delete(full_file_name)
        # `except Exception` instead of a bare `except`: keep the deliberate
        # best-effort behavior while letting SystemExit/KeyboardInterrupt
        # propagate.
        except Exception:
            pass
# EOF
| lgpl-3.0 |
nttks/jenkins-test | common/djangoapps/lang_pref/tests/test_api.py | 42 | 1052 | # -*- coding: utf-8 -*-
""" Tests for the language API. """
from django.test import TestCase
import ddt
from lang_pref import api as language_api
@ddt.ddt
class LanguageApiTest(TestCase):
    """Unit tests for the language-preference API."""

    # Codes that `preferred_language` should reject and map to the default.
    INVALID_LANGUAGE_CODES = ['', 'foo']

    def test_released_languages(self):
        """At least one language is always reported as released."""
        released_languages = language_api.released_languages()
        self.assertGreaterEqual(len(released_languages), 1)

    def test_preferred_language(self):
        """A known code maps to the matching Language tuple."""
        preferred_language = language_api.preferred_language('fr')
        self.assertEqual(preferred_language, language_api.Language('fr', u'Français'))

    @ddt.data(*INVALID_LANGUAGE_CODES)
    def test_invalid_preferred_language(self, language_code):
        """Invalid codes fall back to English."""
        preferred_language = language_api.preferred_language(language_code)
        self.assertEqual(preferred_language, language_api.Language('en', u'English'))

    def test_no_preferred_language(self):
        """A missing (None) code falls back to English."""
        preferred_language = language_api.preferred_language(None)
        self.assertEqual(preferred_language, language_api.Language('en', u'English'))
| agpl-3.0 |
mrocklin/streams | examples/scrape.py | 3 | 1532 | from __future__ import print_function
from time import sleep
import sys
from BeautifulSoup import BeautifulSoup # Python 2 only, sorry.
import requests
from streamz import Stream
import toolz
import urlparse
def links_of_page(page_info):
    """
    Extract same-domain absolute links from an HTML page.

    ``page_info`` is a ``(content, page_url)`` tuple. The tuple-parameter
    syntax used previously is Python-2-only (removed by PEP 3113), so the
    tuple is now unpacked explicitly — the call interface is unchanged.
    Returns a list of absolute URLs on the same domain, skipping
    query-string links and the bare root path; unparseable content yields
    an empty list.
    """
    content, page = page_info
    uri = urlparse.urlparse(page)
    domain = '%s://%s' % (uri.scheme, uri.netloc)
    try:
        soup = BeautifulSoup(content)
    except Exception:
        # Malformed markup: treat the page as having no links instead of
        # aborting the whole stream.
        return []
    else:
        links = [link.get('href') for link in soup.findAll('a')]
        return [domain + link
                for link in links
                if link
                and link.startswith('/')
                and '?' not in link
                and link != '/']
def topk_dict(d, k=10):
    """
    Return a dict holding the ``k`` highest-valued items of ``d``.

    Uses ``heapq.nlargest`` from the standard library (O(n log k)); this
    is exactly what ``toolz.topk`` wraps, so behavior is unchanged while
    the third-party dependency is dropped for this function.
    """
    import heapq
    return dict(heapq.nlargest(k, d.items(), key=lambda item: item[1]))
# Build the streaming crawl pipeline: every unique URL emitted on `source`
# is printed, fetched, scraped for same-domain links, and the discovered
# links are fed back into `source`, driving the crawl forward.
source = Stream()
pages = source.unique()
pages.sink(print)
# Download each page and keep only the raw body bytes.
content = (pages.map(requests.get)
                .map(lambda x: x.content))
# Pair each body with its URL, extract links and flatten the lists.
links = (content.zip(pages)
                .map(links_of_page)
                .concat())
# Feed the new links back into the source to continue crawling.
links.sink(source.emit)

# Disabled example: word-frequency analysis of the crawled pages
# (requires nltk). Kept as an inert string so it never executes.
"""
from nltk.corpus import stopwords
stopwords = set(stopwords.words('english'))

word_counts = (content.map(str.split)
                      .concat()
                      .filter(str.isalpha)
                      .remove(stopwords.__contains__)
                      .frequencies())
top_words = (word_counts.map(topk_dict, k=10)
                        .map(frozenset)
                        .unique(history=10))
top_words.sink(print)
"""

# Seed the crawl with a URL given on the command line.
if len(sys.argv) > 1:
    source.emit(sys.argv[1])
#
| bsd-3-clause |
nanolearning/edx-platform | lms/djangoapps/dashboard/git_import.py | 3 | 10670 | """
Provides a function for importing a git repository into the lms
instance when using a mongo modulestore
"""
import os
import re
import StringIO
import subprocess
import logging
from django.conf import settings
from django.core import management
from django.core.management.base import CommandError
from django.utils import timezone
from django.utils.translation import ugettext as _
import mongoengine
from dashboard.models import CourseImportLog
from opaque_keys import InvalidKeyError
from xmodule.modulestore.keys import CourseKey
from xmodule.modulestore.locations import SlashSeparatedCourseKey
log = logging.getLogger(__name__)
GIT_REPO_DIR = getattr(settings, 'GIT_REPO_DIR', '/edx/var/app/edxapp/course_repos')
GIT_IMPORT_STATIC = getattr(settings, 'GIT_IMPORT_STATIC', True)
class GitImportError(Exception):
    """
    Exception class for handling the typical errors in a git import.

    Each class attribute below is a translated, user-facing message used
    as the exception argument for one specific failure mode.
    """
    # GIT_REPO_DIR does not exist on disk.
    NO_DIR = _("Path {0} doesn't exist, please create it, "
               "or configure a different path with "
               "GIT_REPO_DIR").format(GIT_REPO_DIR)
    # The repository URL doesn't look like anything git can clone.
    URL_BAD = _('Non usable git url provided. Expecting something like:'
                ' git@github.com:mitocw/edx4edx_lite.git')
    # `git log` failed after the clone/pull, so the repo is unusable.
    BAD_REPO = _('Unable to get git log')
    # The initial clone or pull itself failed.
    CANNOT_PULL = _('git clone or pull failed!')
    # The management `import` command could not be run.
    XML_IMPORT_FAILED = _('Unable to run import command.')
    # The configured modulestore has no import support.
    UNSUPPORTED_STORE = _('The underlying module store does not support import.')
    # Translators: This is an error message when they ask for a
    # particular version of a git repository and that version isn't
    # available from the remote source they specified
    REMOTE_BRANCH_MISSING = _('The specified remote branch is not available.')
    # Translators: Error message shown when they have asked for a git
    # repository branch, a specific version within a repository, that
    # doesn't exist, or there is a problem changing to it.
    CANNOT_BRANCH = _('Unable to switch to specified branch. Please check '
                      'your branch name.')
def cmd_log(cmd, cwd):
    """
    Helper function to redirect stderr to stdout and log the command
    used along with the output. Will raise subprocess.CalledProcessError if
    command doesn't return 0, and returns the command's output.
    """
    output = subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT)
    # Lazy %-style logging arguments: the message is only formatted when
    # DEBUG logging is actually enabled.
    log.debug('Command was: %r. Working directory was: %r', ' '.join(cmd), cwd)
    log.debug('Command output was: %r', output)
    return output
def switch_branch(branch, rdir):
    """
    This will determine how to change the branch of the repo, and then
    use the appropriate git commands to do so.

    Raises an appropriate GitImportError exception if there is any issue
    with changing branches.
    """
    # Get the latest remote
    try:
        cmd_log(['git', 'fetch', ], rdir)
    except subprocess.CalledProcessError as ex:
        log.exception('Unable to fetch remote: %r', ex.output)
        raise GitImportError(GitImportError.CANNOT_BRANCH)
    # Check if the branch is available from the remote.
    cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/{0}'.format(branch), ]
    try:
        output = cmd_log(cmd, rdir)
    except subprocess.CalledProcessError as ex:
        log.exception('Getting a list of remote branches failed: %r', ex.output)
        raise GitImportError(GitImportError.CANNOT_BRANCH)
    if branch not in output:
        raise GitImportError(GitImportError.REMOTE_BRANCH_MISSING)
    # Check if the remote branch has already been made locally
    cmd = ['git', 'branch', '-a', ]
    try:
        output = cmd_log(cmd, rdir)
    except subprocess.CalledProcessError as ex:
        log.exception('Getting a list of local branches failed: %r', ex.output)
        raise GitImportError(GitImportError.CANNOT_BRANCH)
    # `git branch -a` marks the current branch with '*'; strip the marker
    # and surrounding whitespace before comparing names.
    branches = [line.replace('*', '').strip() for line in output.split('\n')]
    if branch not in branches:
        # Checkout with -b since it is remote only
        cmd = ['git', 'checkout', '--force', '--track',
               '-b', branch, 'origin/{0}'.format(branch), ]
        try:
            cmd_log(cmd, rdir)
        except subprocess.CalledProcessError as ex:
            log.exception('Unable to checkout remote branch: %r', ex.output)
            raise GitImportError(GitImportError.CANNOT_BRANCH)
    # Go ahead and reset hard to the newest version of the branch now that
    # we know it is local.
    try:
        cmd_log(['git', 'reset', '--hard', 'origin/{0}'.format(branch), ], rdir)
    except subprocess.CalledProcessError as ex:
        log.exception('Unable to reset to branch: %r', ex.output)
        raise GitImportError(GitImportError.CANNOT_BRANCH)
def add_repo(repo, rdir_in, branch=None):
    """
    This will add a git repo into the mongo modulestore.

    If branch is left as None, it will fetch the most recent
    version of the current branch.

    Raises GitImportError on any failure of the clone/checkout/import
    steps. A failure to store the import log in mongo is logged but does
    not fail the call, since the course import itself already succeeded.
    """
    # pylint: disable=R0915
    # Set defaults even if it isn't defined in settings
    mongo_db = {
        'host': 'localhost',
        'user': '',
        'password': '',
        'db': 'xlog',
    }
    # Allow overrides
    if hasattr(settings, 'MONGODB_LOG'):
        for config_item in ['host', 'user', 'password', 'db', ]:
            mongo_db[config_item] = settings.MONGODB_LOG.get(
                config_item, mongo_db[config_item])
    if not os.path.isdir(GIT_REPO_DIR):
        raise GitImportError(GitImportError.NO_DIR)
    # pull from git
    if not (repo.endswith('.git') or
            repo.startswith(('http:', 'https:', 'git:', 'file:'))):
        raise GitImportError(GitImportError.URL_BAD)
    # Derive the local checkout directory name from the repo URL unless an
    # explicit directory was requested.
    if rdir_in:
        rdir = os.path.basename(rdir_in)
    else:
        rdir = repo.rsplit('/', 1)[-1].rsplit('.git', 1)[0]
    log.debug('rdir = {0}'.format(rdir))
    rdirp = '{0}/{1}'.format(GIT_REPO_DIR, rdir)
    if os.path.exists(rdirp):
        log.info('directory already exists, doing a git pull instead '
                 'of git clone')
        cmd = ['git', 'pull', ]
        cwd = rdirp
    else:
        cmd = ['git', 'clone', repo, ]
        cwd = GIT_REPO_DIR
    cwd = os.path.abspath(cwd)
    try:
        ret_git = cmd_log(cmd, cwd=cwd)
    except subprocess.CalledProcessError as ex:
        log.exception('Error running git pull: %r', ex.output)
        raise GitImportError(GitImportError.CANNOT_PULL)
    if branch:
        switch_branch(branch, rdirp)
    # get commit id
    cmd = ['git', 'log', '-1', '--format=%H', ]
    try:
        commit_id = cmd_log(cmd, cwd=rdirp)
    except subprocess.CalledProcessError as ex:
        log.exception('Unable to get git log: %r', ex.output)
        raise GitImportError(GitImportError.BAD_REPO)
    ret_git += '\nCommit ID: {0}'.format(commit_id)
    # get branch
    cmd = ['git', 'symbolic-ref', '--short', 'HEAD', ]
    try:
        branch = cmd_log(cmd, cwd=rdirp)
    except subprocess.CalledProcessError as ex:
        # I can't discover a way to exercise this, but git is complex
        # so still logging and raising here in case.
        log.exception('Unable to determine branch: %r', ex.output)
        raise GitImportError(GitImportError.BAD_REPO)
    ret_git += '{0}Branch: {1}'.format(' \n', branch)
    # Get XML logging logger and capture debug to parse results
    output = StringIO.StringIO()
    import_log_handler = logging.StreamHandler(output)
    import_log_handler.setLevel(logging.DEBUG)
    logger_names = ['xmodule.modulestore.xml_importer', 'git_add_course',
                    'xmodule.modulestore.xml', 'xmodule.seq_module', ]
    loggers = []
    for logger_name in logger_names:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.DEBUG)
        logger.addHandler(import_log_handler)
        loggers.append(logger)
    try:
        management.call_command('import', GIT_REPO_DIR, rdir,
                                nostatic=not GIT_IMPORT_STATIC)
    except CommandError:
        raise GitImportError(GitImportError.XML_IMPORT_FAILED)
    except NotImplementedError:
        raise GitImportError(GitImportError.UNSUPPORTED_STORE)
    ret_import = output.getvalue()
    # Remove handler hijacks
    for logger in loggers:
        logger.setLevel(logging.NOTSET)
        logger.removeHandler(import_log_handler)
    course_key = None
    location = 'unknown'
    # extract course ID from output of import-command-run and make symlink
    # this is needed in order for custom course scripts to work
    match = re.search(r'(?ms)===> IMPORTING course (\S+)', ret_import)
    if match:
        course_id = match.group(1)
        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
        cdir = '{0}/{1}'.format(GIT_REPO_DIR, course_key.course)
        log.debug('Studio course dir = {0}'.format(cdir))
        if os.path.exists(cdir) and not os.path.islink(cdir):
            log.debug(' -> exists, but is not symlink')
            log.debug(subprocess.check_output(['ls', '-l', ],
                                              cwd=os.path.abspath(cdir)))
            try:
                os.rmdir(os.path.abspath(cdir))
            except OSError:
                log.exception('Failed to remove course directory')
        if not os.path.exists(cdir):
            log.debug(' -> creating symlink between {0} and {1}'.format(rdirp, cdir))
            try:
                os.symlink(os.path.abspath(rdirp), os.path.abspath(cdir))
            except OSError:
                log.exception('Unable to create course symlink')
            log.debug(subprocess.check_output(['ls', '-l', ],
                                              cwd=os.path.abspath(cdir)))
    # store import-command-run output in mongo
    mongouri = 'mongodb://{user}:{password}@{host}/{db}'.format(**mongo_db)
    try:
        if mongo_db['user'] and mongo_db['password']:
            mdb = mongoengine.connect(mongo_db['db'], host=mongouri)
        else:
            mdb = mongoengine.connect(mongo_db['db'], host=mongo_db['host'])
    except mongoengine.connection.ConnectionError:
        log.exception('Unable to connect to mongodb to save log, please '
                      'check MONGODB_LOG settings')
        # BUGFIX: previously execution fell through here, so `cil.save()`
        # ran without a connection and `mdb.disconnect()` raised NameError
        # (`mdb` was never bound). Bail out instead: the course import
        # already succeeded; only the log entry is lost.
        return
    cil = CourseImportLog(
        course_id=course_key,
        location=location,
        repo_dir=rdir,
        created=timezone.now(),
        import_log=ret_import,
        git_log=ret_git,
    )
    cil.save()
    log.debug('saved CourseImportLog for {0}'.format(cil.course_id))
    mdb.disconnect()
| agpl-3.0 |
ellipsis14/dolfin | test/unit/python/ufl-jit-assemble-chain/test_form_operations.py | 3 | 1505 | #!usr/bin/env py.test
"""Tests for DOLFIN integration of various form operations"""
# Copyright (C) 2011 Marie E. Rognes
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2011-11-14
# Last changed: 2011-11-14
from dolfin import *
def test_lhs_rhs_simple():
    """Check lhs/rhs extraction on DOLFIN forms built from constants
    without a cell: the extracted bilinear part must match both system()
    and a hand-scaled reference Laplacian."""
    # Discretize on a small rectangle with P1 ("CG", 1) elements.
    mesh = RectangleMesh(Point(0, 0), Point(2, 1), 3, 5)
    space = FunctionSpace(mesh, "CG", 1)
    two = Constant(2.0)
    three = Constant(3.0)
    test = TestFunction(space)
    trial = TrialFunction(space)
    # Mixed form: a bilinear part (scaled Laplacian) plus a linear part.
    mixed_form = inner(three*grad(two*test), grad(trial))*dx + two*test*dx
    bilinear, linear = system(mixed_form)
    extracted_lhs = lhs(mixed_form)
    extracted_rhs = rhs(mixed_form)
    # Reference: plain Laplacian, later scaled by the constants' product 6.
    reference = inner(grad(test), grad(trial))*dx
    norm_system = assemble(bilinear).norm("frobenius")
    norm_lhs = assemble(extracted_lhs).norm("frobenius")
    norm_reference = 6.0*assemble(reference).norm("frobenius")
    assert round(norm_system - norm_reference, 7) == 0
    assert round(norm_system - norm_lhs, 7) == 0
| gpl-3.0 |
simonwydooghe/ansible | lib/ansible/modules/network/junos/junos_scp.py | 10 | 5404 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: junos_scp
version_added: "2.5"
author: "Christian Giese (@GIC-de)"
short_description: Transfer files from or to remote devices running Junos
description:
- This module transfers files via SCP from or to remote devices
running Junos.
extends_documentation_fragment: junos
options:
src:
description:
- The C(src) argument takes a single path, or a list of paths to be
transferred. The argument C(recursive) must be C(true) to transfer
directories.
required: true
dest:
description:
- The C(dest) argument specifies the path in which to receive the files.
default: '.'
recursive:
description:
- The C(recursive) argument enables recursive transfer of files and
directories.
type: bool
default: 'no'
remote_src:
description:
- The C(remote_src) argument enables the download of files (I(scp get)) from
the remote device. The default behavior is to upload files (I(scp put))
to the remote device.
type: bool
default: 'no'
ssh_private_key_file:
description:
- The C(ssh_private_key_file) argument is path to the SSH private key file.
This can be used if you need to provide a private key rather than loading
the key into the ssh-key-ring/environment
type: path
version_added: '2.10'
ssh_config:
description:
- The C(ssh_config) argument is path to the SSH configuration file.
This can be used to load SSH information from a configuration file.
If this option is not given by default ~/.ssh/config is queried.
type: path
version_added: '2.10'
requirements:
- junos-eznc
- ncclient (>=v0.5.2)
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Tested against vMX JUNOS version 17.3R1.10.
- Works with C(local) connections only.
- Since this module uses junos-eznc to establish connection with junos
device the netconf configuration parameters needs to be passed
using module options for example C(ssh_config) unlike other junos
modules that uses C(netconf) connection type.
"""
EXAMPLES = """
# the required set of connection arguments have been purposely left off
# the examples for brevity
- name: upload local file to home directory on remote device
junos_scp:
src: test.tgz
- name: upload local file to tmp directory on remote device
junos_scp:
src: test.tgz
dest: /tmp/
- name: download file from remote device
junos_scp:
src: test.tgz
remote_src: true
- name: ssh config file path for jumphost config
junos_scp:
src: test.tgz
remote_src: true
ssh_config: /home/user/customsshconfig
"""
RETURN = """
changed:
description: always true
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.junos.junos import junos_argument_spec, get_device
from ansible.module_utils._text import to_native
try:
from jnpr.junos.utils.scp import SCP
HAS_PYEZ = True
except ImportError:
HAS_PYEZ = False
def transfer_files(module, device):
    """Copy every path listed in ``module.params['src']`` via SCP.

    Direction is controlled by ``remote_src``: when true, files are
    downloaded from the device, otherwise they are uploaded to it.
    """
    destination = module.params['dest']
    recurse = module.params['recursive']
    download = module.params['remote_src']
    with SCP(device) as session:
        for raw_path in module.params['src']:
            path = raw_path.strip()
            if download:
                session.get(path, local_path=destination, recursive=recurse)
            else:
                session.put(path, remote_path=destination, recursive=recurse)
def main():
    """ Main entry point for Ansible module execution
    """
    # Module-specific options; `transport` is pinned to netconf because the
    # module talks to the device through junos-eznc.
    argument_spec = dict(
        src=dict(type='list', required=True),
        dest=dict(type='path', required=False, default="."),
        recursive=dict(type='bool', default=False),
        remote_src=dict(type='bool', default=False),
        ssh_private_key_file=dict(type='path'),
        ssh_config=dict(type='path'),
        transport=dict(default='netconf', choices=['netconf'])
    )
    # Merge in the shared junos connection arguments (host, user, ...).
    argument_spec.update(junos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    if module.params['provider'] is None:
        module.params['provider'] = {}
    # Fail early with an actionable message when junos-eznc is missing.
    if not HAS_PYEZ:
        module.fail_json(
            msg='junos-eznc is required but does not appear to be installed. '
                'It can be installed using `pip install junos-eznc`'
        )
    # An SCP transfer always changes something, so report changed=True.
    result = dict(changed=True)
    if not module.check_mode:
        # open pyez connection and transfer files via SCP
        try:
            device = get_device(module)
            transfer_files(module, device)
        except Exception as ex:
            module.fail_json(
                msg=to_native(ex)
            )
        finally:
            try:
                # close pyez connection and ignore exceptions
                # (this also swallows the NameError raised when
                # get_device() failed before `device` was bound)
                device.close()
            except Exception:
                pass
    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
FATruden/boto | boto/ec2/elb/__init__.py | 4 | 28205 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
"""
This module provides an interface to the Elastic Compute Cloud (EC2)
load balancing service from AWS.
"""
from boto.connection import AWSQueryConnection
from boto.ec2.instanceinfo import InstanceInfo
from boto.ec2.elb.loadbalancer import LoadBalancer, LoadBalancerZones
from boto.ec2.elb.instancestate import InstanceState
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.elb.listelement import ListElement
from boto.regioninfo import RegionInfo
import boto
# Region name -> ELB API endpoint hostname. Consumed by regions() to build
# a RegionInfo object for every region where ELB is available.
RegionData = {
    'us-east-1': 'elasticloadbalancing.us-east-1.amazonaws.com',
    'us-gov-west-1': 'elasticloadbalancing.us-gov-west-1.amazonaws.com',
    'us-west-1': 'elasticloadbalancing.us-west-1.amazonaws.com',
    'us-west-2': 'elasticloadbalancing.us-west-2.amazonaws.com',
    'sa-east-1': 'elasticloadbalancing.sa-east-1.amazonaws.com',
    'eu-west-1': 'elasticloadbalancing.eu-west-1.amazonaws.com',
    'ap-northeast-1': 'elasticloadbalancing.ap-northeast-1.amazonaws.com',
    'ap-southeast-1': 'elasticloadbalancing.ap-southeast-1.amazonaws.com',
    'ap-southeast-2': 'elasticloadbalancing.ap-southeast-2.amazonaws.com',
}
def regions():
    """
    Get all available regions for the ELB service.

    :rtype: list
    :return: A list of :class:`boto.RegionInfo` instances
    """
    # One RegionInfo per known region/endpoint pair.
    return [RegionInfo(name=region_name,
                       endpoint=endpoint,
                       connection_cls=ELBConnection)
            for region_name, endpoint in RegionData.items()]
def connect_to_region(region_name, **kw_params):
    """
    Given a valid region name, return a
    :class:`boto.ec2.elb.ELBConnection`.

    :param str region_name: The name of the region to connect to.

    :rtype: :class:`boto.ec2.ELBConnection` or ``None``
    :return: A connection to the given region, or None if an invalid region
             name is given
    """
    # Find the first (only) region with a matching name, if any.
    matching = next((region for region in regions()
                     if region.name == region_name), None)
    if matching is None:
        return None
    return matching.connect(**kw_params)
class ELBConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'elb_version', '2012-06-01')
DefaultRegionName = boto.config.get('Boto', 'elb_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'elb_region_endpoint',
'elasticloadbalancing.us-east-1.amazonaws.com')
    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 proxy_user=None, proxy_pass=None, debug=0,
                 https_connection_factory=None, region=None, path='/',
                 security_token=None, validate_certs=True):
        """
        Init method to create a new connection to EC2 Load Balancing Service.

        .. note:: The region argument is overridden by the region specified in
            the boto configuration file.
        """
        # Fall back to the default region when none was given explicitly.
        if not region:
            region = RegionInfo(self, self.DefaultRegionName,
                                self.DefaultRegionEndpoint)
        self.region = region
        # Delegate the actual connection setup (auth, proxy, TLS) to the
        # generic AWS query-API base class, pointed at this region's endpoint.
        AWSQueryConnection.__init__(self, aws_access_key_id,
                                    aws_secret_access_key,
                                    is_secure, port, proxy, proxy_port,
                                    proxy_user, proxy_pass,
                                    self.region.endpoint, debug,
                                    https_connection_factory, path,
                                    security_token,
                                    validate_certs=validate_certs)
    def _required_auth_capability(self):
        # Requests are signed with the standard EC2 auth handler.
        return ['ec2']
def build_list_params(self, params, items, label):
if isinstance(items, str):
items = [items]
for index, item in enumerate(items):
params[label % (index + 1)] = item
    def get_all_load_balancers(self, load_balancer_names=None):
        """
        Retrieve all load balancers associated with your account.

        :type load_balancer_names: list
        :keyword load_balancer_names: An optional list of load balancer names.

        :rtype: :py:class:`boto.resultset.ResultSet`
        :return: A ResultSet containing instances of
            :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
        """
        params = {}
        # Only filter by name when names were given; otherwise list them all.
        if load_balancer_names:
            self.build_list_params(params, load_balancer_names,
                                   'LoadBalancerNames.member.%d')
        return self.get_list('DescribeLoadBalancers', params,
                             [('member', LoadBalancer)])
def create_load_balancer(self, name, zones, listeners=None, subnets=None,
                         security_groups=None, scheme='internet-facing',
                         complex_listeners=None):
    """
    Create a new load balancer for your account.

    By default the load balancer is created in EC2-classic.  To create a
    load balancer inside a VPC, pass ``zones=None`` and a non-empty
    ``subnets`` list; the balancer is then created in the VPC that
    contains those subnets.

    :type name: string
    :param name: The mnemonic name associated with the new load balancer.

    :type zones: list of strings
    :param zones: The names of the availability zone(s) to add.

    :type listeners: list of tuples
    :param listeners: Each tuple contains three or four values:
        (LoadBalancerPortNumber, InstancePortNumber, Protocol,
        [SSLCertificateId]).  Ports are integers between 1 and 65535,
        Protocol is 'TCP', 'SSL', 'HTTP' or 'HTTPS'; SSLCertificateId
        (the ARN of an AWS IAM certificate) is required for HTTPS/SSL.

    :type subnets: list of strings
    :param subnets: A list of subnet IDs in your VPC to attach to
        your LoadBalancer.

    :type security_groups: list of strings
    :param security_groups: The security groups assigned to your
        LoadBalancer within your VPC.

    :type scheme: string
    :param scheme: 'internet-facing' (default) for a publicly resolvable
        DNS name, or 'internal' for a VPC-only balancer whose DNS name
        resolves to private IP addresses.

    :type complex_listeners: list of tuples
    :param complex_listeners: Each tuple contains four or five values:
        (LoadBalancerPortNumber, InstancePortNumber, Protocol,
        InstanceProtocol, [SSLCertificateId]).

    :rtype: :class:`boto.ec2.elb.loadbalancer.LoadBalancer`
    :return: The newly created LoadBalancer, or None when neither
        ``listeners`` nor ``complex_listeners`` was supplied.
    """
    if not listeners and not complex_listeners:
        # The API requires at least one listener definition.
        return None

    params = {'LoadBalancerName': name,
              'Scheme': scheme}

    # Legacy (three/four element) listeners.
    if listeners:
        for index, listener in enumerate(listeners):
            i = index + 1
            protocol = listener[2].upper()
            params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
            params['Listeners.member.%d.InstancePort' % i] = listener[1]
            params['Listeners.member.%d.Protocol' % i] = listener[2]
            if protocol in ('HTTPS', 'SSL'):
                # Secure listeners must carry the certificate ARN.
                params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]

    # Full (four/five element) listeners with an explicit instance
    # protocol.  (The original bound listener[3].upper() to an unused
    # local 'InstanceProtocol'; that dead assignment has been removed.)
    if complex_listeners:
        for index, listener in enumerate(complex_listeners):
            i = index + 1
            protocol = listener[2].upper()
            params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
            params['Listeners.member.%d.InstancePort' % i] = listener[1]
            params['Listeners.member.%d.Protocol' % i] = listener[2]
            params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
            if protocol in ('HTTPS', 'SSL'):
                params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]

    if zones:
        self.build_list_params(params, zones, 'AvailabilityZones.member.%d')
    if subnets:
        self.build_list_params(params, subnets, 'Subnets.member.%d')
    if security_groups:
        self.build_list_params(params, security_groups,
                               'SecurityGroups.member.%d')

    load_balancer = self.get_object('CreateLoadBalancer',
                                    params, LoadBalancer)
    # Mirror the request arguments onto the returned object; the
    # CreateLoadBalancer response itself only carries the DNS name.
    load_balancer.name = name
    load_balancer.listeners = listeners
    load_balancer.availability_zones = zones
    load_balancer.subnets = subnets
    load_balancer.security_groups = security_groups
    return load_balancer
def create_load_balancer_listeners(self, name, listeners=None,
                                   complex_listeners=None):
    """
    Creates a listener (or group of listeners) for an existing
    load balancer.

    :type name: string
    :param name: The name of the load balancer to create the listeners
        for.

    :type listeners: list of tuples
    :param listeners: Each tuple contains three or four values:
        (LoadBalancerPortNumber, InstancePortNumber, Protocol,
        [SSLCertificateId]).  Ports are integers between 1 and 65535,
        Protocol is 'TCP', 'SSL', 'HTTP' or 'HTTPS'; SSLCertificateId
        (an AWS IAM certificate ARN) is required for HTTPS/SSL.

    :type complex_listeners: list of tuples
    :param complex_listeners: Each tuple contains four or five values:
        (LoadBalancerPortNumber, InstancePortNumber, Protocol,
        InstanceProtocol, [SSLCertificateId]).

    :return: The status of the request, or None when neither listener
        argument was supplied.
    """
    if not listeners and not complex_listeners:
        # The API requires at least one listener definition.
        return None

    params = {'LoadBalancerName': name}

    # Simple (three/four element) listeners.
    if listeners:
        for index, listener in enumerate(listeners):
            i = index + 1
            protocol = listener[2].upper()
            params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
            params['Listeners.member.%d.InstancePort' % i] = listener[1]
            params['Listeners.member.%d.Protocol' % i] = listener[2]
            if protocol in ('HTTPS', 'SSL'):
                params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]

    # Full (four/five element) listeners.  (The original bound
    # listener[3].upper() to an unused local 'InstanceProtocol'; that
    # dead assignment has been removed.)
    if complex_listeners:
        for index, listener in enumerate(complex_listeners):
            i = index + 1
            protocol = listener[2].upper()
            params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
            params['Listeners.member.%d.InstancePort' % i] = listener[1]
            params['Listeners.member.%d.Protocol' % i] = listener[2]
            params['Listeners.member.%d.InstanceProtocol' % i] = listener[3]
            if protocol in ('HTTPS', 'SSL'):
                params['Listeners.member.%d.SSLCertificateId' % i] = listener[4]

    return self.get_status('CreateLoadBalancerListeners', params)
def delete_load_balancer(self, name):
    """
    Delete a load balancer from your account.

    :type name: string
    :param name: The name of the load balancer to delete.
    """
    # A single-key parameter dict is all this action needs.
    return self.get_status('DeleteLoadBalancer',
                           {'LoadBalancerName': name})
def delete_load_balancer_listeners(self, name, ports):
    """
    Deletes a load balancer listener (or group of listeners).

    :type name: string
    :param name: The name of the load balancer the listeners belong to.

    :type ports: list of int
    :param ports: Each int is an ELB port whose listener is removed.

    :return: The status of the request.
    """
    params = {'LoadBalancerName': name}
    # Listener ports are sent as a 1-based member list.
    for member, port in enumerate(ports, 1):
        params['LoadBalancerPorts.member.%d' % member] = port
    return self.get_status('DeleteLoadBalancerListeners', params)
def enable_availability_zones(self, load_balancer_name, zones_to_add):
    """
    Add availability zones to an existing load balancer.

    All zones must be in the same region as the load balancer; adding
    zones that are already registered has no effect.

    :type load_balancer_name: string
    :param load_balancer_name: The name of the load balancer.

    :type zones_to_add: list of strings
    :param zones_to_add: The name of the zone(s) to add.

    :rtype: list of strings
    :return: An updated list of zones for this load balancer.
    """
    params = {'LoadBalancerName': load_balancer_name}
    self.build_list_params(params, zones_to_add,
                           'AvailabilityZones.member.%d')
    # The response wraps the resulting zone list; unwrap it for callers.
    zone_response = self.get_object('EnableAvailabilityZonesForLoadBalancer',
                                    params, LoadBalancerZones)
    return zone_response.zones
def disable_availability_zones(self, load_balancer_name, zones_to_remove):
    """
    Remove availability zones from an existing load balancer.

    All zones must be in the same region as the load balancer; removing
    zones that are not registered has no effect.  You cannot remove all
    zones from a load balancer.

    :type load_balancer_name: string
    :param load_balancer_name: The name of the load balancer.

    :type zones_to_remove: list of strings
    :param zones_to_remove: The name of the zone(s) to remove.

    :rtype: list of strings
    :return: An updated list of zones for this load balancer.
    """
    params = {'LoadBalancerName': load_balancer_name}
    self.build_list_params(params, zones_to_remove,
                           'AvailabilityZones.member.%d')
    # The response wraps the resulting zone list; unwrap it for callers.
    zone_response = self.get_object('DisableAvailabilityZonesForLoadBalancer',
                                    params, LoadBalancerZones)
    return zone_response.zones
def register_instances(self, load_balancer_name, instances):
    """
    Add new instances to an existing load balancer.

    :type load_balancer_name: string
    :param load_balancer_name: The name of the load balancer.

    :type instances: list of strings
    :param instances: The instance IDs of the EC2 instances to add.

    :rtype: list of strings
    :return: An updated list of instances for this load balancer.
    """
    params = {'LoadBalancerName': load_balancer_name}
    self.build_list_params(params, instances,
                           'Instances.member.%d.InstanceId')
    return self.get_list('RegisterInstancesWithLoadBalancer',
                         params, [('member', InstanceInfo)])
def deregister_instances(self, load_balancer_name, instances):
    """
    Remove instances from an existing load balancer.

    :type load_balancer_name: string
    :param load_balancer_name: The name of the load balancer.

    :type instances: list of strings
    :param instances: The instance IDs of the EC2 instances to remove.

    :rtype: list of strings
    :return: An updated list of instances for this load balancer.
    """
    params = {'LoadBalancerName': load_balancer_name}
    self.build_list_params(params, instances,
                           'Instances.member.%d.InstanceId')
    return self.get_list('DeregisterInstancesFromLoadBalancer',
                         params, [('member', InstanceInfo)])
def describe_instance_health(self, load_balancer_name, instances=None):
    """
    Get the current state of all instances registered to a load
    balancer.

    :type load_balancer_name: string
    :param load_balancer_name: The name of the load balancer.

    :type instances: list of strings
    :param instances: The instance IDs of the EC2 instances to return
        status for.  If not provided, the state of all instances is
        returned.

    :rtype: list of :class:`boto.ec2.elb.instancestate.InstanceState`
    :return: List of state info for instances in this load balancer.
    """
    params = {'LoadBalancerName': load_balancer_name}
    if instances:
        # Restrict the query to the requested instance IDs.
        self.build_list_params(params, instances,
                               'Instances.member.%d.InstanceId')
    return self.get_list('DescribeInstanceHealth', params,
                         [('member', InstanceState)])
def configure_health_check(self, name, health_check):
    """
    Define a health check for the instances behind a load balancer.

    :type name: string
    :param name: The mnemonic name associated with the load balancer.

    :type health_check: :class:`boto.ec2.elb.healthcheck.HealthCheck`
    :param health_check: A HealthCheck object populated with the
        desired values.

    :rtype: :class:`boto.ec2.elb.healthcheck.HealthCheck`
    :return: The updated :class:`boto.ec2.elb.healthcheck.HealthCheck`
    """
    # Flatten the HealthCheck object into request parameters.
    params = {'LoadBalancerName': name}
    params['HealthCheck.Timeout'] = health_check.timeout
    params['HealthCheck.Target'] = health_check.target
    params['HealthCheck.Interval'] = health_check.interval
    params['HealthCheck.UnhealthyThreshold'] = health_check.unhealthy_threshold
    params['HealthCheck.HealthyThreshold'] = health_check.healthy_threshold
    return self.get_object('ConfigureHealthCheck', params, HealthCheck)
def set_lb_listener_SSL_certificate(self, lb_name, lb_port,
                                    ssl_certificate_id):
    """
    Sets the certificate that terminates the specified listener's SSL
    connections, replacing any prior certificate that was used on the
    same load balancer and port.
    """
    params = {
        'LoadBalancerName': lb_name,
        'LoadBalancerPort': lb_port,
        'SSLCertificateId': ssl_certificate_id,
    }
    return self.get_status('SetLoadBalancerListenerSSLCertificate', params)
def create_app_cookie_stickiness_policy(self, name, lb_name, policy_name):
    """
    Generates a stickiness policy whose session lifetime follows that of
    an application-generated cookie.  Only valid for HTTP listeners.

    Unlike CreateLBCookieStickinessPolicy, the special Elastic Load
    Balancing cookie follows the lifetime of the application cookie
    named in the policy; a new stickiness cookie is only inserted when
    the application response includes a new application cookie.  If the
    application cookie is removed or expires, the session stops being
    sticky until a new application cookie is issued.
    """
    return self.get_status('CreateAppCookieStickinessPolicy',
                           {'CookieName': name,
                            'LoadBalancerName': lb_name,
                            'PolicyName': policy_name})
def create_lb_cookie_stickiness_policy(self, cookie_expiration_period,
                                       lb_name, policy_name):
    """
    Generates a stickiness policy with sticky session lifetimes
    controlled by the lifetime of the browser (user-agent) or a
    specified expiration period.  Only valid for HTTP listeners.

    The load balancer uses a special cookie to track the backend server
    for each request.  If the cookie is present, the request is sent to
    the server named in it; otherwise a server is chosen by the normal
    load-balancing algorithm and a cookie binding subsequent requests to
    that server is inserted into the response.  Cookie validity is based
    on the expiration time in the policy configuration.

    ``cookie_expiration_period`` may be None.
    """
    params = {'LoadBalancerName': lb_name,
              'PolicyName': policy_name}
    # Omit the expiration entirely when the caller passed None.
    if cookie_expiration_period is not None:
        params['CookieExpirationPeriod'] = cookie_expiration_period
    return self.get_status('CreateLBCookieStickinessPolicy', params)
def create_lb_policy(self, lb_name, policy_name, policy_type,
                     policy_attributes):
    """
    Creates a new policy that contains the necessary attributes
    depending on the policy type.  Policies are settings that are saved
    for your load balancer and that can be applied to the front-end
    listener or the back-end application server.

    :type lb_name: string
    :param lb_name: The name of the load balancer.

    :type policy_name: string
    :param policy_name: The name to give the new policy.

    :type policy_type: string
    :param policy_type: The policy type name.

    :type policy_attributes: dict
    :param policy_attributes: Attribute name/value pairs for the policy;
        may be empty.

    :return: The status of the request.
    """
    params = {'LoadBalancerName': lb_name,
              'PolicyName': policy_name,
              'PolicyTypeName': policy_type}
    # The original attached ``else`` to the ``for`` loop; since the loop
    # never breaks, the empty 'PolicyAttributes' marker was sent on
    # EVERY call, even when attributes were supplied.  It also used the
    # Python-2-only dict.iteritems().
    if policy_attributes:
        for index, (attr_name, attr_value) in \
                enumerate(policy_attributes.items(), 1):
            params['PolicyAttributes.member.%d.AttributeName' % index] = attr_name
            params['PolicyAttributes.member.%d.AttributeValue' % index] = attr_value
    else:
        # Explicitly send an empty member so the API accepts a policy
        # with no attributes.
        params['PolicyAttributes'] = ''
    return self.get_status('CreateLoadBalancerPolicy', params)
def delete_lb_policy(self, lb_name, policy_name):
    """
    Deletes a policy from the load balancer.  The specified policy must
    not be enabled for any listeners.
    """
    return self.get_status('DeleteLoadBalancerPolicy',
                           {'LoadBalancerName': lb_name,
                            'PolicyName': policy_name})
def set_lb_policies_of_listener(self, lb_name, lb_port, policies):
    """
    Associates, updates, or disables a policy with a listener on the
    load balancer.  Currently at most one policy can be associated with
    a listener.
    """
    request = {'LoadBalancerName': lb_name,
               'LoadBalancerPort': lb_port}
    # Sending an empty member list disables any existing policy.
    self.build_list_params(request, policies, 'PolicyNames.member.%d')
    return self.get_status('SetLoadBalancerPoliciesOfListener', request)
def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies):
    """
    Replaces the current set of policies associated with a port on which
    the back-end server is listening with a new set of policies.
    """
    request = {'LoadBalancerName': lb_name,
               'InstancePort': instance_port}
    if policies:
        self.build_list_params(request, policies, 'PolicyNames.member.%d')
    else:
        # An explicit empty value clears all policies for the port.
        request['PolicyNames'] = ''
    return self.get_status('SetLoadBalancerPoliciesForBackendServer', request)
def apply_security_groups_to_lb(self, name, security_groups):
    """
    Applies security groups to the load balancer.  Applying security
    groups that are already registered with the load balancer has no
    effect.

    :type name: string
    :param name: The name of the load balancer.

    :type security_groups: list of strings
    :param security_groups: The name of the security group(s) to add.

    :rtype: list of strings
    :return: An updated list of security groups for this load balancer.
    """
    request = {'LoadBalancerName': name}
    self.build_list_params(request, security_groups,
                           'SecurityGroups.member.%d')
    return self.get_list('ApplySecurityGroupsToLoadBalancer',
                         request, None)
def attach_lb_to_subnets(self, name, subnets):
    """
    Attaches the load balancer to one or more subnets.  Attaching
    subnets that are already registered with the load balancer has no
    effect.

    :type name: string
    :param name: The name of the load balancer.

    :type subnets: list of strings
    :param subnets: The name of the subnet(s) to add.

    :rtype: list of strings
    :return: An updated list of subnets for this load balancer.
    """
    request = {'LoadBalancerName': name}
    self.build_list_params(request, subnets, 'Subnets.member.%d')
    return self.get_list('AttachLoadBalancerToSubnets',
                         request, None)
def detach_lb_from_subnets(self, name, subnets):
    """
    Detaches the load balancer from one or more subnets.

    :type name: string
    :param name: The name of the load balancer.

    :type subnets: list of strings
    :param subnets: The name of the subnet(s) to detach.

    :rtype: list of strings
    :return: An updated list of subnets for this load balancer.
    """
    request = {'LoadBalancerName': name}
    self.build_list_params(request, subnets, 'Subnets.member.%d')
    return self.get_list('DetachLoadBalancerFromSubnets',
                         request, None)
| mit |
tmerrick1/spack | var/spack/repos/builtin/packages/py-elasticsearch/package.py | 5 | 2066 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyElasticsearch(PythonPackage):
    """Python client for Elasticsearch"""

    homepage = "https://github.com/elastic/elasticsearch-py"
    url = "https://pypi.io/packages/source/e/elasticsearch/elasticsearch-5.2.0.tar.gz"

    version('5.2.0', '66692fd1b4189039206c2fde4a4d616a')
    version('2.3.0', '2550f3b51629cf1ef9636608af92c340')

    # Build/runtime dependencies.
    depends_on('py-setuptools', type='build')
    depends_on('py-urllib3@1.8:1.999', type=('build', 'run'))

    # tests_require
    # depends_on('py-requests@1.0.0:2.9.999', type=('build', 'run'))
    # depends_on('py-nose', type=('build', 'run'))
    # depends_on('py-coverage', type=('build', 'run'))
    # depends_on('py-mock', type=('build', 'run'))
    # depends_on('py-pyyaml', type=('build', 'run'))
    # depends_on('py-nosexcover', type=('build', 'run'))
| lgpl-2.1 |
adviti/melange | thirdparty/google_appengine/lib/django_1_2/tests/modeltests/field_subclassing/tests.py | 42 | 2795 | from django.core import serializers
from django.test import TestCase
from fields import Small
from models import DataModel, MyModel, OtherModel
class CustomField(TestCase):
    """Tests for model fields whose Python value is a custom type."""

    def test_defer(self):
        # A custom field must survive a defer()/save()/reload round trip.
        d = DataModel.objects.create(data=[1, 2, 3])
        self.assertTrue(isinstance(d.data, list))

        d = DataModel.objects.get(pk=d.pk)
        self.assertTrue(isinstance(d.data, list))
        self.assertEqual(d.data, [1, 2, 3])

        d = DataModel.objects.defer("data").get(pk=d.pk)
        d.save()

        d = DataModel.objects.get(pk=d.pk)
        self.assertTrue(isinstance(d.data, list))
        self.assertEqual(d.data, [1, 2, 3])

    def test_custom_field(self):
        # Creating a model with custom fields is done as per normal.
        s = Small(1, 2)
        self.assertEqual(str(s), "12")
        m = MyModel.objects.create(name="m", data=s)

        # Custom fields still have normal field's attributes.
        self.assertEqual(m._meta.get_field("data").verbose_name, "small field")

        # The m.data attribute has been initialised correctly. It's a Small
        # object.
        self.assertEqual((m.data.first, m.data.second), (1, 2))

        # The data loads back from the database correctly and 'data' has the
        # right type.
        m1 = MyModel.objects.get(pk=m.pk)
        self.assertTrue(isinstance(m1.data, Small))
        self.assertEqual(str(m1.data), "12")

        # We can do normal filtering on the custom field (and will get an error
        # when we use a lookup type that does not make sense).
        s1 = Small(1, 3)
        s2 = Small("a", "b")
        self.assertQuerysetEqual(
            MyModel.objects.filter(data__in=[s, s1, s2]), [
                "m",
            ],
            lambda m: m.name,
        )
        self.assertRaises(TypeError, lambda: MyModel.objects.filter(data__lt=s))

        # Serialization works, too.
        stream = serializers.serialize("json", MyModel.objects.all())
        self.assertEqual(stream, '[{"pk": 1, "model": "field_subclassing.mymodel", "fields": {"data": "12", "name": "m"}}]')
        obj = list(serializers.deserialize("json", stream))[0]
        self.assertEqual(obj.object, m)

        # Test retrieving custom field data
        m.delete()
        m1 = MyModel.objects.create(name="1", data=Small(1, 2))
        m2 = MyModel.objects.create(name="2", data=Small(2, 3))
        self.assertQuerysetEqual(
            MyModel.objects.all(), [
                "12",
                "23",
            ],
            lambda m: str(m.data)
        )

    def test_field_subclassing(self):
        # Round-trip a subclassed field value through create/get.
        o = OtherModel.objects.create(data=Small("a", "b"))
        o = OtherModel.objects.get()
        self.assertEqual(o.data.first, "a")
        self.assertEqual(o.data.second, "b")
| apache-2.0 |
ojake/django | django/core/validators.py | 99 | 11950 | from __future__ import unicode_literals
import re
from django.core.exceptions import ValidationError
from django.utils import six
from django.utils.deconstruct import deconstructible
from django.utils.encoding import force_text
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.six.moves.urllib.parse import urlsplit, urlunsplit
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
# Values that, when given to validate(), trigger the self.required check.
EMPTY_VALUES = (None, '', [], (), {})
@deconstructible
class RegexValidator(object):
    """Validate a value against a regular expression."""
    regex = ''
    message = _('Enter a valid value.')
    code = 'invalid'
    inverse_match = False
    flags = 0

    def __init__(self, regex=None, message=None, code=None,
                 inverse_match=None, flags=None):
        # Only override the class-level defaults when explicitly given.
        if regex is not None:
            self.regex = regex
        if message is not None:
            self.message = message
        if code is not None:
            self.code = code
        if inverse_match is not None:
            self.inverse_match = inverse_match
        if flags is not None:
            self.flags = flags
        if self.flags and not isinstance(self.regex, six.string_types):
            raise TypeError("If the flags are set, regex must be a regular expression string.")

        if isinstance(self.regex, six.string_types):
            # Compile the regex if it was not passed pre-compiled.
            self.regex = re.compile(self.regex, self.flags)

    def __call__(self, value):
        """
        Validates that the input matches the regular expression if
        inverse_match is False, otherwise raises ValidationError.
        """
        matched = bool(self.regex.search(force_text(value)))
        # Valid when inverse_match differs from the match result.
        if self.inverse_match is matched:
            raise ValidationError(self.message, code=self.code)

    def __eq__(self, other):
        return (
            isinstance(other, RegexValidator) and
            self.regex.pattern == other.regex.pattern and
            self.regex.flags == other.regex.flags and
            (self.message == other.message) and
            (self.code == other.code) and
            (self.inverse_match == other.inverse_match)
        )

    def __ne__(self, other):
        return not (self == other)
@deconstructible
class URLValidator(RegexValidator):
    """Validate that a value is a well-formed URL with an allowed scheme."""
    ul = '\u00a1-\uffff'  # unicode letters range (must be a unicode string, not a raw string)

    # IP patterns
    ipv4_re = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)(?:\.(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}'
    ipv6_re = r'\[[0-9a-f:\.]+\]'  # (simple regex, validated later)

    # Host patterns
    hostname_re = r'[a-z' + ul + r'0-9](?:[a-z' + ul + r'0-9-]*[a-z' + ul + r'0-9])?'
    domain_re = r'(?:\.(?!-)[a-z' + ul + r'0-9-]*(?<!-))*'
    tld_re = r'\.(?:[a-z' + ul + r']{2,}|xn--[a-z0-9]+)\.?'
    host_re = '(' + hostname_re + domain_re + tld_re + '|localhost)'

    regex = re.compile(
        r'^(?:[a-z0-9\.\-]*)://'  # scheme is validated separately
        r'(?:\S+(?::\S*)?@)?'  # user:pass authentication
        r'(?:' + ipv4_re + '|' + ipv6_re + '|' + host_re + ')'
        r'(?::\d{2,5})?'  # port
        r'(?:[/?#][^\s]*)?'  # resource path
        r'\Z', re.IGNORECASE)
    message = _('Enter a valid URL.')
    schemes = ['http', 'https', 'ftp', 'ftps']

    def __init__(self, schemes=None, **kwargs):
        super(URLValidator, self).__init__(**kwargs)
        if schemes is not None:
            self.schemes = schemes

    def __call__(self, value):
        value = force_text(value)
        # Check first if the scheme is valid
        scheme = value.split('://')[0].lower()
        if scheme not in self.schemes:
            raise ValidationError(self.message, code=self.code)

        # Then check full URL
        try:
            super(URLValidator, self).__call__(value)
        except ValidationError as e:
            # Trivial case failed. Try for possible IDN domain
            if value:
                scheme, netloc, path, query, fragment = urlsplit(value)
                try:
                    netloc = netloc.encode('idna').decode('ascii')  # IDN -> ACE
                except UnicodeError:  # invalid domain part
                    raise e
                url = urlunsplit((scheme, netloc, path, query, fragment))
                super(URLValidator, self).__call__(url)
            else:
                raise
        else:
            # Now verify IPv6 in the netloc part
            host_match = re.search(r'^\[(.+)\](?::\d{2,5})?$', urlsplit(value).netloc)
            if host_match:
                potential_ip = host_match.groups()[0]
                try:
                    validate_ipv6_address(potential_ip)
                except ValidationError:
                    raise ValidationError(self.message, code=self.code)
        # NOTE: the original ended with a dead "url = value" assignment
        # whose result was never used; it has been removed.
# Optional sign followed by one or more digits, nothing else.
integer_validator = RegexValidator(
    re.compile('^-?\d+\Z'),
    message=_('Enter a valid integer.'),
    code='invalid',
)


def validate_integer(value):
    """Raise ValidationError unless *value* looks like an integer."""
    return integer_validator(value)
@deconstructible
class EmailValidator(object):
    """Validate that a value is a plausible email address."""
    message = _('Enter a valid email address.')
    code = 'invalid'
    user_regex = re.compile(
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"  # dot-atom
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)',  # quoted-string
        re.IGNORECASE)
    domain_regex = re.compile(
        # max length for domain name labels is 63 characters per RFC 1034
        r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z',
        re.IGNORECASE)
    literal_regex = re.compile(
        # literal form, ipv4 or ipv6 address (SMTP 4.1.3)
        r'\[([A-f0-9:\.]+)\]\Z',
        re.IGNORECASE)
    domain_whitelist = ['localhost']

    def __init__(self, message=None, code=None, whitelist=None):
        if message is not None:
            self.message = message
        if code is not None:
            self.code = code
        if whitelist is not None:
            self.domain_whitelist = whitelist

    def __call__(self, value):
        value = force_text(value)

        if not value or '@' not in value:
            raise ValidationError(self.message, code=self.code)

        user_part, domain_part = value.rsplit('@', 1)

        if not self.user_regex.match(user_part):
            raise ValidationError(self.message, code=self.code)

        if (domain_part not in self.domain_whitelist and
                not self.validate_domain_part(domain_part)):
            # Try for possible IDN domain-part
            try:
                domain_part = domain_part.encode('idna').decode('ascii')
                if self.validate_domain_part(domain_part):
                    return
            except UnicodeError:
                pass
            raise ValidationError(self.message, code=self.code)

    def validate_domain_part(self, domain_part):
        """Return True when the domain part is a valid name or IP literal."""
        if self.domain_regex.match(domain_part):
            return True

        literal_match = self.literal_regex.match(domain_part)
        if literal_match:
            ip_address = literal_match.group(1)
            try:
                validate_ipv46_address(ip_address)
                return True
            except ValidationError:
                pass
        return False

    def __eq__(self, other):
        return (
            isinstance(other, EmailValidator) and
            (self.domain_whitelist == other.domain_whitelist) and
            (self.message == other.message) and
            (self.code == other.code)
        )

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; without this,
        # equal validators compared unequal with "!=".  Mirrors
        # RegexValidator.__ne__ for consistency.
        return not (self == other)
# Ready-to-use module-level validator instances.
validate_email = EmailValidator()

# Slug: ASCII letters, digits, hyphens and underscores only.
slug_re = re.compile(r'^[-a-zA-Z0-9_]+\Z')
validate_slug = RegexValidator(
    slug_re,
    _("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."),
    'invalid'
)

# Unicode-aware slug variant.
slug_unicode_re = re.compile(r'^[-\w]+\Z', re.U)
validate_unicode_slug = RegexValidator(
    slug_unicode_re,
    _("Enter a valid 'slug' consisting of Unicode letters, numbers, underscores, or hyphens."),
    'invalid'
)

# Dotted-quad IPv4 addresses, each octet in 0-255.
ipv4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\Z')
validate_ipv4_address = RegexValidator(ipv4_re, _('Enter a valid IPv4 address.'), 'invalid')
def validate_ipv6_address(value):
    """Raise ValidationError unless *value* is a valid IPv6 address."""
    if not is_valid_ipv6_address(value):
        raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid')
def validate_ipv46_address(value):
    """Raise ValidationError unless *value* is a valid IPv4 or IPv6 address."""
    try:
        validate_ipv4_address(value)
    except ValidationError:
        # Not IPv4 -- fall back to IPv6 before giving up.
        try:
            validate_ipv6_address(value)
        except ValidationError:
            raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
# Maps each protocol keyword to (validator list, error message).
ip_address_validator_map = {
    'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')),
    'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')),
    'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')),
}


def ip_address_validators(protocol, unpack_ipv4):
    """
    Depending on the given parameters returns the appropriate validators
    for the GenericIPAddressField.

    This code lives here because it is exactly the same for the model
    and the form field.
    """
    if protocol != 'both' and unpack_ipv4:
        raise ValueError(
            "You can only use `unpack_ipv4` if `protocol` is set to 'both'")
    try:
        return ip_address_validator_map[protocol.lower()]
    except KeyError:
        raise ValueError("The protocol '%s' is unknown. Supported: %s"
                         % (protocol, list(ip_address_validator_map)))
def int_list_validator(sep=',', message=None, code='invalid'):
    """Return a RegexValidator matching integers separated by *sep*."""
    regexp = re.compile('^\d+(?:%s\d+)*\Z' % re.escape(sep))
    return RegexValidator(regexp, message=message, code=code)


validate_comma_separated_integer_list = int_list_validator(
    message=_('Enter only digits separated by commas.'),
)
@deconstructible
class BaseValidator(object):
    """
    Base class for validators comparing a cleaned value against a limit.

    Subclasses override ``compare`` (and optionally ``clean``) to
    implement a specific check.
    """
    message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).')
    code = 'limit_value'

    def compare(self, a, b):
        # Assigned lambdas (PEP 8 E731) converted to real methods;
        # behavior is unchanged and subclass overrides still apply.
        return a is not b

    def clean(self, x):
        return x

    def __init__(self, limit_value, message=None):
        self.limit_value = limit_value
        if message:
            self.message = message

    def __call__(self, value):
        cleaned = self.clean(value)
        params = {'limit_value': self.limit_value, 'show_value': cleaned, 'value': value}
        if self.compare(cleaned, self.limit_value):
            raise ValidationError(self.message, code=self.code, params=params)

    def __eq__(self, other):
        return (
            isinstance(other, self.__class__) and
            (self.limit_value == other.limit_value)
            and (self.message == other.message)
            and (self.code == other.code)
        )
@deconstructible
class MaxValueValidator(BaseValidator):
    """Validate that a value is less than or equal to the limit value."""
    message = _('Ensure this value is less than or equal to %(limit_value)s.')
    code = 'max_value'

    def compare(self, a, b):
        # Assigned lambda (PEP 8 E731) converted to a method; unchanged.
        return a > b
@deconstructible
class MinValueValidator(BaseValidator):
    """Validate that a value is greater than or equal to the limit value."""
    message = _('Ensure this value is greater than or equal to %(limit_value)s.')
    code = 'min_value'

    def compare(self, a, b):
        # Assigned lambda (PEP 8 E731) converted to a method; unchanged.
        return a < b
@deconstructible
class MinLengthValidator(BaseValidator):
    """Validate that a value's length is at least the limit value."""
    message = ungettext_lazy(
        'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).',
        'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).',
        'limit_value')
    code = 'min_length'

    def compare(self, a, b):
        # Assigned lambdas (PEP 8 E731) converted to methods; unchanged.
        return a < b

    def clean(self, x):
        return len(x)
@deconstructible
class MaxLengthValidator(BaseValidator):
    """Validate that a value's length is at most the limit value."""
    message = ungettext_lazy(
        'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).',
        'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).',
        'limit_value')
    code = 'max_length'

    def compare(self, a, b):
        # Assigned lambdas (PEP 8 E731) converted to methods; unchanged.
        return a > b

    def clean(self, x):
        return len(x)
| bsd-3-clause |
bellhops/airflow | airflow/operators/mysql_operator.py | 9 | 1170 | import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
    """
    Executes sql code in a specific MySQL database

    :param mysql_conn_id: reference to a specific mysql database
    :type mysql_conn_id: string
    :param sql: the sql code to be executed
    :type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or reference to a template file.
        Template reference are recognized by str ending in '.sql'
    """

    # Fields rendered through the templating engine.
    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'

    @apply_defaults
    def __init__(self, sql, mysql_conn_id='mysql_default', parameters=None,
                 *args, **kwargs):
        super(MySqlOperator, self).__init__(*args, **kwargs)
        self.mysql_conn_id = mysql_conn_id
        self.sql = sql
        self.parameters = parameters

    def execute(self, context):
        # Log the (already templated) statement, then run it via the hook.
        logging.info('Executing: ' + str(self.sql))
        hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        hook.run(self.sql, parameters=self.parameters)
| apache-2.0 |
mortonjt/scipy | scipy/weave/examples/increment_example.py | 92 | 1104 | # examples/increment_example.py
# from weave import ext_tools
# use the following so that development version is used.
from __future__ import absolute_import, print_function
import sys
sys.path.insert(0,'..')
import ext_tools
def build_increment_ext():
    """ Build a simple extension with functions that increment numbers.
        The extension will be built in the local directory.
    """
    mod = ext_tools.ext_module('increment_ext')

    # Effectively a type declaration for 'a' in the following functions.
    # ext_tools infers the C type of each argument from this sample value.
    a = 1

    # C snippet: 'return_val' is the slot weave uses for the return value.
    ext_code = "return_val = PyInt_FromLong(a+1);"
    func = ext_tools.ext_function('increment',ext_code,['a'])
    mod.add_function(func)

    ext_code = "return_val = PyInt_FromLong(a+2);"
    func = ext_tools.ext_function('increment_by_2',ext_code,['a'])
    mod.add_function(func)

    # Compiles the C sources and writes increment_ext.* next to this script.
    mod.compile()
if __name__ == "__main__":
try:
import increment_ext
except ImportError:
build_increment_ext()
import increment_ext
a = 1
print('a, a+1:', a, increment_ext.increment(a))
print('a, a+2:', a, increment_ext.increment_by_2(a))
| bsd-3-clause |
fertozudo/umatoo | lib/django/core/management/commands/diffsettings.py | 479 | 1565 | from django.core.management.base import BaseCommand
def module_to_dict(module, omittable=lambda k: k.startswith('_')):
    """Convert a module namespace to a dict of name -> repr(value).

    Names for which *omittable* returns True (by default anything that
    starts with an underscore) are left out.
    """
    result = {}
    for name, value in module.__dict__.items():
        if omittable(name):
            continue
        result[name] = repr(value)
    return result
class Command(BaseCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings. Settings that don't appear in the defaults are
    followed by "###"."""

    # This command only reads settings; it never touches models or the DB.
    requires_system_checks = False

    def add_arguments(self, parser):
        # --all switches from "differences only" to a full settings dump.
        parser.add_argument('--all', action='store_true', dest='all', default=False,
                            help='Display all settings, regardless of their value. '
                                 'Default values are prefixed by "###".')

    def handle(self, **options):
        # Inspired by Postfix's "postconf -n".
        from django.conf import settings, global_settings

        # Because settings are imported lazily, we need to explicitly load them.
        settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default_settings = module_to_dict(global_settings)

        output = []
        for key in sorted(user_settings):
            if key not in default_settings:
                # Setting exists only in the user's settings module.
                output.append("%s = %s ###" % (key, user_settings[key]))
            elif user_settings[key] != default_settings[key]:
                # Setting overrides a Django default.
                output.append("%s = %s" % (key, user_settings[key]))
            elif options['all']:
                # Unchanged default, shown only with --all.
                output.append("### %s = %s" % (key, user_settings[key]))
        return '\n'.join(output)
| bsd-3-clause |
dysya92/monkeys | flask/lib/python2.7/site-packages/pytz/tzfile.py | 480 | 4869 | #!/usr/bin/env python
'''
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
'''
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from datetime import datetime, timedelta
from struct import unpack, calcsize
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
from pytz.tzinfo import memorized_datetime, memorized_timedelta
def _byte_string(s):
"""Cast a string or byte string to an ASCII byte string."""
return s.encode('US-ASCII')
_NULL = _byte_string('\0')
def _std_string(s):
"""Cast a string or byte string to an ASCII string."""
return str(s.decode('US-ASCII'))
def build_tzinfo(zone, fp):
    """Construct a tzinfo instance for *zone* from an open tzfile(5) stream.

    Parses the binary TZif data read from *fp* and returns an instance of
    a dynamically created StaticTzInfo subclass (no transitions) or
    DstTzInfo subclass (one or more UTC transition times).
    """
    # Header: 4-byte magic, format version byte, 15 reserved bytes,
    # then six 32-bit big-endian counts.
    head_fmt = '>4s c 15x 6l'
    head_size = calcsize(head_fmt)
    (magic, format, ttisgmtcnt, ttisstdcnt,leapcnt, timecnt,
        typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))

    # Make sure it is a tzfile(5) file
    assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)

    # Read out the transition times, localtime indices and ttinfo structures.
    data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
        timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
    data_size = calcsize(data_fmt)
    data = unpack(data_fmt, fp.read(data_size))

    # make sure we unpacked the right number of values
    assert len(data) == 2 * timecnt + 3 * typecnt + 1
    transitions = [memorized_datetime(trans)
                   for trans in data[:timecnt]]
    lindexes = list(data[timecnt:2 * timecnt])
    ttinfo_raw = data[2 * timecnt:-1]
    tznames_raw = data[-1]
    del data

    # Process ttinfo into separate structs: (utcoffset, isdst, tzname).
    ttinfo = []
    tznames = {}
    i = 0
    while i < len(ttinfo_raw):
        # have we looked up this timezone name yet?
        tzname_offset = ttinfo_raw[i+2]
        if tzname_offset not in tznames:
            # Names are NUL-terminated strings packed into tznames_raw;
            # a missing terminator means the name runs to the end.
            nul = tznames_raw.find(_NULL, tzname_offset)
            if nul < 0:
                nul = len(tznames_raw)
            tznames[tzname_offset] = _std_string(
                tznames_raw[tzname_offset:nul])
        ttinfo.append((ttinfo_raw[i],
                       bool(ttinfo_raw[i+1]),
                       tznames[tzname_offset]))
        i += 3

    # Now build the timezone object
    if len(transitions) == 0:
        # No-op subscripts: presumably here to fail fast with IndexError
        # if ttinfo is empty before building the class -- TODO confirm.
        ttinfo[0][0], ttinfo[0][2]
        cls = type(zone, (StaticTzInfo,), dict(
            zone=zone,
            _utcoffset=memorized_timedelta(ttinfo[0][0]),
            _tzname=ttinfo[0][2]))
    else:
        # Early dates use the first standard time ttinfo
        i = 0
        while ttinfo[i][1]:
            i += 1
        if ttinfo[i] == ttinfo[lindexes[0]]:
            transitions[0] = datetime.min
        else:
            transitions.insert(0, datetime.min)
            lindexes.insert(0, i)

        # calculate transition info
        transition_info = []
        for i in range(len(transitions)):
            inf = ttinfo[lindexes[i]]
            utcoffset = inf[0]
            if not inf[1]:
                dst = 0
            else:
                # Walk backwards to the most recent standard-time entry
                # and derive the DST offset relative to it.
                for j in range(i-1, -1, -1):
                    prev_inf = ttinfo[lindexes[j]]
                    if not prev_inf[1]:
                        break
                dst = inf[0] - prev_inf[0] # dst offset

                # Bad dst? Look further. DST > 24 hours happens when
                # a timezone has moved across the international dateline.
                if dst <= 0 or dst > 3600*3:
                    for j in range(i+1, len(transitions)):
                        stdinf = ttinfo[lindexes[j]]
                        if not stdinf[1]:
                            dst = inf[0] - stdinf[0]
                            if dst > 0:
                                break # Found a useful std time.

            tzname = inf[2]

            # Round utcoffset and dst to the nearest minute or the
            # datetime library will complain. Conversions to these timezones
            # might be up to plus or minus 30 seconds out, but it is
            # the best we can do.
            utcoffset = int((utcoffset + 30) // 60) * 60
            dst = int((dst + 30) // 60) * 60
            transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))

        cls = type(zone, (DstTzInfo,), dict(
            zone=zone,
            _utc_transition_times=transitions,
            _transition_info=transition_info))

    return cls()
if __name__ == '__main__':
import os.path
from pprint import pprint
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
tz = build_tzinfo('Australia/Melbourne',
open(os.path.join(base,'Australia','Melbourne'), 'rb'))
tz = build_tzinfo('US/Eastern',
open(os.path.join(base,'US','Eastern'), 'rb'))
pprint(tz._utc_transition_times)
#print tz.asPython(4)
#print tz.transitions_mapping
| bsd-3-clause |
jiadaizhao/LeetCode | 1101-1200/1197-Minimum Knight Moves/1197-Minimum Knight Moves.py | 1 | 2318 | import collections
# BFS
class Solution:
    def minKnightMoves(self, x: int, y: int) -> int:
        """Plain breadth-first search from the origin toward (|x|, |y|).

        By symmetry the target is folded into the first quadrant, and the
        search space is clipped to [-2, target+2] in each coordinate.
        """
        if x == 0 and y == 0:
            return 0
        tx, ty = abs(x), abs(y)
        moves = ((-2, -1), (-2, 1), (-1, -2), (-1, 2),
                 (1, -2), (1, 2), (2, -1), (2, 1))
        frontier = collections.deque([(0, 0)])
        seen = {(0, 0)}
        steps = 0
        while frontier:
            steps += 1
            for _ in range(len(frontier)):
                cx, cy = frontier.popleft()
                for dx, dy in moves:
                    nx, ny = cx + dx, cy + dy
                    if nx == tx and ny == ty:
                        return steps
                    if (nx, ny) not in seen and -2 <= nx <= tx + 2 and -2 <= ny <= ty + 2:
                        seen.add((nx, ny))
                        frontier.append((nx, ny))
        return -1
# Bidirectional BFS
class Solution2:
    def minKnightMoves(self, x: int, y: int) -> int:
        """Search from both endpoints, always expanding the smaller frontier."""
        if x == 0 and y == 0:
            return 0
        tx, ty = abs(x), abs(y)
        moves = ((-2, -1), (-2, 1), (-1, -2), (-1, 2),
                 (1, -2), (1, 2), (2, -1), (2, 1))
        front = {(0, 0)}          # frontier grown from the origin
        back = {(tx, ty)}         # frontier grown from the target
        seen = {(0, 0)}           # cells already queued from either side
        steps = 0
        while front and back:
            # Expand whichever frontier is currently smaller.
            if len(front) > len(back):
                front, back = back, front
            steps += 1
            grown = set()
            for cx, cy in front:
                for dx, dy in moves:
                    nx, ny = cx + dx, cy + dy
                    if (nx, ny) in back:
                        return steps
                    if (nx, ny) not in seen and -2 <= nx <= tx + 2 and -2 <= ny <= ty + 2:
                        seen.add((nx, ny))
                        grown.add((nx, ny))
            front = grown
        return -1
import math
class Solution3:
    def minKnightMoves(self, x: int, y: int) -> int:
        """O(1) closed-form answer.

        Fold the target into the octant 0 <= x <= y, handle the few
        near-origin exceptions, then use the lower bound
        max(ceil(y/2), ceil((x+y)/3)) adjusted for move parity.
        """
        if x == 0 and y == 0:
            return 0
        x, y = abs(x), abs(y)
        if x > y:
            x, y = y, x
        # Special cases where the formula under-counts.
        if x == 1 and y == 1:
            return 2
        if x == 2 and y == 2:
            return 4
        if x + y == 1:
            return 3
        lower = max(math.ceil(y / 2), math.ceil((x + y) / 3))
        # Each knight move flips the parity of x+y, so pad by one if needed.
        return lower + ((lower + x + y) % 2)
| mit |
Gnomescroll/Gnomescroll | scripts/lsystem.py | 1 | 10694 | '''
Bernie's L System demo in Python.
This program implements a simple context free L System,
and renders two dimensional images in a window.
It needs (at least) python 2.6, and uses the turtle
graphics library, which is based on TK graphics.
To try it out, either load into Python and run the
demo functions, or run it from the command line:
python lsystem.py
Author: Bernie Pope: www.cs.mu.oz.au/~bjpop/
Licence: unrestricted.
Feel free to play with the code as much as you like
and share your changes with the world.
Some remarks about the implementation:
An L-System is a term rewriting system made up of one or
more rules. Rules have the form:
head -> body
where head is a variable, and body is a non-empty string
made up of variables and constants. Variables are
denoted by upper-case alphabetic letters. Constants
are denoted by any character which is not a variable.
Here is an example L-System:
X -> F-[[X]+X]+F[+FX]-X
F -> FF
In this program the convention is that the first rule
is taken as the starting point.
An LSystem object is constructed like so:
rules = ['X -> F-[[X]+X]+F[+FX]-X', 'F -> FF']
my_system = LSystem(rules)
That is, the LSystem constructor takes a list of strings
as its argument, where each string is a single rule.
An LSystem object doesn't do anything on its own - it must
be interpreted.
LSystem objects have a run method which takes two parameters:
1) An non-negative integer which indicates how many times
the rules should be iterated.
2) An interpreter object which implements an 'interpretTokens'
method.
Here is a simple example:
class SimpleInterp(object):
def interpretTokens(self, tokens): return tokens
answer = my_system().run(6, SimpleInterp())
A more sophisticated interpreter is defined called Visualise
which renders the result of iterating an LSystem as turtle
graphics.
The Visualise constructor takes a dictionary mapping LSystem
variables to functions, here is an example:
{ '-': lambda _ : left(left_angle)
, '+': lambda _ : right(right_angle)
, 'F': lambda _ : forward(fwd_distance)
, '[': lambda obj : obj.push()
, ']': lambda obj : obj.pop()
}
'''
import sys
# import the turtle graphics library
try:
from turtle import *
except ImportError:
print("This program requires the turtle graphics library.")
print("Unfortunately Python cannot find that library on your computer.")
print("See the documentation at: http://docs.python.org/library/turtle.html")
sys.exit(-1)
from collections import deque
py_version = sys.version_info[:2]
if py_version < (2,6):
print("This program requires Python version 2.6 or greater to run.")
print("Your version of Python is " + '.'.join(map(str,py_version)) + ", which is too old.")
sys.exit(-1)
# Some demo functions, which make it relatively easy to use
def interactive_demo():
def show_demo(name, action):
print(name)
action()
input = raw_input("Press any key to continue or q/Q to quit: ")
if input.lower() == 'q':
sys.exit(0)
dragon_demo()
#demo1()
#show_demo("Bushy tree", demo1)
#show_demo("Twiggy tree", demo2)
#show_demo("Koch curve", demo3)
#show_demo("Sierpinski triangle", demo4)
#show_demo("Peano Gosper curve", demo5)
#show_demo("Conifer-like tree", demo6)
#show_demo("Tiles", demo7)
#show_demo("Crystal", demo8)
#show_demo("Peano curve", demo9)
def demo0():
class SimpleInterp(object):
def interpretTokens(self, tokens): return tokens
return bushy_tree().run(6, SimpleInterp())
def demo1():
def init():
initPosition()
left(90)
vis = Visualise(basic_actions(25,25,5), init)
bushy_tree().run(5,vis)
def bushy_tree():
return LSystem(['F -> FF-[-F+F+F]+[+F-F-F]'])
def demo2():
def init():
initPosition()
left(90)
vis = Visualise(basic_actions(25,25,2), init)
twiggy_tree().run(7,vis)
def twiggy_tree():
rules = ['X -> F-[[X]+X]+F[+FX]-X', 'F -> FF']
return LSystem(rules)
def dragon():
rules = ['X -> X+YF', 'Y -> FX-Y', 'F -> F']
return LSystem(rules)
def dragon_demo():
#class SimpleInterp(object):
#def interpretTokens(self, tokens): return tokens
#def init(): initPosition(lambda width, height: (width/2, -height/2))
actions = basic_actions(90,90, 1)
#actions['-'] = lambda _ : left(90)
#actions['+'] = lambda _ : right(90)
def init():
initPosition()
left(90)
vis = Visualise(actions, init)
dragon().run(90, vis)
def demo3():
def init(): initPosition(lambda width, height : (-9*width/20, -height/4))
actions = basic_actions(None,None,0.06)
actions['-'] = lambda _ : right(68)
actions['+'] = lambda _ : left(68)
vis = Visualise(actions, init)
koch().run(7,vis)
def koch():
return LSystem(['F -> F+F-F-F+F'])
def demo4():
def init():
initPosition(lambda width, height : (-3*width/8, -height/4))
actions = basic_actions(60,60,0)
actions['A'] = lambda _ : forward(0.5)
actions['B'] = lambda _ : forward(0.5)
actions['-'] = lambda _ : right(60)
actions['+'] = lambda _ : left(60)
vis = Visualise(actions, init)
sierpinski().run(10,vis)
def sierpinski():
return LSystem(['A -> B-A-B', 'B -> A+B+A'])
def demo5():
def init():
initPosition(lambda width, height : (width/4, 3*height/8))
actions = basic_actions(60,60,4)
vis = Visualise(actions, init)
peano_gosper().run(5,vis)
def peano_gosper():
rules = [ 'X -> X+YF++YF-FX--FXFX-YF+'
, 'Y -> -FX+YFYF++YF+FX--FX-Y'
, 'F -> F' ]
return LSystem(rules)
def demo6():
def init():
initPosition(lambda width, height : (0, -3*height/8))
left(90)
actions = basic_actions(20,20,11)
vis = Visualise(actions, init)
conifer().run(12,vis)
def conifer():
rules = [ 'I -> VZFFF'
, 'V -> [+++W][---W]YV'
, 'W -> +X[-W]Z'
, 'X -> -W[+X]Z'
, 'Y -> YZ'
, 'Z -> [-FFF][+FFF]F'
, 'F -> F' ]
return LSystem(rules)
def demo7():
def init():
initPosition(lambda width, height : (-width/5, 0))
actions = basic_actions(90,90,4)
vis = Visualise(actions, init)
tiles().run(6,vis)
def tiles():
rules = [ 'I -> F+F+F+F'
, 'F -> FF+F-F+F+FF' ]
return LSystem(rules)
def demo8():
def init():
initPosition(lambda width, height : (-width/3, -height/3))
left(90)
actions = basic_actions(90,90,2)
vis = Visualise(actions, init)
crystal().run(6,vis)
def crystal():
rules = ['I -> F+F+F+F', 'F -> FF+F++F+F']
return LSystem(rules)
def demo9():
def init():
initPosition(lambda width, height : (-width/3, -height/3))
left(90)
actions = basic_actions(90,90,2)
vis = Visualise(actions, init)
peano_curve().run(5,vis)
def peano_curve():
rules = [ 'X -> XFYFX+F+YFXFY-F-XFYFX'
, 'Y -> YFXFY-F-XFYFX+F+YFXFY'
, 'F -> F' ]
return LSystem(rules)
class LSystem(object):
    """A context-free L-system compiled from a list of rule strings.

    The first rule is taken as the starting point.  Each rule is turned
    into a recursive Python function by this module's ``compile``
    (NOT the builtin) and executed into this method's local scope.
    """
    def __init__ (self, rules):
        if len(rules) > 0:
            for r in rules:
                # Python 2 exec-statement form 'exec code in namespace';
                # defines one function per rule in locals().  This relies
                # on Python 2 semantics and would not work on Python 3.
                exec(compile(r)) in locals()
            firstRuleName,_ = decomposeRule(rules[0])
            # 'start' becomes visible here via exec-into-locals (Python 2).
            exec('def start(n): return ' + firstRuleName + '(n)') in locals()
            self.rule = start
        else:
            # Degenerate system: always expands to the empty string.
            self.rule = lambda _ : ''
    def run(self, maxIterations, interpreter):
        """Expand the system maxIterations times and hand the token
        string to interpreter.interpretTokens()."""
        return interpreter.interpretTokens(self.rule(maxIterations))
class Visualise (object):
    """Interpreter that renders an L-system token string as turtle graphics.

    Tokens are looked up in an action table (see basic_actions); '[' and
    ']' save/restore the turtle's position and heading on a stack.
    """
    def __init__(self, dict, initCommand=None):
        # NOTE(review): parameter name 'dict' shadows the builtin; kept
        # as-is because callers may pass it by keyword.
        self.actions = dict
        self.initCommand = initCommand
        self.stack = deque()
    def interpretTokens(self, tokens):
        initDisplay()
        if self.initCommand != None: self.initCommand()
        def action_fun(token):
            # Unknown tokens fall through to a no-op; each action is
            # called with this interpreter object as its argument.
            return self.actions.get(token, lambda _ : None)(self)
        self.stack = deque()
        # Python 2: map is eager, so this executes every action.  On
        # Python 3 this line would be a no-op (map is a lazy iterator).
        map (action_fun, tokens)
    def push(self):
        # Save the turtle's current heading and position.
        orient = heading()
        pos = position()
        self.stack.append((orient, pos))
    def pop(self):
        # Restore the most recently saved turtle state without drawing.
        stack = self.stack
        if len(stack) == 0:
            raise Exception('Attempt to pop empty stack')
        (orient, pos) = stack.pop()
        up()
        goto(pos)
        setheading(orient)
        down()
def basic_actions (left_angle, right_angle, fwd_distance):
    """Build the default token -> turtle-action table for Visualise.

    '-' and '+' turn left/right, 'F' draws forward, '[' and ']' save and
    restore the turtle state via the interpreter's stack.  Every action
    receives the interpreter object as its single argument.
    """
    actions = {
        '-': lambda interp: left(left_angle),
        '+': lambda interp: right(right_angle),
        'F': lambda interp: forward(fwd_distance),
        '[': lambda interp: interp.push(),
        ']': lambda interp: interp.pop(),
    }
    return actions
# Configuration of graphics window
def initDisplay(drawColour="black"):
title ("Bernie's L System demo")
setup()
reset()
degrees()
color(drawColour)
# Try to make the animation of drawing reasonably fast.
tracer(50,0) # Only draw every 50th update, set delay to zero.
hideturtle() # don't draw the turtle; increase drawing speed.
def initPosition(mover=lambda width, height : (0, -height/2)):
height = window_height()
width = window_width()
up()
goto (mover (width, height))
down()
'''
The input rule:
X -> X+X+F
is compiled to:
def X(n):
if n > 0:
xn = X(n-1)
fn = F(n-1)
return ''.join([xn,'+',xn,'+',fn])
else:
return 'X'
'''
def compile(rule):
    """Compile one L-system rule string into Python function source.

    'X -> X+F' becomes the source of a recursive function X(n) that
    expands the rule n times and returns the resulting token string
    (X(0) is just 'X').

    NOTE: this deliberately shadows the ``compile`` builtin; existing
    callers in this module depend on the name.
    """
    (name, body) = decomposeRule(rule)
    (vars, listIds) = varsIds(body)
    defPart = 'def ' + name + '(n):'
    varBinds = [mkVarBind(v) for v in vars]
    joinListPart = "''.join([" + ','.join(listIds) + '])'
    ifHead = 'if n > 0:'
    ifBody = varBinds + ['return ' + joinListPart]
    elsePart = 'else: return ' + quote(name)
    # list(map(...)) keeps this correct on both Python 2 and 3: on
    # Python 3 a bare map() is a lazy iterator and cannot be added
    # to a list with '+'.
    return '\n'.join(
        [defPart] + list(map(indent,
            [ifHead] + list(map(indent, ifBody)) + [elsePart])))

def decomposeRule(rule):
    """Split a rule 'H -> body' into (head, body).

    Raises Exception when the rule is malformed: no single '->', a head
    that is not exactly one character, or an empty body.
    """
    splitRule = rule.split('->')
    if len(splitRule) != 2:
        raise Exception("badly formed L-System rule: " + quote(str(rule)))
    name = splitRule[0].strip()
    body = splitRule[1].strip()
    if len(name) != 1 or len(body) == 0:
        raise Exception("badly formed L-System rule: " + quote(str(rule)))
    return (name, body)

def mkVarBind(var):
    """Generate the binding line for a variable, e.g. 'X' -> 'xn = X(n-1)'."""
    return var.lower() + 'n = ' + var + '(n-1)'

def quote(str): return "'" + str + "'"

def indent(str): return ' ' + str

def varsIds(str):
    """Return (set of variables, token expression list) for a rule body.

    Upper-case characters are variables and are emitted as references to
    the corresponding binding ('xn'); anything else is a quoted constant.
    """
    vars = set()
    ids = []  # renamed from 'list': avoid shadowing the builtin
    for c in str:
        if c.isupper():
            vars.add(c)
            ids.append(c.lower()+'n')
        else:
            ids.append(quote(c))
    return (vars, ids)
if __name__ == "__main__":
interactive_demo()
| gpl-3.0 |
cogniteev/docido-python-sdk | tests/test_ha.py | 1 | 2564 | import unittest
from docido_sdk.toolbox.ha import RetryDelaySeries, HA
class TestHA(unittest.TestCase):
    """Unit tests for RetryDelaySeries policies and the HA retry mixin.

    NOTE: assertEquals is a deprecated alias removed in Python 3.12;
    replaced throughout by assertEqual (available on every supported
    Python, so the change is backward-compatible).
    """
    def test_teb_policy(self):
        # Truncated exponential backoff: the i-th draw is a multiple of
        # 'delay' bounded by the collision counter; the series restarts
        # after 'max_collisions' draws.
        teb = RetryDelaySeries.get(
            'truncated_exponential_backoff',
            max_collisions=5,
            delay=10)
        self.assertEqual(next(teb), 0)
        self.assertIn(next(teb), [0, 10])
        self.assertIn(next(teb), [0, 10, 20])
        self.assertIn(next(teb), [0, 10, 20, 30])
        self.assertIn(next(teb), [0, 10, 20, 30, 40])
        # Wrapped around: back to the zero-collision delay.
        self.assertEqual(next(teb), 0)

    def test_linear_policy(self):
        # Linear backoff grows by 'step' and is clamped at 'max_delay'.
        linear = RetryDelaySeries.get(
            'linear',
            delay=10, max_delay=100, step=40
        )
        self.assertEqual(next(linear), 10)
        self.assertEqual(next(linear), 50)
        self.assertEqual(next(linear), 90)
        self.assertEqual(next(linear), 100)
        self.assertEqual(next(linear), 100)
        # No stop
        linear = RetryDelaySeries.get(
            'linear',
            delay=10, step=40
        )
        self.assertEqual(next(linear), 10)
        self.assertEqual(next(linear), 50)
        self.assertEqual(next(linear), 90)
        self.assertEqual(next(linear), 130)
        self.assertEqual(next(linear), 170)

    def test_ha(self):
        # ha_on_error may rewrite the call arguments; the decorated
        # method is retried until it stops raising.
        test_instance = self
        call_count = [0]

        class Foo(HA):
            def __init__(self):
                super(Foo, self).__init__()
                self.ha_config.default.delay = 0

            @HA.catch(RuntimeError)
            def method(self, counter):
                if counter != 0:
                    raise RuntimeError()
                return 0

            def ha_on_error(self, method, exc, args, kwargs):
                test_instance.assertIsInstance(exc, RuntimeError)
                counter = args[0]
                test_instance.assertIsInstance(counter, int)
                call_count[0] += 1
                # Decrement until the method succeeds at 0.
                return (counter - 1,), {}

        foo = Foo()
        self.assertEqual(foo.method(3), 0)

    def test_ha_max_retries(self):
        # After max_retries the original exception must propagate;
        # the method runs 1 initial attempt + 2 retries = 3 calls.
        call_count = [0]

        class Foo(HA):
            def __init__(self):
                super(Foo, self).__init__()
                self.ha_config.default.delay = 0
                self.ha_config.default.max_retries = 2

            @HA.catch(RuntimeError)
            def method(self):
                call_count[0] += 1
                raise RuntimeError()

        foo = Foo()
        with self.assertRaises(RuntimeError):
            foo.method()
        self.assertEqual(call_count[0], 3)
| apache-2.0 |
vileopratama/vitech | src/addons/procurement/__openerp__.py | 18 | 1778 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name' : 'Procurements',
'version' : '1.0',
'website': 'https://www.odoo.com/page/manufacturing',
'category' : 'Hidden',
'depends' : ['base', 'product'],
'description': """
This is the module for computing Procurements.
==============================================
This procurement module only depends on the product module and is not useful
on itself. Procurements represent needs that need to be solved by a procurement
rule. When a procurement is created, it is confirmed. When a rule is found,
it will be put in running state. After, it will check if what needed to be done
for the rule has been executed. Then it will go to the done state. A procurement
can also go into exception, for example when it can not find a rule and it can be cancelled.
The mechanism will be extended by several modules. The procurement rule of stock will
create a move and the procurement will be fulfilled when the move is done.
The procurement rule of sale_service will create a task. Those of purchase or
mrp will create a purchase order or a manufacturing order.
The scheduler will check if it can assign a rule to confirmed procurements and if
it can put running procurements to done.
Procurements in exception should be checked manually and can be re-run.
""",
'data': [
'security/ir.model.access.csv',
'security/procurement_security.xml',
'procurement_data.xml',
'wizard/schedulers_all_view.xml',
'procurement_view.xml',
'company_view.xml',
'product_product_view.xml',
],
'demo': [],
'test': ['test/procurement.yml'],
'installable': True,
'auto_install': True,
}
| mit |
dweinstein/finsky | finsky/protos/restore_pb2.py | 2 | 9808 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: restore.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import common_pb2 as common__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='restore.proto',
package='Restore',
syntax='proto2',
serialized_pb=_b('\n\rrestore.proto\x12\x07Restore\x1a\x0c\x63ommon.proto\"[\n GetBackupDocumentChoicesResponse\x12\x37\n\x12\x62\x61\x63kupDocumentInfo\x18\x01 \x03(\x0b\x32\x1b.Restore.BackupDocumentInfo\"U\n\x1eGetBackupDeviceChoicesResponse\x12\x33\n\x10\x62\x61\x63kupDeviceInfo\x18\x01 \x03(\x0b\x32\x19.Restore.BackupDeviceInfo\"z\n\x10\x42\x61\x63kupDeviceInfo\x12\x11\n\tandroidId\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0crestoreToken\x18\x03 \x01(\t\x12\x14\n\x0cnumDocuments\x18\x04 \x01(\x05\x12\x19\n\x11lastCheckinTimeMs\x18\x05 \x01(\x03\"\x96\x01\n\x12\x42\x61\x63kupDocumentInfo\x12\x1c\n\x05\x64ocid\x18\x01 \x01(\x0b\x32\r.Common.Docid\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0bversionCode\x18\x03 \x01(\x05\x12%\n\x0ethumbnailImage\x18\x04 \x01(\x0b\x32\r.Common.Image\x12\x17\n\x0frestorePriority\x18\x05 \x01(\x05\x42+\n com.google.android.finsky.protosB\x07Restore')
,
dependencies=[common__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_GETBACKUPDOCUMENTCHOICESRESPONSE = _descriptor.Descriptor(
name='GetBackupDocumentChoicesResponse',
full_name='Restore.GetBackupDocumentChoicesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backupDocumentInfo', full_name='Restore.GetBackupDocumentChoicesResponse.backupDocumentInfo', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=40,
serialized_end=131,
)
_GETBACKUPDEVICECHOICESRESPONSE = _descriptor.Descriptor(
name='GetBackupDeviceChoicesResponse',
full_name='Restore.GetBackupDeviceChoicesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='backupDeviceInfo', full_name='Restore.GetBackupDeviceChoicesResponse.backupDeviceInfo', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=133,
serialized_end=218,
)
_BACKUPDEVICEINFO = _descriptor.Descriptor(
name='BackupDeviceInfo',
full_name='Restore.BackupDeviceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='androidId', full_name='Restore.BackupDeviceInfo.androidId', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='name', full_name='Restore.BackupDeviceInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='restoreToken', full_name='Restore.BackupDeviceInfo.restoreToken', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='numDocuments', full_name='Restore.BackupDeviceInfo.numDocuments', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lastCheckinTimeMs', full_name='Restore.BackupDeviceInfo.lastCheckinTimeMs', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=220,
serialized_end=342,
)
_BACKUPDOCUMENTINFO = _descriptor.Descriptor(
name='BackupDocumentInfo',
full_name='Restore.BackupDocumentInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docid', full_name='Restore.BackupDocumentInfo.docid', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='title', full_name='Restore.BackupDocumentInfo.title', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='versionCode', full_name='Restore.BackupDocumentInfo.versionCode', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='thumbnailImage', full_name='Restore.BackupDocumentInfo.thumbnailImage', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='restorePriority', full_name='Restore.BackupDocumentInfo.restorePriority', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=345,
serialized_end=495,
)
_GETBACKUPDOCUMENTCHOICESRESPONSE.fields_by_name['backupDocumentInfo'].message_type = _BACKUPDOCUMENTINFO
_GETBACKUPDEVICECHOICESRESPONSE.fields_by_name['backupDeviceInfo'].message_type = _BACKUPDEVICEINFO
_BACKUPDOCUMENTINFO.fields_by_name['docid'].message_type = common__pb2._DOCID
_BACKUPDOCUMENTINFO.fields_by_name['thumbnailImage'].message_type = common__pb2._IMAGE
DESCRIPTOR.message_types_by_name['GetBackupDocumentChoicesResponse'] = _GETBACKUPDOCUMENTCHOICESRESPONSE
DESCRIPTOR.message_types_by_name['GetBackupDeviceChoicesResponse'] = _GETBACKUPDEVICECHOICESRESPONSE
DESCRIPTOR.message_types_by_name['BackupDeviceInfo'] = _BACKUPDEVICEINFO
DESCRIPTOR.message_types_by_name['BackupDocumentInfo'] = _BACKUPDOCUMENTINFO
GetBackupDocumentChoicesResponse = _reflection.GeneratedProtocolMessageType('GetBackupDocumentChoicesResponse', (_message.Message,), dict(
DESCRIPTOR = _GETBACKUPDOCUMENTCHOICESRESPONSE,
__module__ = 'restore_pb2'
# @@protoc_insertion_point(class_scope:Restore.GetBackupDocumentChoicesResponse)
))
_sym_db.RegisterMessage(GetBackupDocumentChoicesResponse)
GetBackupDeviceChoicesResponse = _reflection.GeneratedProtocolMessageType('GetBackupDeviceChoicesResponse', (_message.Message,), dict(
DESCRIPTOR = _GETBACKUPDEVICECHOICESRESPONSE,
__module__ = 'restore_pb2'
# @@protoc_insertion_point(class_scope:Restore.GetBackupDeviceChoicesResponse)
))
_sym_db.RegisterMessage(GetBackupDeviceChoicesResponse)
BackupDeviceInfo = _reflection.GeneratedProtocolMessageType('BackupDeviceInfo', (_message.Message,), dict(
DESCRIPTOR = _BACKUPDEVICEINFO,
__module__ = 'restore_pb2'
# @@protoc_insertion_point(class_scope:Restore.BackupDeviceInfo)
))
_sym_db.RegisterMessage(BackupDeviceInfo)
BackupDocumentInfo = _reflection.GeneratedProtocolMessageType('BackupDocumentInfo', (_message.Message,), dict(
DESCRIPTOR = _BACKUPDOCUMENTINFO,
__module__ = 'restore_pb2'
# @@protoc_insertion_point(class_scope:Restore.BackupDocumentInfo)
))
_sym_db.RegisterMessage(BackupDocumentInfo)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n com.google.android.finsky.protosB\007Restore'))
# @@protoc_insertion_point(module_scope)
| mit |
nandhp/youtube-dl | youtube_dl/extractor/ehow.py | 195 | 1518 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
class EHowIE(InfoExtractor):
    """Extractor for ehow.com video pages (numeric id at the end of the slug)."""
    IE_NAME = 'eHow'
    _VALID_URL = r'https?://(?:www\.)?ehow\.com/[^/_?]*_(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.ehow.com/video_12245069_hardwood-flooring-basics.html',
        'md5': '9809b4e3f115ae2088440bcb4efbf371',
        'info_dict': {
            'id': '12245069',
            'ext': 'flv',
            'title': 'Hardwood Flooring Basics',
            'description': 'Hardwood flooring may be time consuming, but its ultimately a pretty straightforward concept. Learn about hardwood flooring basics with help from a hardware flooring business owner in this free video...',
            'uploader': 'Erick Nathan',
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The media URL is embedded percent-encoded in a file=/source= param.
        video_url = self._search_regex(
            r'(?:file|source)=(http[^\'"&]*)', webpage, 'video URL')
        final_url = compat_urllib_parse_unquote(video_url)
        uploader = self._html_search_meta('uploader', webpage)
        # Strip the site-name suffix eHow appends to every page title.
        title = self._og_search_title(webpage).replace(' | eHow', '')
        return {
            'id': video_id,
            'url': final_url,
            'title': title,
            'thumbnail': self._og_search_thumbnail(webpage),
            'description': self._og_search_description(webpage),
            'uploader': uploader,
        }
| unlicense |
oeeagle/quantum | neutron/extensions/loadbalancer.py | 2 | 18396 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.api.v2 import resource_helper
from neutron.common import exceptions as qexception
from neutron import manager
from neutron.plugins.common import constants
from neutron.services.service_base import ServicePluginBase
# Loadbalancer Exceptions
class VipNotFound(qexception.NotFound):
    # 404: no VIP with the given id.
    message = _("Vip %(vip_id)s could not be found")
class VipExists(qexception.NeutronException):
    # A pool may have at most one VIP attached.
    message = _("Another Vip already exists for pool %(pool_id)s")
class PoolNotFound(qexception.NotFound):
    message = _("Pool %(pool_id)s could not be found")
class MemberNotFound(qexception.NotFound):
    message = _("Member %(member_id)s could not be found")
class HealthMonitorNotFound(qexception.NotFound):
    message = _("Health_monitor %(monitor_id)s could not be found")
class PoolMonitorAssociationNotFound(qexception.NotFound):
    # The monitor exists but is not bound to this pool.
    message = _("Monitor %(monitor_id)s is not associated "
                "with Pool %(pool_id)s")
class PoolMonitorAssociationExists(qexception.Conflict):
    # 409: the monitor/pool binding already exists.
    message = _('health_monitor %(monitor_id)s is already associated '
                'with pool %(pool_id)s')
class StateInvalid(qexception.NeutronException):
    message = _("Invalid state %(state)s of Loadbalancer resource %(id)s")
class PoolInUse(qexception.InUse):
    # Raised when deleting a pool that still has a VIP/members attached.
    message = _("Pool %(pool_id)s is still in use")
class PoolStatsNotFound(qexception.NotFound):
    message = _("Statistics of Pool %(pool_id)s could not be found")
class ProtocolMismatch(qexception.BadRequest):
    # A VIP and its pool must use the same protocol (TCP/HTTP/HTTPS).
    message = _("Protocol %(vip_proto)s does not match "
                "pool protocol %(pool_proto)s")
class MemberExists(qexception.NeutronException):
    # Duplicate (address, port) pair within the same pool.
    message = _("Member with address %(address)s and port %(port)s "
                "already present in pool %(pool)s")
# Attribute maps for the LBaaS v1 REST resources (vips, pools, members,
# health_monitors), following standard Neutron attribute-map conventions:
# allow_post/allow_put control writability on create/update, 'validate'
# names a registered attribute validator, and 'convert_to' normalizes the
# incoming value.
RESOURCE_ATTRIBUTE_MAP = {
    'vips': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:string': None},
                      'required_by_policy': True,
                      'is_visible': True},
        'name': {'allow_post': True, 'allow_put': True,
                 'validate': {'type:string': None},
                 'default': '',
                 'is_visible': True},
        'description': {'allow_post': True, 'allow_put': True,
                        'validate': {'type:string': None},
                        'is_visible': True, 'default': ''},
        'subnet_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:uuid': None},
                      'is_visible': True},
        'address': {'allow_post': True, 'allow_put': False,
                    'default': attr.ATTR_NOT_SPECIFIED,
                    'validate': {'type:ip_address_or_none': None},
                    'is_visible': True},
        'port_id': {'allow_post': False, 'allow_put': False,
                    'validate': {'type:uuid': None},
                    'is_visible': True},
        'protocol_port': {'allow_post': True, 'allow_put': False,
                          'validate': {'type:range': [0, 65535]},
                          'convert_to': attr.convert_to_int,
                          'is_visible': True},
        'protocol': {'allow_post': True, 'allow_put': False,
                     'validate': {'type:values': ['TCP', 'HTTP', 'HTTPS']},
                     'is_visible': True},
        'pool_id': {'allow_post': True, 'allow_put': True,
                    'validate': {'type:uuid': None},
                    'is_visible': True},
        'session_persistence': {'allow_post': True, 'allow_put': True,
                                'convert_to': attr.convert_none_to_empty_dict,
                                'default': {},
                                'validate': {
                                    'type:dict_or_empty': {
                                        'type': {'type:values': ['APP_COOKIE',
                                                                 'HTTP_COOKIE',
                                                                 'SOURCE_IP'],
                                                 'required': True},
                                        'cookie_name': {'type:string': None,
                                                        'required': False}}},
                                'is_visible': True},
        # -1 means "no limit".
        'connection_limit': {'allow_post': True, 'allow_put': True,
                             'default': -1,
                             'convert_to': attr.convert_to_int,
                             'is_visible': True},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'status_description': {'allow_post': False, 'allow_put': False,
                               'is_visible': True}
    },
    'pools': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:string': None},
                      'required_by_policy': True,
                      'is_visible': True},
        'vip_id': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'name': {'allow_post': True, 'allow_put': True,
                 'validate': {'type:string': None},
                 'default': '',
                 'is_visible': True},
        'description': {'allow_post': True, 'allow_put': True,
                        'validate': {'type:string': None},
                        'is_visible': True, 'default': ''},
        'subnet_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:uuid': None},
                      'is_visible': True},
        'protocol': {'allow_post': True, 'allow_put': False,
                     'validate': {'type:values': ['TCP', 'HTTP', 'HTTPS']},
                     'is_visible': True},
        'provider': {'allow_post': True, 'allow_put': False,
                     'validate': {'type:string': None},
                     'is_visible': True, 'default': attr.ATTR_NOT_SPECIFIED},
        'lb_method': {'allow_post': True, 'allow_put': True,
                      'validate': {'type:string': None},
                      'is_visible': True},
        'members': {'allow_post': False, 'allow_put': False,
                    'is_visible': True},
        'health_monitors': {'allow_post': True, 'allow_put': True,
                            'default': None,
                            'validate': {'type:uuid_list': None},
                            'convert_to': attr.convert_to_list,
                            'is_visible': True},
        'health_monitors_status': {'allow_post': False, 'allow_put': False,
                                   'is_visible': True},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'status_description': {'allow_post': False, 'allow_put': False,
                               'is_visible': True}
    },
    'members': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:string': None},
                      'required_by_policy': True,
                      'is_visible': True},
        'pool_id': {'allow_post': True, 'allow_put': True,
                    'validate': {'type:uuid': None},
                    'is_visible': True},
        'address': {'allow_post': True, 'allow_put': False,
                    'validate': {'type:ip_address': None},
                    'is_visible': True},
        'protocol_port': {'allow_post': True, 'allow_put': False,
                          'validate': {'type:range': [0, 65535]},
                          'convert_to': attr.convert_to_int,
                          'is_visible': True},
        'weight': {'allow_post': True, 'allow_put': True,
                   'default': 1,
                   'validate': {'type:range': [0, 256]},
                   'convert_to': attr.convert_to_int,
                   'is_visible': True},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'status_description': {'allow_post': False, 'allow_put': False,
                               'is_visible': True}
    },
    'health_monitors': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'validate': {'type:string': None},
                      'required_by_policy': True,
                      'is_visible': True},
        'type': {'allow_post': True, 'allow_put': False,
                 'validate': {'type:values': ['PING', 'TCP', 'HTTP', 'HTTPS']},
                 'is_visible': True},
        'delay': {'allow_post': True, 'allow_put': True,
                  'validate': {'type:non_negative': None},
                  'convert_to': attr.convert_to_int,
                  'is_visible': True},
        'timeout': {'allow_post': True, 'allow_put': True,
                    'convert_to': attr.convert_to_int,
                    'is_visible': True},
        'max_retries': {'allow_post': True, 'allow_put': True,
                        'validate': {'type:range': [1, 10]},
                        'convert_to': attr.convert_to_int,
                        'is_visible': True},
        'http_method': {'allow_post': True, 'allow_put': True,
                        'validate': {'type:string': None},
                        'default': 'GET',
                        'is_visible': True},
        'url_path': {'allow_post': True, 'allow_put': True,
                     'validate': {'type:string': None},
                     'default': '/',
                     'is_visible': True},
        # Accepts either a comma-separated list of HTTP codes ("200, 202")
        # or a single range ("200-299").  Raw string: the pattern contains
        # \d and \s escapes, which would be invalid string escapes otherwise.
        'expected_codes': {'allow_post': True, 'allow_put': True,
                           'validate': {
                               'type:regex':
                               r'^(\d{3}(\s*,\s*\d{3})*)$|^(\d{3}-\d{3})$'},
                           'default': '200',
                           'is_visible': True},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'status_description': {'allow_post': False, 'allow_put': False,
                               'is_visible': True},
        'pools': {'allow_post': False, 'allow_put': False,
                  'is_visible': True}
    }
}
# Sub-resources nested under a parent resource; health monitor associations
# are exposed as /pools/{pool_id}/health_monitors.
SUB_RESOURCE_ATTRIBUTE_MAP = {
    'health_monitors': {
        'parent': {'collection_name': 'pools',
                   'member_name': 'pool'},
        'parameters': {'id': {'allow_post': True, 'allow_put': False,
                              'validate': {'type:uuid': None},
                              'is_visible': True},
                       'tenant_id': {'allow_post': True, 'allow_put': False,
                                     'validate': {'type:string': None},
                                     'required_by_policy': True,
                                     'is_visible': True},
                       }
    }
}
class Loadbalancer(extensions.ExtensionDescriptor):
    """API extension descriptor for the LBaaS v1 ('lbaas') extension."""
    @classmethod
    def get_name(cls):
        return "LoadBalancing service"
    @classmethod
    def get_alias(cls):
        return "lbaas"
    @classmethod
    def get_description(cls):
        return "Extension for LoadBalancing service"
    @classmethod
    def get_namespace(cls):
        return "http://wiki.openstack.org/neutron/LBaaS/API_1.0"
    @classmethod
    def get_updated(cls):
        return "2012-10-07T10:00:00-00:00"
    @classmethod
    def get_resources(cls):
        # Build the top-level REST resources plus the pool/health_monitor
        # sub-resources, and register the plural<->singular name mappings.
        plural_mappings = resource_helper.build_plural_mappings(
            {}, RESOURCE_ATTRIBUTE_MAP)
        plural_mappings['health_monitors_status'] = 'health_monitor_status'
        attr.PLURALS.update(plural_mappings)
        # 'stats' is exposed as a GET action on an individual pool.
        action_map = {'pool': {'stats': 'GET'}}
        resources = resource_helper.build_resource_info(plural_mappings,
                                                        RESOURCE_ATTRIBUTE_MAP,
                                                        constants.LOADBALANCER,
                                                        action_map=action_map)
        plugin = manager.NeutronManager.get_service_plugins()[
            constants.LOADBALANCER]
        for collection_name in SUB_RESOURCE_ATTRIBUTE_MAP:
            # NOTE(review): singularization only strips a trailing 's'; it
            # would NOT handle 'ies' plurals (e.g. proxies -> proxy).
            resource_name = collection_name[:-1]
            parent = SUB_RESOURCE_ATTRIBUTE_MAP[collection_name].get('parent')
            params = SUB_RESOURCE_ATTRIBUTE_MAP[collection_name].get(
                'parameters')
            controller = base.create_resource(collection_name, resource_name,
                                              plugin, params,
                                              allow_bulk=True,
                                              parent=parent)
            resource = extensions.ResourceExtension(
                collection_name,
                controller, parent,
                path_prefix=constants.COMMON_PREFIXES[constants.LOADBALANCER],
                attr_map=params)
            resources.append(resource)
        return resources
    @classmethod
    def get_plugin_interface(cls):
        return LoadBalancerPluginBase
    def update_attributes_map(self, attributes):
        super(Loadbalancer, self).update_attributes_map(
            attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
    def get_extended_resources(self, version):
        # The extension defines attributes only for the v2.0 core API.
        if version == "2.0":
            return RESOURCE_ATTRIBUTE_MAP
        else:
            return {}
@six.add_metaclass(abc.ABCMeta)
class LoadBalancerPluginBase(ServicePluginBase):
    """Abstract CRUD interface that LBaaS service plugins must implement."""
    def get_plugin_name(self):
        return constants.LOADBALANCER
    def get_plugin_type(self):
        return constants.LOADBALANCER
    def get_plugin_description(self):
        return 'LoadBalancer service plugin'
    # --- VIP CRUD ---
    @abc.abstractmethod
    def get_vips(self, context, filters=None, fields=None):
        pass
    @abc.abstractmethod
    def get_vip(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def create_vip(self, context, vip):
        pass
    @abc.abstractmethod
    def update_vip(self, context, id, vip):
        pass
    @abc.abstractmethod
    def delete_vip(self, context, id):
        pass
    # --- Pool CRUD and statistics ---
    @abc.abstractmethod
    def get_pools(self, context, filters=None, fields=None):
        pass
    @abc.abstractmethod
    def get_pool(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def create_pool(self, context, pool):
        pass
    @abc.abstractmethod
    def update_pool(self, context, id, pool):
        pass
    @abc.abstractmethod
    def delete_pool(self, context, id):
        pass
    @abc.abstractmethod
    def stats(self, context, pool_id):
        pass
    # --- Pool/health-monitor association (sub-resource) ---
    @abc.abstractmethod
    def create_pool_health_monitor(self, context, health_monitor, pool_id):
        pass
    @abc.abstractmethod
    def get_pool_health_monitor(self, context, id, pool_id, fields=None):
        pass
    @abc.abstractmethod
    def delete_pool_health_monitor(self, context, id, pool_id):
        pass
    # --- Member CRUD ---
    @abc.abstractmethod
    def get_members(self, context, filters=None, fields=None):
        pass
    @abc.abstractmethod
    def get_member(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def create_member(self, context, member):
        pass
    @abc.abstractmethod
    def update_member(self, context, id, member):
        pass
    @abc.abstractmethod
    def delete_member(self, context, id):
        pass
    # --- Health monitor CRUD ---
    @abc.abstractmethod
    def get_health_monitors(self, context, filters=None, fields=None):
        pass
    @abc.abstractmethod
    def get_health_monitor(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def create_health_monitor(self, context, health_monitor):
        pass
    @abc.abstractmethod
    def update_health_monitor(self, context, id, health_monitor):
        pass
    @abc.abstractmethod
    def delete_health_monitor(self, context, id):
        pass
| apache-2.0 |
admiralobvious/flask-simpleldap | examples/groups/app.py | 1 | 1835 | import ldap as l
from flask import Flask, g, request, session, redirect, url_for
from flask_simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'  # NOTE(review): hard-coded dev-only secret -- replace in production
app.debug = True
# LDAP connection settings -- replace with your directory's values.
# NOTE(review): credentials are hard-coded here for the example only.
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
# OPT_REFERRALS = 0 disables referral chasing (presumably for Active
# Directory compatibility -- confirm against your server).
app.config['LDAP_CUSTOM_OPTIONS'] = {l.OPT_REFERRALS: 0}
ldap = LDAP(app)
@app.before_request
def before_request():
    """Attach the logged-in user's info and LDAP groups to ``g``."""
    g.user = None
    if 'user_id' not in session:
        return
    # This is where you'd query your database to get the user info.
    g.user = {}
    # Expose the LDAP groups the user is a member of to later handlers.
    g.ldap_groups = ldap.get_user_groups(user=session['user_id'])
@app.route('/')
@ldap.login_required
def index():
    # Landing page; only reachable after a successful LDAP login.
    return 'Successfully logged in!'
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in by binding against LDAP with the posted credentials.

    GET renders a minimal login form; POST attempts the bind and, on
    success, stores the user id in the session.
    """
    if g.user:
        return redirect(url_for('index'))
    if request.method == 'POST':
        user = request.form['user']
        passwd = request.form['passwd']
        # Reject empty passwords *before* binding: many LDAP servers treat
        # a simple bind with an empty password as an anonymous bind and
        # report success, which would let anyone "log in" as any user.
        if passwd == '' or ldap.bind_user(user, passwd) is None:
            return 'Invalid credentials'
        session['user_id'] = user
        return redirect('/')
    return """<form action="" method="post">
                user: <input name="user"><br>
                password:<input type="password" name="passwd"><br>
                <input type="submit" value="Submit"></form>"""
@app.route('/group')
@ldap.group_required(groups=['Web Developers', 'QA'])
def group():
    # Only members of the listed LDAP groups may view this page.
    return 'Group restricted page'
@app.route('/logout')
def logout():
    # Drop the session entry; pop() with a default never raises.
    session.pop('user_id', None)
    return redirect(url_for('index'))
if __name__ == '__main__':
    # Run the Flask development server (debug mode was enabled above).
    app.run()
| mit |
jjmleiro/hue | desktop/core/ext-py/guppy-0.1.10/guppy/heapy/test/test_Classifiers.py | 37 | 26467 | #._cv_part guppy.heapy.test.test_Classifiers
from guppy.heapy.test import support
PORTABLE_TEST = 1 # Relax tests to be more portable
class TestCase(support.TestCase):
    """Shared fixture and relation-assertion helpers for classifier tests."""
    def setUp(self):
        support.TestCase.setUp(self)
        self.View.is_rg_update_all = False
        # Short aliases for the heapy classifier constructors used throughout.
        self.US = US = self.heapy.UniSet
        self.Use = Use = self.heapy.Use
        Use.reprefix = 'hp.'
        self.do = lambda x:x.dictof
        self.un = Use.Anything.fam
        self.ty = Use.Type
        self.cl = Use.Class
        self.rc = Use.Rcs
        self.iso = Use.iso
        self.Anything = US.Anything
        self.Nothing = US.Nothing
        # Sample old-style classes (one with a method, one without) and an
        # instance, used as classification targets by the tests.
        class C1:
            def x(self):
                return 0
        class C2:
            pass
        c1 = C1()
        self.C1 = C1
        self.C2 = C2
        self.c1 = c1
    def lt(self, a, b):
        # Assert a is a strict subset of b.
        self.assert_(a < b)
    def eq(self, a, b):
        self.assert_(a == b)
    def dj(self, a, b):
        # disjoint; not related by <= or >=, and not overlapping
        self.assert_(not a <= b)
        self.assert_(not b <= a)
        self.assert_(not a & b)
        self.assert_(a.disjoint(b))
    def nr(self, a, b):
        # not related by <= or >=, and overlapping
        self.assert_(not a <= b)
        self.assert_(not b <= a)
        self.assert_(a & b)
        self.assert_(not a.disjoint(b))
class NewCases(TestCase):
    # New cases that came up after all the original tests had passed,
    # according to notes at Tue Nov 23 06:40:59 MET 2004.
    # To keep original tests intact, for consistency, speed reasons.
    def test_owners(self):
        # Test the .owners attribute
        # The owners of a set of __dict__s should be the set of the
        # objects (or classes) that own those dicts.
        iso = self.iso
        class C:
            pass
        class D:
            pass
        c = C()
        d = D()
        self.aseq(iso(c.__dict__, d.__dict__, C.__dict__).owners,
                  iso(c, d, C))
class SpecialCases(TestCase):
    # Special tests that catch cases that came up during development & debugging
    def test_1(self):
        # Algebraic identities of set operations (&, |, -, ^, ~, <=, >=)
        # over mixed classifier kinds.
        un = self.un
        ty = self.ty
        cl = self.cl
        do = self.do
        rc = self.rc
        iso = self.iso
        All = self.Anything
        Nothing = self.Nothing
        C1 = self.C1
        C2 = self.C2
        c1 = self.c1
        # eq also checks that equal sets render identically.
        def eq(a, b):
            self.assert_(a == b)
            self.assert_(str(a) == str(b))
        e1 = []
        e2 = {}
        e3 = []
        e4 = ()
        a = rc(cl(C1)) & ty(type(c1))
        b = rc(cl(C1))
        eq((b - a) | a, b)
        eq(a | (b - a), b)
        a = rc(cl(C1)) & ~ty(type(c1))
        b = ty(type(c1)) & ~rc(cl(C1))
        eq(a | b, b | a)
        a = ty(int)
        b = cl(C1)
        c = All
        # De Morgan-style identity relative to the universe.
        eq(c - (a & b), (c - a) | (c - b))
        a = cl(C1)
        b = rc(ty(dict))
        c = iso(c1)
        eq( (a | b) | c , a | (b | c))
        a = ty(int)
        b = ty(dict)
        self.assert_( ~a & ~b != Nothing)
        eq( ty(list) & iso(e1, e2, e3), iso(e1, e3))
        eq( (ty(list) | ty(dict)) & iso(e1, e2, e3, e4), iso(e1, e2, e3))
        eq( (ty(list) &~rc(cl(C1))) & iso(e1, e2, e3), iso(e1, e3))
        eq( iso(e1,e3) | ty(list), ty(list))
        eq( ty(list) | iso(e1,e3), ty(list))
        eq( iso(e1,e3) - iso(e3), iso(e1))
        eq( ~iso(e3) & iso(e1,e3), iso(e1))
        eq( iso(e1,e2,e3) - ty(dict), iso(e1,e3))
        eq( ~ty(dict) & iso(e1,e2,e3), iso(e1,e3))
        eq( ty(dict) | iso(e1,e2), ty(dict) | iso(e1))
        eq( iso(e1,e2) | ty(dict), ty(dict) | iso(e1))
        eq( (ty(dict) | ty(tuple)) | iso(e1,e2), (ty(dict) | ty(tuple)) | iso(e1))
        eq( iso(e1,e2) | (ty(dict) | ty(tuple)), (ty(dict) | ty(tuple)) | iso(e1))
        eq( (ty(dict) & ~rc(cl(C1))) | iso(e1,e2), (ty(dict) & ~rc(cl(C1))) | iso(e1))
        eq( iso(e1,e2) | (ty(dict) & ~rc(cl(C1))), (ty(dict) & ~rc(cl(C1))) | iso(e1))
        eq( ~ty(dict) | iso(e1, e2), ~ty(dict) | iso(e2))
        eq( iso(e1, e2) | ~ty(dict), ~ty(dict) | iso(e2))
        eq( ty(dict) - iso(e1,e2), ty(dict) - iso(e2))
        eq( ~iso(e1,e2) & ty(dict), ty(dict) - iso(e2))
        eq( iso(e1,e3) ^ iso(e2), iso(e1,e2,e3))
        eq( iso(e1,e3) ^ iso(e2,e3), iso(e1,e2))
        eq( iso(e1,e3) ^ iso(e1,e3), Nothing)
        eq( iso(e1,e3) <= ty(list), True)
        eq( iso(e1,e2) <= ty(list)|ty(dict), True)
        eq( ty(list) >= iso(e1,e3), True)
        eq( ty(list)|ty(dict) >= iso(e1,e2), True)
    def test_2(self):
        un = self.un
        ty = self.ty
        cl = self.cl
        do = self.do
        rc = self.rc
        iso = self.iso
        All = self.Anything
        Nothing = self.Nothing
        C1 = self.C1
        C2 = self.C2
        c1 = self.c1
        class C3(object):
            def x(self):
                return 1
        def asrt(x):
            self.assert_(x)
        def no(x):
            self.assert_(not x)
        eq = self.aseq
        # Tests to do with Nothing being finite - having length and iteration
        no(dict in (ty(dict) | ty(int)))
        no([] in (ty(dict) | ty(int)))
        asrt({} in (ty(dict) | ty(int)))
        asrt(dict in (ty(dict) | ty(int) | ty(type(dict))))
        asrt(list(ty(list) & iso({})) == [])
        # When creating ISO classes, we don't want to memoize them
        # which would leak the elements.
        from sys import getrefcount as grc
        import sys, types
        c = C1()
        rc = grc(c)
        x = iso(c)
        x=None
        # Refcount of c must return to its pre-iso() value: no leak.
        eq(grc(c), rc)
    def test_dictowner(self):
        # Special test for dict ownership
        # motivated by: dicts that are not found in traversal, should not
        # cause repeated (unsuccessfull) updates of dict ownership
        # This is a performance question, requires special kind of testing
        #
        # Also tests that dict & dict owners are not leaked
        import sys
        if sys.hexversion >= 0x02070000:
            print "XXX SKIPPING test_dictowner TEST BECAUSE OF SLUGGISHNESS WITH PYTHON 2.7"
            return
        import gc
        from sys import getrefcount as grc
        Use = self.Use
        C1 = self.C1
        c1 = self.c1
        iso = self.iso
        o = self.python.StringIO.StringIO()
        # Create a dict hidden from view
        d1 = self.View.immnodeset([{}])
        d3 = {}
        # Remember the initial ref counts for target objects
        gc.collect()
        rcd1 = grc(list(d1)[0])
        rcd3 = grc(d3)
        rcC1 = grc(C1)
        rcc1 = grc(c1)
        rcdc1 = grc(c1.__dict__)
        clock = self.python.time.time
        N = 5
        M = 50
        # This was the fast case, when only reachable dicts are classified
        for i in range(N):
            print >>o, iso(d3).kind
            print >>o, iso(c1.__dict__).kind
        # Now measure it
        while 1:
            gc.collect()
            t = clock()
            for i in range(M):
                iso(d3).kind
                iso(c1.__dict__).kind
            fast = clock()-t
            if fast >= 0.5: # Enough resolution?
                break
            else:
                M *= 2 # No, try more loops
        # This was a slow case; involving repeated classification of a unreachable dict
        # It was originally 4.97 times slower when N was 5
        # The problem occurs for successive classifications of different dicts,
        # when at least one of them is unreachable.
        gc.collect()
        for i in range(N):
            print >>o, iso(*d1).kind
            print >>o, iso(c1.__dict__).kind
        gc.collect()
        # Now measure it
        t = clock()
        for i in range(M):
            iso(*d1).kind
            iso(c1.__dict__).kind
        slow = clock()-t
        #print 'slow,fast',slow,fast
        self.assert_( slow <= 1.5*fast )
        # This is another slow case according to notes Nov 18 2004.
        # A succession of different unreachable dicts.
        gc.collect()
        dn = self.View.immnodeset([{} for i in range(N)])
        for i in range(N):
            print >>o, iso(list(dn)[i]).kind
        # Now measure it
        gc.collect()
        dn = self.View.immnodeset([{} for i in range(M)])
        t = clock()
        for i in range(M):
            iso(list(dn)[i]).kind
        slow = clock()-t
        #print 'slow,fast',slow,fast
        self.assert_( slow <= 1.5*fast )
        # Partition was likewise slow for unreachable dicts
        dn = self.View.immnodeset([{} for i in range(N)])
        gc.collect()
        print >>o, [x[0] for x in Use.Clodo.classifier.partition(dn)]
        # Now measure it
        dn = self.View.immnodeset([{} for i in range(M)])
        gc.collect()
        t = clock()
        [x[0] for x in Use.Clodo.classifier.partition(dn)]
        slow = clock()-t
        #print 'slow,fast',slow,fast
        self.assert_( slow <= 1.5*fast )
        # Check that ref counts for target objects are the same as initially
        gc.collect()
        gc.collect() # Note May 17 2005
        self.aseq(grc(list(d1)[0]), rcd1)
        self.aseq(grc(d3), rcd3)
        self.aseq(grc(c1), rcc1)
        self.aseq(grc(C1), rcC1)
        self.aseq(grc(c1.__dict__), rcdc1)
        self.aseq( o.getvalue(), """\
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict of <Module>.C1
dict (no owner)
dict (no owner)
dict (no owner)
dict (no owner)
dict (no owner)
[hp.Nothing.dictof]
""".replace('<Module>', self.__module__))
    def test_retclaset(self):
        # Test (A) that referrer classifications don't leak their classes
        # and (B) that selection is not disturbed by list arguments
        # (This is removed since it doesnt always work)
        # and (C) that selection does update referrer graph correctly
        self.__module__ = '<Module>' # Make the rendering independent on our name
        from sys import getrefcount as grc
        import gc
        C1 = self.C1
        c1 = self.c1
        iso = self.iso
        rcC1 = grc(C1)
        o = self.python.StringIO.StringIO()
        print >>o, iso(C1).byrcs.kind
        s = iso(c1).byrcs.kind
        print >>o, s
        self.aseq(s & iso(c1), iso(c1))
        x = C1()
        # set_trace()
        if 0:
            self.aseq( s & [c1, x], iso(c1))# (B) makes sure arg is removed from frame when converted
        else:
            self.aseq(s & iso(c1, x), iso(c1))
        s = iso(x).byrcs.kind
        self.aseq( s & iso(c1, x), iso(x))
        x = C1()
        self.aseq( s & iso(c1, x), iso(x)) # (C) make sure referrer graph is updated by select
        s = None
        x = None
        locals().clear()
        gc.collect()
        gc.collect() # Note May 17 2005
        self.aseq(grc(C1), rcC1) # (A)
    def test_alt_retclaset(self):
        # Test the alternative referrer memo update
        # On low level, and the speed of selection
        import gc
        iso = self.iso
        a = []
        b = self.View.immnodeset([[]])
        x = [a, b]
        if 1:
            # Low-level check: a complete referrer-graph update sees x as a
            # referrer of a, and the hidden list in b as having no referrer.
            hv = self.View.hv
            rg = self.View.nodegraph()
            gc.collect()
            hv.update_referrers_completely(rg)
            self.assert_( x in rg[a] )
            self.assert_(rg[list(b)[0]] == (None,))
            rg.clear()
            rg=None
        # Test View functionality
        self.View.is_rg_update_all = True
        gc.collect()
        iso(a).referrers
        self.assert_( a in self.View.rg.get_domain() )
        self.assert_( list(b)[0] in self.View.rg.get_domain())
        clock = self.python.time.clock
        s = iso(a)
        N = 1000
        # Calibrate a baseline timing for repeated .referrers lookups.
        while 1:
            t = clock()
            for i in range(N):
                s.referrers
            fast = clock()-t
            if fast >= 0.5:
                break
            N *= 2 # CPU is too fast to get good resolution, try more loops
        t = clock()
        for i in range(N):
            self.View.rg.domain_covers([a])
            self.View.rg[a]
        faster = clock()-t
        s = iso(*b)
        t = clock()
        for i in range(N):
            s.referrers
        slow = clock() - t
        #print 'slow,fast,faster',slow, fast, faster
        self.assert_(not slow > fast * 4)
    def test_via(self, vlist=['v',]): # vlist is just to make v unoptimizable
        # Special tests for the via classifier
        from sys import getrefcount as grc
        import gc
        iso = self.iso
        hp = self.Use
        d = {}
        k = ('k',)
        v = tuple(vlist) # Make sure v is not optimized to a constant
        d[k] = v
        d[v] = v
        rck = grc(k)
        rcv = grc(v)
        s = iso(v)
        # Dict key order is not defined, so either keys() index may show up.
        self.assert_( s.byvia.kind == hp.Via("_.f_locals['v']", "_[('k',)]", "_[('v',)]", '_.keys()[1]') or
                      s.byvia.kind == hp.Via("_.f_locals['v']", "_[('k',)]", "_[('v',)]", '_.keys()[0]'))
        del s
        gc.collect()
        gc.collect()
        # The via classification must not leak references to k or v.
        self.aseq(grc(k), rck)
        self.aseq(grc(v), rcv )
class RenderCase(TestCase):
    """Checks the textual rendering (brief str and partition tables)."""
    def test_rendering(self):
        import sys, types
        iso = self.iso
        C1 = self.C1
        c1 = self.c1
        class C3(object):
            def x(self):
                return 1
        e1 = []
        e2 = {}
        e3 = []
        o = self.python.StringIO.StringIO()
        # str'ing of homogenous & inhoumogenous values
        # Addresses vary between runs, so stub out address rendering.
        self.US.summary_str.str_address = lambda x:'<address>'
        def ps(x):
            print >>o, x.brief
        ps( iso(1,2) )
        ps( iso(1,2.0, 3.0) )
        ps( iso(e1) )
        ps( iso(e1, e2) )
        ps( iso(e1, e3) )
        ps( iso(self.python.exceptions.TypeError()) )
        ps( iso(None) )
        ps( iso(sys, support, types) )
        ps( iso(int, types.ClassType, C3) )
        ps( iso(C1()) )
        ps( iso(C3()) )
        ps( iso(C1) )
        ps( iso(C3) )
        ps( iso(len) )
        ps( iso(self.setUp) )
        ps( iso(C1.x) )
        ps( iso(C1.x.im_func) )
        ps( iso(C1().x) )
        ps( iso(C3.x) )
        ps( iso(C3().x) )
        ps( iso({}) )
        ps( iso(c1.__dict__) )
        ps( iso(types.__dict__) )
        # Manufacture a live traceback/frame pair to render.
        try:
            1/0
        except:
            typ, value, traceback = sys.exc_info()
        ps( iso(traceback) )
        ps( iso(traceback.tb_frame) )
        expected = """\
<2 int: 1, 2>
<3 (float | int): <2 float: 2.0, 3.0> | <1 int: 1>>
<1 list: <address>*0>
<2 (dict (no owner) | list): <1 dict (no owner): <address>*0> | <1 list: <ad...>
<2 list: <address>*0, <address>*0>
<1 exceptions.TypeError: <address>>
<1 types.NoneType: None>
<3 module: guppy.heapy.test.support, sys, types>
<3 type: class, <Module>.C3, int>
<1 <Module>.C1: <address>>
<1 <Module>.C3: <address>>
<1 class: <Module>.C1>
<1 type: <Module>.C3>
<1 types.BuiltinFunctionType: len>
<1 types.MethodType: <<Module>.RenderCase at <addre...>
<1 types.MethodType: <Module>.C1.x>
<1 function: <Module>.x>
<1 types.MethodType: <<Module>.C1 at <address>>.x>
<1 types.MethodType: <Module>.C3.x>
<1 types.MethodType: <<Module>.C3 at <address>>.x>
<1 dict (no owner): <address>*0>
<1 dict of <Module>.C1: <address>>
<1 dict of module: types>
<1 types.TracebackType: <in frame <test_rendering at <address>> at <address>>>
<1 types.FrameType: <test_rendering at <address>>>
""".replace('<Module>', self.__module__)
        self.aseq(o.getvalue(), expected)
        if PORTABLE_TEST:
            return
        o = self.python.StringIO.StringIO()
        # The following is nonportable, sizes may change
        # In particular, the list size changed from 2.3 to 2.4
        # The following test is only for 2.3 in 32-bit python
        # pp'ing prints in a nice form
        # This tests all types currently defined in Classifiers.Summary_str
        # and then some
        # Except: frametype; its size varies from time to time!
        x = iso(len, C1, 1.0+3j, {1:2,3:4}, 1.25, C1.x.im_func, 1, ['list'],
                100000000000l, None, C1.x, C1().x, C3.x, C3().x, sys, support,
                'string', ('tuple',), C3, int, type(None),
                # and some types not defined
                C1(), C3(), c1.__dict__
                )
        print >>o, x
        print >>o, x.more
        # Test instancetype; we need to replace the classifier with bytype
        x = iso(C1()).bytype
        print >>o, x
        if 0:
            print o.getvalue()
        else:
            expected = """\
Partition of a set of 24 objects. Total size = 2128 bytes.
 Index Count % Size % Cumulative % Kind (class / dict of class)
0 3 12 1272 60 1272 60 type
1 4 17 144 7 1416 67 types.MethodType
2 1 4 136 6 1552 73 dict (no owner)
3 1 4 136 6 1688 79 dict of <Module>.C1
4 1 4 60 3 1748 82 list
5 1 4 56 3 1804 85 function
6 2 8 48 2 1852 87 module
7 1 4 44 2 1896 89 class
8 1 4 32 2 1928 91 <Module>.C1
9 1 4 32 2 1960 92 str
<8 more rows. Type e.g. '_.more' to view.>
 Index Count % Size % Cumulative % Kind (class / dict of class)
10 1 4 32 2 1992 94 types.BuiltinFunctionType
11 1 4 28 1 2020 95 <Module>.C3
12 1 4 28 1 2048 96 tuple
13 1 4 24 1 2072 97 complex
14 1 4 20 1 2092 98 long
15 1 4 16 1 2108 99 float
16 1 4 12 1 2120 100 int
17 1 4 8 0 2128 100 types.NoneType
Partition of a set of 1 object. Total size = 32 bytes.
 Index Count % Size % Cumulative % Type
0 1 100 32 100 32 100 types.InstanceType
""".replace('<Module>', self.__module__)
            self.aseq(o.getvalue(), expected)
class BaseCase(TestCase):
def test_minmax(self):
s = self.guppy.sets.immbitset
min = self.US.minimals
max = self.US.maximals
self.aseq( min([]), [])
self.aseq( min([1]), [1])
self.aseq( min([1,1]), [1])
self.aseq( min([1,2]), [1])
self.aseq( min([[],[]]), [[]])
self.aseq( min([s([1]),s([1,2])]), [s([1])])
self.aseq( min([s([1]),s([1,2]),s([3])]), [s([1]),s([3])])
self.aseq( max([]), [])
self.aseq( max([1]), [1])
self.aseq( max([1,1]), [1])
self.aseq( max([1,2]), [2])
self.aseq( max([[],[]]), [[]])
self.aseq( max([s([1]),s([1,2])]), [s([1,2])])
self.aseq( max([s([1]),s([1,2]),s([3])]), [s([1,2]), s([3])])
def test_base_classes(self):
un = self.un
ty = self.ty
cl = self.cl
do = self.do
rc = self.rc
iso = self.iso
All = self.Anything
Nothing = self.Nothing
C1 = self.C1
C2 = self.C2
c1 = self.c1
lt = self.lt
eq = self.eq
dj = self.dj
nr = self.nr
data = [
(All, eq, All),
(ty(int), eq, ty(int)),
(ty(int), dj, ty(dict)),
(ty(int), lt, All),
(cl(C1), eq, cl(C1)),
(cl(C1), dj, cl(C2)),
(cl(C1), lt, ty(type(C1()))),
(cl(C1), dj, ty(int)),
(cl(C1), lt, All),
(do(cl(C1)), eq, do(cl(C1))),
(do(cl(C1)), lt, All),
(do(cl(C1)), dj, do(cl(C2))),
(do(cl(C1)), dj, cl(C1)),
(do(cl(C1)), dj, ty(int)),
(do(cl(C1)), dj, do(ty(type(C1())))),
(do(cl(C1)), lt, ty(dict)),
(do(cl(C1)), dj, do(rc(ty(dict)))),
(rc(ty(dict)),eq, rc(ty(dict))),
(rc(ty(dict)),lt, All),
(rc(ty(dict)),dj, rc(ty(list))),
(rc(cl(C1)), dj, rc(ty(type(C1())))),
(rc(cl(C1)), nr, ty(type(C1()))),
(rc(cl(C1)), nr, cl(C1)),
# (rc(cl(C1)), dj, rc(rc(cl(C1)))), # Not allowed form anymore / Nov 4 2005
(rc(cl(C1)), dj, rc(do(cl(C1)))),
(iso(1), eq, iso(1)),
(iso(1), lt, All),
(iso(1), dj, iso(2)),
(iso(1), lt, ty(int)),
(iso(1), dj, ty(dict)),
(iso(1), dj, cl(C1)),
(iso(c1), lt, cl(C1)),
(iso(c1.__dict__),lt,do(cl(C1))),
(iso(1), dj, do(cl(C1))),
(iso(1), dj, rc(cl(C1))),
(Nothing, eq, Nothing),
(Nothing, lt, ty(int)),
(Nothing, lt, iso(1)),
]
# Test relation of base classifications
for a, cmp, b in data:
cmp(a, b)
# Test the four set-operations: & | - ^
# depending on the asserted argument relation
if cmp is eq:
eq(b, a)
elif cmp is lt:
self.assert_(b > a)
eq(b ^ a, b - a) # Simple transformation
eq(a ^ b, b - a) # -=-, indep. of type
lt(a, b)
elif cmp is dj:
dj(b, a) # check that the dj relation is symmetric
eq(a & b, Nothing)
eq(b & a, Nothing)
eq(a | b, b | a)
eq(a - b, a)
eq((a | b) - b, a)
eq(a ^ b, a | b)
eq(b ^ a, a | b)
lt(a, a | b)
lt(b, a | b)
elif cmp is nr:
nr(b, a) # symmetric as well
eq(a & b, b & a)
eq(a & b & b, a & b)
eq((a & b) - b, Nothing)
eq((a | b) - b, a - b)
eq(a | b, b | a)
lt(Nothing, a & b)
lt(Nothing, b & a)
lt(a & b, a)
lt(a & b, b)
lt(a - b, a)
dj(a - b, b)
lt(a ^ b, a | b)
lt(a, a | b)
lt(b, a | b)
def test_invalid_operations(self):
US = self.US
US.auto_convert_iter = False
US.auto_convert_type = False
US.auto_convert_class = False
cl = self.cl
ty = self.ty
c1 = self.c1
self.failUnlessRaises(TypeError, lambda : cl(c1))
self.failUnlessRaises(TypeError, lambda : ty(c1))
self.failUnlessRaises(TypeError, lambda:ty(int) <= None)
self.failUnlessRaises(TypeError, lambda:None >= ty(int))
self.failUnlessRaises(TypeError, lambda:None <= ty(int))
self.failUnlessRaises(TypeError, lambda:list(ty(int)))
self.failUnlessRaises(TypeError, lambda:len(ty(int)))
self.failUnlessRaises(TypeError, lambda:ty(int) & None)
self.failUnlessRaises(TypeError, lambda:None & ty(int))
self.failUnlessRaises(TypeError, lambda:ty(int) | None)
self.failUnlessRaises(TypeError, lambda:None | ty(int))
self.failUnlessRaises(TypeError, lambda:ty(int) - None)
self.failUnlessRaises(TypeError, lambda:None - ty(int))
self.failUnlessRaises(TypeError, lambda:ty(int) ^ None)
self.failUnlessRaises(TypeError, lambda:None ^ ty(int))
self.failUnlessRaises(TypeError, lambda: ty(int) | [14])
self.failUnlessRaises(TypeError, lambda: ty(int) | dict)
self.failUnlessRaises(TypeError, lambda: ty(int) | self.C1)
def test_fancy_list_args(self):
    # Test the, normally disabled, possibility to use iterables as
    # right and left arguments in set expressions.
    # This option can cause problems as noted 22/11 2004.
    self.US.auto_convert_iter = True
    eq = self.eq
    iso = self.iso
    ty = self.ty
    # e1 and e3 are distinct empty lists: iso() classifies by identity,
    # so they must be tracked as separate elements.
    e1 = []
    e2 = {}
    e3 = []
    e4 = ()
    eq( ty(list) & [e1, e2, e3], iso(e1,e3))
    eq( [e1, e2, e3] & ty(list) , iso(e1,e3)) # Requires __rand__
    eq( [e1,e2,e4] & (ty(dict)|ty(list)) == [e1,e2], True)
    eq( [e1,e2] & (ty(dict)|ty(list)) == [e1,e2], True)
    eq( iso(e1,e2) & (ty(dict)|ty(list)) == [e1,e2], True)
    eq( iso(e1,e2) & [e1, e3], iso(e1))
    eq( iso(e1,e2) | [e1, e3], iso(e1,e2,e3))
    eq( [e1, e3] | iso(e1,e2), iso(e1,e2,e3)) # Requires __ror__
    eq( iso(e1,e3) - [e3], iso(e1))
    eq( [e1,e3] - iso(e3), iso(e1)) # Requires __rsub__
    eq( [e1,e2,e3] - ty(dict), iso(e1,e3))
    eq( ~ty(dict) & [e1,e2,e3], iso(e1,e3))
    eq( iso(e1,e3) ^ [e2], iso(e1,e2,e3))
    eq( [e2] ^ iso(e1,e3), iso(e1,e2,e3)) # Requires __rxor__
    # Subset / superset comparisons with a list on either side.
    eq( [e1,e2] <= iso(e1,e2,e3), True)
    eq( [e1,e2] <= ty(list)|ty(dict), True)
    eq( (ty(list)|ty(dict)) >= [e1,e2], True)
    eq( [e1,e2] <= ty(list), False)
    eq( [e1,e2] <= iso(e1), False)
    eq( [e1,e2] >= iso(e1,e2,e3), False)
    eq( [e1,e2] >= iso(e1,e2), True)
    eq( iso(e1,e2,e3) <= [e1,e2], False)
    eq( iso(e1,e2) <= [e1,e2], True)
    eq( iso(e1,e2,e3) >= [e1,e2], True)
    eq( iso(e1,e2) >= [e1,e2,e3], False)
def test_fancy_type_conversions(self):
    # Test the, perhaps optional, possibility to use types and classes
    # in classification set expressions.
    self.US.auto_convert_type = True
    self.US.auto_convert_class = True
    un = self.un
    ty = self.ty
    cl = self.cl
    do = self.do
    rc = self.rc
    iso = self.iso
    All = self.Anything
    Nothing = self.Nothing
    C1 = self.C1
    C2 = self.C2
    c1 = self.c1
    # Local eq: plain equality assertion (self.eq may do stricter checks).
    def eq(a, b):
        self.assert_(a == b)
    # e1 and e3 are distinct empty lists; identity matters for iso().
    e1 = []
    e2 = {}
    e3 = []
    e4 = ()
    # A bare type/class compares equal to its wrapped classification.
    eq( ty(dict), dict)
    eq( cl(C1), C1)
    # Bare types participate in set operations on either side.
    eq( iso(e1,e2) & dict, iso(e2))
    eq( dict & iso(e1,e2), iso(e2))
    eq( iso(e1,e2) | dict, iso(e1) | ty(dict))
    eq( dict | iso(e1,e2), iso(e1) | ty(dict))
    eq( iso(e1,e2) - dict, iso(e1))
    eq( dict - iso(e1,e2), ty(dict) - iso(e2))
    eq( iso(e1,e2, e3) ^ dict, (ty(dict)-iso(e2))|iso(e1,e3))
class LawsCase(TestCase):
    # Checks the algebra of classification sets: for every pair/triple of
    # representative sets the standard lattice/set laws must hold.
    def test_laws(self):
        un = self.un
        ty = self.ty
        cl = self.cl
        do = self.do
        rc = self.rc
        iso = self.iso
        All = self.Anything
        Nothing = self.Nothing
        C1 = self.C1
        C2 = self.C2
        c1 = self.c1
        lt = self.lt
        eq = self.eq
        # Law checkers provided by the guppy.sets test helper module.
        t = self.guppy.sets.test
        absorption = t.absorption
        associative = t.associative
        commutative = t.commutative
        deMorgan = t.deMorgan
        distributive = t.distributive
        idempotence = t.idempotence
        inclusion = t.inclusion
        # ltr: assert a is a strict subset of b and verify all identities
        # that follow; recurses on (b - a, b) up to `level` times.
        def ltr(a, b, level=3):
            lt(a, b)
            eq(a & b, a)
            eq(b & a, a)
            eq(a | b, b)
            eq(b | a, b)
            eq(a - b, Nothing)
            eqr(b - a, b - a)
            eq((b - a) | a, b)
            eq(a | (b - a), b)
            eq(a & (b - a), Nothing)
            eq((b - a) & a, Nothing)
            eq((b - a) - a, (b - a))
            eq(a - (b - a), a) # note Nov 3 2004
            if level > 0:
                if a is Nothing:
                    eq(b - a, b)
                else:
                    ltr(b - a, b, level-1)
        # eqr: assert a equals b and verify the identities that follow;
        # checks the symmetric direction once via `level`.
        def eqr(a, b, level = 1):
            eq(a, b)
            eq(a & b, a)
            eq(a | b, a)
            eq(a - b, Nothing)
            eq(a ^ b, Nothing)
            if level:
                eqr(b, a, level - 1)
        # Representative sets from top (All) to bottom (Nothing).
        classes = [All, ty(int), ty(type(c1)), cl(C1), do(cl(C1)), rc(ty(dict)), iso(c1), Nothing]
        for a in classes:
            idempotence(a)
            for b in classes:
                if a <= b:
                    if b <= a:
                        eqr(a, b)
                    else:
                        ltr(a, b)
                elif b <= a:
                    ltr(b, a)
                absorption(a, b)
                commutative(a, b)
                inclusion(a, b)
                deMorgan(a, b)
                for c in classes:
                    associative(a, b, c)
                    deMorgan(a, b, c)
                    distributive(a, b, c)
class ClassificationCase(TestCase):
    # Exercises the standard classifiers (bytype, byclass, byunity, byid,
    # byrcs) and checks that classification agrees with set selection.
    def test_classification(self):
        # Test classification by the standard classifiers
        self.View.is_rg_update_all = True # Tricky details Note Apr 22 2005
        Use = self.Use
        iso = self.iso
        nodeset = self.heapy.UniSet.immnodeset
        # A is old-style, B is new-style; classifiers treat them differently.
        class A:
            pass
        class B(object):
            pass
        a = A()
        b = B()
        li = [1, [], {}, a, b, a.__dict__, b.__dict__]
        # bytype always yields the concrete type.
        for o in li:
            self.asis(iso(o).bytype.kind.arg, type(o))
        # byclass yields the class for (old-style) instances, else the type.
        for o in li:
            kind = type(o)
            if kind == type(a):
                kind = o.__class__
            self.asis(iso(o).byclass.kind.arg, kind)
        # The default classifier distinguishes owned dicts (Clodo).
        for o in li:
            if o is a.__dict__:
                kind = iso(a).kind
            elif o is b.__dict__:
                kind = iso(b).kind
            elif type(o) is dict:
                kind = Use.Nothing
            elif o is a:
                kind = a.__class__
            else:
                kind = type(o)
            self.aseq(iso(o).kind.arg, kind)
        # byunity puts everything in one classification (arg is None).
        cla = iso(()).byunity.kind
        self.asis(cla.arg, None)
        for o in li:
            self.aseq(iso(o).byunity.kind, cla)
        # byid classifies each object by its address.
        for o in li:
            self.aseq(iso(o).byid.kind, Use.Id(id(o)))
        #self.View.update_referrers(nodeset(li))
        # byrcs classifies by referrer classification (li refers to all).
        for i, o in enumerate(li):
            cl = iso(o).byrcs.kind
            if 1 <= i <= 2:
                self.aseq(cl, Use.Clodo.sokind(list).refdby)
            if i == 5:
                self.aseq(cl, Use.Clodo.sokind(A)(list).refdby)
            if i == 6:
                self.aseq(cl, Use.Clodo.sokind(B)(list).refdby)
    def test_selection(self):
        # Test classifications operations via selection invariant
        Use = self.Use
        class A:
            pass
        class B(object):
            pass
        a = A()
        b = B()
        li = Use.iso(135l, [], {}, a, b, a.__dict__, b.__dict__)
        allers = (Use.Unity, Use.Type, Use.Class, Use.Clodo,
                  Use.Rcs, Use.Via) #, Use.Id
        # ps maps each partition kind to the selected subset it produced.
        ps = {}
        for er in allers:
            # p = er.classifier.partition(li.nodes)
            p = [(av.kind, av) for av in li.by(er).partition]
            for ak, av in p:
                if ak in ps:
                    self.aseq( ps[ak], av)
                else:
                    ps[ak] = av
        # Selecting with a kind must reproduce the partition subset, and
        # the set operators must commute with selection.
        for ak, av in ps.items():
            self.aseq ( ak & li, av )
            for bk, bv in ps.items():
                # Test set operations by selection definition
                self.aseq ( (ak & bk) & li, av & bv)
                self.aseq ( (ak | bk) & li, av | bv)
                self.aseq ( (ak - bk) & li, av - bv)
                self.aseq ( (bk - ak) & li, bv - av)
                self.aseq ( (ak ^ bk) & li, av ^ bv)
def test_main(testrender=1, debug=0):
    # Each suite is gated by a literal flag so developers can toggle
    # individual suites while debugging; currently only ClassificationCase
    # runs unconditionally (the other gates are `0 or not debug`).
    if 0 or not debug:
        support.run_unittest(BaseCase, debug)
    if 1 or not debug:
        support.run_unittest(ClassificationCase, debug)
    if 0 or not debug:
        support.run_unittest(LawsCase, debug)
    if 0 or not debug:
        support.run_unittest(NewCases, debug)
    if 0 or (testrender and not debug):
        support.run_unittest(RenderCase, debug)
    if 0 or not debug:
        support.run_unittest(SpecialCases, debug)
# Deliberately disabled (leading `0 and`): see comment below.
if 0 and __name__ == "__main__":
    # It doesn't like to be run under name __main__,
    # needs to have its actual module name.
    import guppy.heapy.test.test_Classifiers as x
    if 1:
        reload(x)
        x.test_main()
| apache-2.0 |
publicloudapp/csrutil | linux-4.3/scripts/gdb/linux/symbols.py | 588 | 6302 | #
# gdb helper commands and functions for Linux kernel debugging
#
# load kernel and module symbols
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
import os
import re
from linux import modules
if hasattr(gdb, 'Breakpoint'):
    class LoadModuleBreakpoint(gdb.Breakpoint):
        """Internal breakpoint that fires when the kernel loads a module.

        On hit it asks the owning lx-symbols command to (re-)load the
        module's symbols, then resumes execution (stop() returns False).
        """
        def __init__(self, spec, gdb_command):
            super(LoadModuleBreakpoint, self).__init__(spec, internal=True)
            self.silent = True
            self.gdb_command = gdb_command
        def stop(self):
            # "mod" is the module being initialized in the breakpoint's frame.
            module = gdb.parse_and_eval("mod")
            module_name = module['name'].string()
            cmd = self.gdb_command
            # enforce update if object file is not found
            cmd.module_files_updated = False
            # Disable pagination while reporting symbol (re-)loading.
            # The console input is blocked in this context so that we would
            # get stuck waiting for the user to acknowledge paged output.
            show_pagination = gdb.execute("show pagination", to_string=True)
            pagination = show_pagination.endswith("on.\n")
            gdb.execute("set pagination off")
            if module_name in cmd.loaded_modules:
                gdb.write("refreshing all symbols to reload module "
                          "'{0}'\n".format(module_name))
                cmd.load_all_symbols()
            else:
                cmd.load_module_symbols(module)
            # restore pagination state
            gdb.execute("set pagination %s" % ("on" if pagination else "off"))
            return False
class LxSymbols(gdb.Command):
    """(Re-)load symbols of Linux kernel and currently loaded modules.

The kernel (vmlinux) is taken from the current working directly. Modules (.ko)
are scanned recursively, starting in the same directory. Optionally, the module
search path can be extended by a space separated list of paths passed to the
lx-symbols command."""

    module_paths = []
    module_files = []
    module_files_updated = False
    loaded_modules = []
    breakpoint = None

    def __init__(self):
        super(LxSymbols, self).__init__("lx-symbols", gdb.COMMAND_FILES,
                                        gdb.COMPLETE_FILENAME)

    def _update_module_files(self):
        """Rescan every module search path for .ko files."""
        self.module_files = []
        for path in self.module_paths:
            gdb.write("scanning for modules in {0}\n".format(path))
            for root, dirs, files in os.walk(path):
                for name in files:
                    if name.endswith(".ko"):
                        self.module_files.append(root + "/" + name)
        self.module_files_updated = True

    def _get_module_file(self, module_name):
        """Return the path of the .ko file for module_name, or None."""
        # A module named foo_bar may live on disk as foo-bar.ko or foo_bar.ko.
        module_pattern = r".*/{0}\.ko$".format(
            module_name.replace("_", r"[_\-]"))
        for name in self.module_files:
            if re.match(module_pattern, name) and os.path.exists(name):
                return name
        return None

    def _section_arguments(self, module):
        """Build the "-s <section> <addr>" arguments for add-symbol-file."""
        try:
            sect_attrs = module['sect_attrs'].dereference()
        except gdb.error:
            return ""
        attrs = sect_attrs['attrs']
        section_name_to_address = {
            attrs[n]['name'].string(): attrs[n]['address']
            for n in range(int(sect_attrs['nsections']))}
        args = []
        for section_name in [".data", ".data..read_mostly", ".rodata", ".bss"]:
            address = section_name_to_address.get(section_name)
            if address:
                args.append(" -s {name} {addr}".format(
                    name=section_name, addr=str(address)))
        return "".join(args)

    def load_module_symbols(self, module):
        """Locate the .ko for a loaded module and add its symbols to gdb."""
        module_name = module['name'].string()
        module_addr = str(module['module_core']).split()[0]

        module_file = self._get_module_file(module_name)
        if not module_file and not self.module_files_updated:
            self._update_module_files()
            module_file = self._get_module_file(module_name)

        if module_file:
            # BUG FIX: the format strings previously lacked the {filename}
            # placeholder, so the filename= argument was ignored and gdb was
            # asked to add-symbol-file the literal path "(unknown)".
            gdb.write("loading @{addr}: {filename}\n".format(
                addr=module_addr, filename=module_file))
            cmdline = "add-symbol-file {filename} {addr}{sections}".format(
                filename=module_file,
                addr=module_addr,
                sections=self._section_arguments(module))
            gdb.execute(cmdline, to_string=True)
            if module_name not in self.loaded_modules:
                self.loaded_modules.append(module_name)
        else:
            gdb.write("no module object found for '{0}'\n".format(module_name))

    def load_all_symbols(self):
        """Reload vmlinux symbols and the symbols of every loaded module."""
        gdb.write("loading vmlinux\n")

        # Dropping symbols will disable all breakpoints. So save their states
        # and restore them afterward.
        saved_states = []
        if hasattr(gdb, 'breakpoints') and not gdb.breakpoints() is None:
            for bp in gdb.breakpoints():
                saved_states.append({'breakpoint': bp, 'enabled': bp.enabled})

        # drop all current symbols and reload vmlinux
        gdb.execute("symbol-file", to_string=True)
        gdb.execute("symbol-file vmlinux")

        self.loaded_modules = []
        module_list = modules.module_list()
        if not module_list:
            gdb.write("no modules found\n")
        else:
            [self.load_module_symbols(module) for module in module_list]

        for saved_state in saved_states:
            saved_state['breakpoint'].enabled = saved_state['enabled']

    def invoke(self, arg, from_tty):
        """Entry point of the lx-symbols command; arg is the extra paths."""
        self.module_paths = arg.split()
        self.module_paths.append(os.getcwd())

        # enforce update
        self.module_files = []
        self.module_files_updated = False

        self.load_all_symbols()

        if hasattr(gdb, 'Breakpoint'):
            if self.breakpoint is not None:
                self.breakpoint.delete()
                self.breakpoint = None
            self.breakpoint = LoadModuleBreakpoint(
                "kernel/module.c:do_init_module", self)
        else:
            gdb.write("Note: symbol update on module loading not supported "
                      "with this gdb version\n")
LxSymbols()
| mit |
phoenixstar7/pmtk3 | python/demos/ch02/robustDemo.py | 7 | 1150 | #!/usr/bin/env python
import numpy as np
import matplotlib.pylab as pl
from scipy.stats import t, laplace, norm
# Demo: robustness of Gaussian vs. Student-t vs. Laplace fits to outliers.
# Sample 30 points from a standard normal, plus three manual outliers.
a = np.random.randn(30)
outliers = np.array([8, 8.75, 9.5])
# 1.0 / 30 keeps the histogram weights correct even under Python 2
# integer division (1 / 30 would be 0 there).
pl.hist(a, 7, weights=[1.0 / 30] * 30, rwidth=0.8)

# Fit without outliers.
x = np.linspace(-5, 10, 500)
loc, scale = norm.fit(a)
gauss_pdf = norm.pdf(x, loc=loc, scale=scale)
loc, scale = laplace.fit(a)
laplace_pdf = laplace.pdf(x, loc=loc, scale=scale)
fd, loc, scale = t.fit(a)
student_pdf = t.pdf(x, fd, loc=loc, scale=scale)
pl.plot(x, gauss_pdf, 'k>',
        x, student_pdf, 'r-',
        x, laplace_pdf, 'b--')
pl.legend(('Gauss', 'Student', 'Laplace'))
pl.savefig('robustDemo_without_outliers.png')

# Add the outliers and refit (weights renormalized over all 33 points).
pl.figure()
pl.hist(a, 7, weights=[1.0 / 33] * 30, rwidth=0.8)
pl.hist(outliers, 3, weights=[1.0 / 33] * 3, rwidth=0.8)
aa = np.hstack((a, outliers))
loc, scale = norm.fit(aa)
gauss_pdf = norm.pdf(x, loc=loc, scale=scale)
loc, scale = laplace.fit(aa)
laplace_pdf = laplace.pdf(x, loc=loc, scale=scale)
fd, loc, scale = t.fit(aa)
# BUG FIX: the original did `t = t.pdf(...)`, rebinding the name `t` and
# shadowing scipy.stats.t; use a separate result variable instead.
student_pdf = t.pdf(x, fd, loc=loc, scale=scale)
pl.plot(x, gauss_pdf, 'k:',
        x, student_pdf, 'r-',
        x, laplace_pdf, 'b--')
pl.legend(('Gauss', 'Student', 'Laplace'))
pl.savefig('robustDemo_with_outliers.png')
pl.show()
| mit |
40223211/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/_markupbase.py | 891 | 14598 | """Shared support for scanning document type declarations in HTML and XHTML.
This module is used as a foundation for the html.parser module. It has no
documented public API and should not be used directly.
"""
import re
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')
# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
_msmarkedsectionclose = re.compile(r']\s*>')
del re
class ParserBase:
    """Parser base class which provides some common support methods used
    by the SGML/HTML and XHTML parsers."""

    def __init__(self):
        if self.__class__ is ParserBase:
            raise RuntimeError(
                "_markupbase.ParserBase must be subclassed")

    def error(self, message):
        # Subclasses decide how to report parse errors.
        raise NotImplementedError(
            "subclasses of ParserBase must override error()")

    def reset(self):
        # lineno is 1-based; offset is the 0-based column within the line.
        self.lineno = 1
        self.offset = 0

    def getpos(self):
        """Return current line number and offset."""
        return self.lineno, self.offset

    # Internal -- update line number and offset.  This should be
    # called for each piece of data exactly once, in order -- in other
    # words the concatenation of all the input strings to this
    # function should be exactly the entire input.
    def updatepos(self, i, j):
        if i >= j:
            return j
        rawdata = self.rawdata
        nlines = rawdata.count("\n", i, j)
        if nlines:
            self.lineno = self.lineno + nlines
            pos = rawdata.rindex("\n", i, j) # Should not fail
            self.offset = j-(pos+1)
        else:
            self.offset = self.offset + j-i
        return j

    _decl_otherchars = ''

    # Internal -- parse declaration (for use by subclasses).
    # Returns the index just past the declaration, or -1 if the buffer
    # ends before the declaration is complete.
    def parse_declaration(self, i):
        # This is some sort of declaration; in "HTML as
        # deployed," this should only be the document type
        # declaration ("<!DOCTYPE html...>").
        # ISO 8879:1986, however, has more complex
        # declaration syntax for elements in <!...>, including:
        # --comment--
        # [marked section]
        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
        # ATTLIST, NOTATION, SHORTREF, USEMAP,
        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
        rawdata = self.rawdata
        j = i + 2
        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
        if rawdata[j:j+1] == ">":
            # the empty comment <!>
            return j + 1
        if rawdata[j:j+1] in ("-", ""):
            # Start of comment followed by buffer boundary,
            # or just a buffer boundary.
            return -1
        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
        n = len(rawdata)
        if rawdata[j:j+2] == '--': #comment
            # Locate --.*-- as the body of the comment
            return self.parse_comment(i)
        elif rawdata[j] == '[': #marked section
            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
            # Note that this is extended by Microsoft Office "Save as Web" function
            # to include [if...] and [endif].
            return self.parse_marked_section(i)
        else: #all other declaration elements
            decltype, j = self._scan_name(j, i)
        if j < 0:
            return j
        if decltype == "doctype":
            self._decl_otherchars = ''
        while j < n:
            c = rawdata[j]
            if c == ">":
                # end of declaration syntax
                data = rawdata[i+2:j]
                if decltype == "doctype":
                    self.handle_decl(data)
                else:
                    # According to the HTML5 specs sections "8.2.4.44 Bogus
                    # comment state" and "8.2.4.45 Markup declaration open
                    # state", a comment token should be emitted.
                    # Calling unknown_decl provides more flexibility though.
                    self.unknown_decl(data)
                return j + 1
            if c in "\"'":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1 # incomplete
                j = m.end()
            elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
                name, j = self._scan_name(j, i)
            elif c in self._decl_otherchars:
                j = j + 1
            elif c == "[":
                # this could be handled in a separate doctype parser
                if decltype == "doctype":
                    j = self._parse_doctype_subset(j + 1, i)
                elif decltype in {"attlist", "linktype", "link", "element"}:
                    # must tolerate []'d groups in a content model in an element declaration
                    # also in data attribute specifications of attlist declaration
                    # also link type declaration subsets in linktype declarations
                    # also link attribute specification lists in link declarations
                    self.error("unsupported '[' char in %s declaration" % decltype)
                else:
                    self.error("unexpected '[' char in declaration")
            else:
                self.error(
                    "unexpected %r char in declaration" % rawdata[j])
            if j < 0:
                return j
        return -1 # incomplete

    # Internal -- parse a marked section
    # Override this to handle MS-word extension syntax <![if word]>content<![endif]>
    def parse_marked_section(self, i, report=1):
        rawdata= self.rawdata
        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
        sectName, j = self._scan_name( i+3, i )
        if j < 0:
            return j
        if sectName in {"temp", "cdata", "ignore", "include", "rcdata"}:
            # look for standard ]]> ending
            match= _markedsectionclose.search(rawdata, i+3)
        elif sectName in {"if", "else", "endif"}:
            # look for MS Office ]> ending
            match= _msmarkedsectionclose.search(rawdata, i+3)
        else:
            self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.unknown_decl(rawdata[i+3: j])
        return match.end(0)

    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i, report=1):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            self.error('unexpected call to parse_comment()')
        match = _commentclose.search(rawdata, i+4)
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.handle_comment(rawdata[i+4: j])
        return match.end(0)

    # Internal -- scan past the internal subset in a <!DOCTYPE declaration,
    # returning the index just past any whitespace following the trailing ']'.
    def _parse_doctype_subset(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        j = i
        while j < n:
            c = rawdata[j]
            if c == "<":
                s = rawdata[j:j+2]
                if s == "<":
                    # end of buffer; incomplete
                    return -1
                if s != "<!":
                    self.updatepos(declstartpos, j + 1)
                    self.error("unexpected char in internal subset (in %r)" % s)
                if (j + 2) == n:
                    # end of buffer; incomplete
                    return -1
                if (j + 4) > n:
                    # end of buffer; incomplete
                    return -1
                if rawdata[j:j+4] == "<!--":
                    j = self.parse_comment(j, report=0)
                    if j < 0:
                        return j
                    continue
                name, j = self._scan_name(j + 2, declstartpos)
                if j == -1:
                    return -1
                if name not in {"attlist", "element", "entity", "notation"}:
                    self.updatepos(declstartpos, j + 2)
                    self.error(
                        "unknown declaration %r in internal subset" % name)
                # handle the individual names
                meth = getattr(self, "_parse_doctype_" + name)
                j = meth(j, declstartpos)
                if j < 0:
                    return j
            elif c == "%":
                # parameter entity reference
                if (j + 1) == n:
                    # end of buffer; incomplete
                    return -1
                s, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                if rawdata[j] == ";":
                    j = j + 1
            elif c == "]":
                j = j + 1
                while j < n and rawdata[j].isspace():
                    j = j + 1
                if j < n:
                    if rawdata[j] == ">":
                        return j
                    self.updatepos(declstartpos, j)
                    self.error("unexpected char after internal subset")
                else:
                    return -1
            elif c.isspace():
                j = j + 1
            else:
                self.updatepos(declstartpos, j)
                self.error("unexpected char %r in internal subset" % c)
        # end of buffer reached
        return -1

    # Internal -- scan past <!ELEMENT declarations
    def _parse_doctype_element(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j == -1:
            return -1
        # style content model; just skip until '>'
        rawdata = self.rawdata
        if '>' in rawdata[j:]:
            return rawdata.find(">", j) + 1
        return -1

    # Internal -- scan past <!ATTLIST declarations
    def _parse_doctype_attlist(self, i, declstartpos):
        rawdata = self.rawdata
        name, j = self._scan_name(i, declstartpos)
        c = rawdata[j:j+1]
        if c == "":
            return -1
        if c == ">":
            return j + 1
        while 1:
            # scan a series of attribute descriptions; simplified:
            #   name type [value] [#constraint]
            name, j = self._scan_name(j, declstartpos)
            if j < 0:
                return j
            c = rawdata[j:j+1]
            if c == "":
                return -1
            if c == "(":
                # an enumerated type; look for ')'
                if ")" in rawdata[j:]:
                    j = rawdata.find(")", j) + 1
                else:
                    return -1
                while rawdata[j:j+1].isspace():
                    j = j + 1
                if not rawdata[j:]:
                    # end of buffer, incomplete
                    return -1
            else:
                name, j = self._scan_name(j, declstartpos)
            c = rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == "#":
                if rawdata[j:] == "#":
                    # end of buffer
                    return -1
                name, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == '>':
                # all done
                return j + 1

    # Internal -- scan past <!NOTATION declarations
    def _parse_doctype_notation(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j < 0:
            return j
        rawdata = self.rawdata
        while 1:
            c = rawdata[j:j+1]
            if not c:
                # end of buffer; incomplete
                return -1
            if c == '>':
                return j + 1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1
                j = m.end()
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan past <!ENTITY declarations
    def _parse_doctype_entity(self, i, declstartpos):
        rawdata = self.rawdata
        if rawdata[i:i+1] == "%":
            # parameter entity: skip whitespace after '%'
            j = i + 1
            while 1:
                c = rawdata[j:j+1]
                if not c:
                    return -1
                if c.isspace():
                    j = j + 1
                else:
                    break
        else:
            j = i
        name, j = self._scan_name(j, declstartpos)
        if j < 0:
            return j
        while 1:
            c = self.rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1 # incomplete
            elif c == ">":
                return j + 1
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan a name token and the new position and the token, or
    # return -1 if we've reached the end of the buffer.
    def _scan_name(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = _declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1 # end of buffer
            return name.lower(), m.end()
        else:
            self.updatepos(declstartpos, i)
            self.error("expected name token at %r"
                       % rawdata[declstartpos:declstartpos+20])

    # To be overridden -- handlers for unknown objects
    def unknown_decl(self, data):
        pass
| agpl-3.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/express_route_service_provider_bandwidths_offered_py3.py | 10 | 1207 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
# NOTE: AutoRest-generated model; manual edits are lost on regeneration.
class ExpressRouteServiceProviderBandwidthsOffered(Model):
    """Contains bandwidths offered in ExpressRouteServiceProvider resources.

    :param offer_name: The OfferName.
    :type offer_name: str
    :param value_in_mbps: The ValueInMbps.
    :type value_in_mbps: int
    """

    # Maps each model attribute to its wire name and msrest type for
    # (de)serialization by the msrest Serializer.
    _attribute_map = {
        'offer_name': {'key': 'offerName', 'type': 'str'},
        'value_in_mbps': {'key': 'valueInMbps', 'type': 'int'},
    }

    def __init__(self, *, offer_name: str=None, value_in_mbps: int=None, **kwargs) -> None:
        super(ExpressRouteServiceProviderBandwidthsOffered, self).__init__(**kwargs)
        self.offer_name = offer_name
        self.value_in_mbps = value_in_mbps
| mit |
ltilve/ChromiumGStreamerBackend | build/android/gyp/emma_instr.py | 101 | 7458 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Instruments classes and jar files.
This script corresponds to the 'emma_instr' action in the java build process.
Depending on whether emma_instrument is set, the 'emma_instr' action will either
call one of the instrument commands, or the copy command.
Possible commands are:
- instrument_jar: Accepts a jar and instruments it using emma.jar.
- instrument_classes: Accepts a directory containing java classes and
instruments it using emma.jar.
- copy: Called when EMMA coverage is not enabled. This allows us to make
this a required step without necessarily instrumenting on every build.
Also removes any stale coverage files.
"""
import collections
import json
import os
import shutil
import sys
import tempfile
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
from pylib.utils import command_option_parser
from util import build_utils
def _AddCommonOptions(option_parser):
  """Registers the options shared by every emma_instr command."""
  common_options = (
      ('--input-path',
       ('Path to input file(s). Either the classes '
        'directory, or the path to a jar.')),
      ('--output-path',
       ('Path to output final file(s) to. Either the '
        'final classes directory, or the directory in '
        'which to place the instrumented/copied jar.')),
      ('--stamp', 'Path to touch when done.'),
      ('--coverage-file', 'File to create with coverage metadata.'),
      ('--sources-file', 'File to create with the list of sources.'),
  )
  for flag, help_text in common_options:
    option_parser.add_option(flag, help=help_text)
def _AddInstrumentOptions(option_parser):
  """Registers the common options plus those specific to instrumentation."""
  _AddCommonOptions(option_parser)
  instrument_options = (
      ('--sources', 'Space separated list of sources.'),
      ('--src-root', 'Root of the src repository.'),
      ('--emma-jar', 'Path to emma.jar.'),
  )
  for flag, help_text in instrument_options:
    option_parser.add_option(flag, help=help_text)
  option_parser.add_option(
      '--filter-string', default='',
      help=('Filter string consisting of a list of inclusion/exclusion '
            'patterns separated with whitespace and/or comma.'))
def _RunCopyCommand(_command, options, _, option_parser):
  """Copies the jar from input to output locations.

  Also removes any old coverage/sources file.

  Args:
    command: String indicating the command that was received to trigger
        this function.
    options: optparse options dictionary.
    args: List of extra args from optparse.
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  required = (options.input_path, options.output_path,
              options.coverage_file, options.sources_file)
  if not all(required):
    option_parser.error('All arguments are required.')

  # Delete stale metadata left over from a previous instrumented build.
  output_dir = os.path.dirname(options.output_path)
  for stale_name in (options.coverage_file, options.sources_file):
    stale_path = os.path.join(output_dir, stale_name)
    if os.path.exists(stale_path):
      os.remove(stale_path)

  if os.path.isdir(options.input_path):
    shutil.rmtree(options.output_path, ignore_errors=True)
    shutil.copytree(options.input_path, options.output_path)
  else:
    shutil.copy(options.input_path, options.output_path)

  if options.stamp:
    build_utils.Touch(options.stamp)
def _CreateSourcesFile(sources_string, sources_file, src_root):
  """Writes the JSON list of sources, relative to |src_root|, to |sources_file|.

  Args:
    sources_string: String generated from gyp containing the list of sources.
    sources_file: File into which to write the JSON list of sources.
    src_root: Root which sources added to the file should be relative to.

  Returns:
    An exit code.
  """
  src_root = os.path.abspath(src_root)
  relative_sources = []
  for source in build_utils.ParseGypList(sources_string):
    abs_source = os.path.abspath(source)
    # Every source must live under the repository root.
    if abs_source[:len(src_root)] != src_root:
      print ('Error: found source directory not under repository root: %s %s'
             % (abs_source, src_root))
      return 1
    relative_sources.append(os.path.relpath(abs_source, src_root))
  with open(sources_file, 'w') as f:
    json.dump(relative_sources, f)
def _RunInstrumentCommand(command, options, _, option_parser):
  """Instruments the classes/jar files using EMMA.

  Args:
    command: 'instrument_jar' or 'instrument_classes'. This distinguishes
        whether we copy the output from the created lib/ directory, or classes/
        directory.
    options: optparse options dictionary.
    args: List of extra args from optparse.
    option_parser: optparse.OptionParser object.

  Returns:
    An exit code.
  """
  if not (options.input_path and options.output_path and
          options.coverage_file and options.sources_file and options.sources and
          options.src_root and options.emma_jar):
    option_parser.error('All arguments are required.')

  coverage_file = os.path.join(os.path.dirname(options.output_path),
                               options.coverage_file)
  sources_file = os.path.join(os.path.dirname(options.output_path),
                              options.sources_file)
  # Remove stale coverage metadata so EMMA writes a fresh file.
  if os.path.exists(coverage_file):
    os.remove(coverage_file)
  temp_dir = tempfile.mkdtemp()
  try:
    # Run EMMA's offline instrumenter; '-m fullcopy' copies all files
    # (instrumented classes into lib/ or classes/ under temp_dir).
    cmd = ['java', '-cp', options.emma_jar,
           'emma', 'instr',
           '-ip', options.input_path,
           '-ix', options.filter_string,
           '-d', temp_dir,
           '-out', coverage_file,
           '-m', 'fullcopy']
    build_utils.CheckOutput(cmd)

    if command == 'instrument_jar':
      for jar in os.listdir(os.path.join(temp_dir, 'lib')):
        shutil.copy(os.path.join(temp_dir, 'lib', jar),
                    options.output_path)
    else: # 'instrument_classes'
      if os.path.isdir(options.output_path):
        shutil.rmtree(options.output_path, ignore_errors=True)
      shutil.copytree(os.path.join(temp_dir, 'classes'),
                      options.output_path)
  finally:
    # Always clean up the scratch directory, even if EMMA failed.
    shutil.rmtree(temp_dir)

  _CreateSourcesFile(options.sources, sources_file, options.src_root)

  if options.stamp:
    build_utils.Touch(options.stamp)

  return 0
# Pairs an option-registration function with the function that runs the
# command once options are parsed.
CommandFunctionTuple = collections.namedtuple(
    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
# Dispatch table: command name -> (add-options, run) pair. 'copy' is used
# when EMMA coverage is disabled; the instrument_* commands otherwise.
VALID_COMMANDS = {
    'copy': CommandFunctionTuple(_AddCommonOptions,
                                 _RunCopyCommand),
    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
                                           _RunInstrumentCommand),
    'instrument_classes': CommandFunctionTuple(_AddInstrumentOptions,
                                               _RunInstrumentCommand),
}


def main():
  # Delegate argument parsing and command dispatch to the shared helper.
  option_parser = command_option_parser.CommandOptionParser(
      commands_dict=VALID_COMMANDS)
  command_option_parser.ParseAndExecute(option_parser)


if __name__ == '__main__':
  sys.exit(main())
| bsd-3-clause |
Vitallium/qtwebkit | Source/WebKit2/Scripts/generate-message-receiver.py | 145 | 1809 | #!/usr/bin/env python
#
# Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import with_statement
import sys
import webkit2.messages
def main(argv=None):
    """Reads a messages definition file (argv[1]) and writes the generated
    message handler to stdout. Returns 0 on success."""
    argv = argv or sys.argv
    input_path = argv[1]
    with open(input_path) as input_file:
        # Python 3, change to: print(webkit2.messages.generate_message_handler(input_file), end='')
        sys.stdout.write(webkit2.messages.generate_message_handler(input_file))
    return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| gpl-2.0 |
compas-dev/compas | src/compas/numerical/topop/topop_numpy.py | 1 | 5846 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from numpy import abs
from numpy import asarray
from numpy import array
from numpy import ceil
from numpy import dot
from numpy import hstack
from numpy import kron
from numpy import max
from numpy import maximum
from numpy import min
from numpy import minimum
from numpy import int64
from numpy import newaxis
from numpy import ones
from numpy import ravel
from numpy import reshape
from numpy import sqrt
from numpy import squeeze
from numpy import sum
from numpy import tile
from numpy import vstack
from numpy import zeros
from scipy.sparse import coo_matrix
from scipy.sparse.linalg import spsolve
__all__ = ['topop_numpy']
def topop_numpy(nelx, nely, loads, supports, volfrac=0.5, penal=3, rmin=1.5, callback=None):
    """Topology optimisation in 2D.

    SIMP-style compliance minimisation on a regular nelx x nely grid of
    square bilinear elements (a NumPy translation of the classic 88/99-line
    MATLAB topology-optimisation code).

    Parameters
    ----------
    nelx : int
        Number of elements in x.
    nely : int
        Number of elements in y.
    loads : dict
        {'i-j': [Px, Py]}.
    supports : dict
        {'i-j': [Bx, By]} 1=fixed, 0=free.
    volfrac : float
        Volume fraction.
    penal : float
        Penalisation power.
    rmin : float
        Filter radius.
    callback : callable, optional
        Called with the current density array after every iteration.

    Returns
    -------
    array
        Density array of shape (nely, nelx).

    Examples
    --------
    >>>

    """
    if callback and not callable(callback):
        raise Exception("The provided callback is not callable.")

    # Grid bookkeeping: nodes vs. elements, two dofs (x, y) per node.
    nx = nelx + 1
    ny = nely + 1
    nn = nx * ny
    ne = nelx * nely
    ndof = 2 * nn
    dv = ones((nely, nelx))  # volume sensitivity: each element contributes equally

    # Finite element analysis
    v = 0.3        # Poisson's ratio
    E = 1.         # Young's modulus of solid material
    Emin = 10**(-10)  # near-zero stiffness for void (avoids a singular K)

    # 8x8 element stiffness matrix for a square plane-stress bilinear element,
    # assembled from its four 4x4 sub-blocks.
    A11 = array([[12, +3, -6, -3], [+3, 12, +3, +0], [-6, +3, 12, -3], [-3, +0, -3, 12]])
    A12 = array([[-6, -3, +0, +3], [-3, -6, -3, -6], [+0, -3, -6, +3], [+3, -6, +3, -6]])
    B11 = array([[-4, +3, -2, +9], [+3, -4, -9, +4], [-2, -9, -4, -3], [+9, +4, -3, -4]])
    B12 = array([[+2, -3, +4, -9], [-3, +2, +9, -2], [+4, +9, +2, +3], [-9, -2, +3, +2]])
    A21 = A12.transpose()
    B21 = B12.transpose()
    A = vstack([hstack([A11, A12]), hstack([A21, A11])])
    B = vstack([hstack([B11, B12]), hstack([B21, B11])])
    Ke = 1 / (1 - v**2) / 24 * (A + v * B)
    Ker = ravel(Ke, order='F')[:, newaxis]

    # Element-to-dof connectivity (edof), then the COO row/col index vectors
    # (iK, jK) for scattering all 8x8 element matrices into the global K.
    nodes = reshape(range(1, nn + 1), (ny, nx), order='F')
    eVec = tile(reshape(2 * nodes[:-1, :-1], (ne, 1), order='F'), (1, 8))
    edof = eVec + tile(hstack([array([0, 1]), 2 * nely + array([2, 3, 0, 1]), array([-2, -1])]), (ne, 1))
    iK = reshape(kron(edof, ones((8, 1))).transpose(), (64 * ne), order='F')
    jK = reshape(kron(edof, ones((1, 8))).transpose(), (64 * ne), order='F')

    # Supports: collect the fixed dof indices; keys are 'j-i' grid coordinates.
    U = zeros((ndof, 1))
    fixed = []
    for support, B in supports.items():
        jb, ib = [int(i) for i in support.split('-')]
        Bx, By = B
        node = int(jb * ny + ib)
        if Bx:
            fixed.append(2 * node)
        if By:
            fixed.append(2 * node + 1)
    free = list(set(range(ndof)) - set(fixed))

    # Loads: build the sparse global load vector F, then restrict it to the
    # free dofs (Find) once, outside the optimisation loop.
    data = []
    rows = []
    cols = []
    for load, P in loads.items():
        jp, ip = [int(i) for i in load.split('-')]
        Px, Py = P
        node = int(jp * ny + ip)
        data.extend([Px, Py])
        rows.extend([2 * node, 2 * node + 1])
        cols.extend([0, 0])
    F = coo_matrix((data, (rows, cols)), shape=(ndof, 1))
    Find = F.tocsr()[free]

    # Filter: precompute the sparse sensitivity-filter matrix H with linearly
    # decaying weights within radius rmin. NOTE: k is incremented *before*
    # the first assignment (1-based MATLAB heritage), so slot 0 stays a
    # harmless zero entry at (0, 0); the arrays are sized with enough slack.
    iH = zeros(ne * (2 * (int(ceil(rmin)) - 1) + 1)**2, dtype=int64)
    jH = zeros(iH.shape, dtype=int64)
    sH = zeros(iH.shape)
    k = 0
    for i1 in range(nelx):
        max_i = int(max([i1 - (ceil(rmin) - 1), 0]))
        min_i = int(min([i1 + (ceil(rmin) - 1), nelx - 1]))
        for j1 in range(nely):
            max_j = int(max([j1 - (ceil(rmin) - 1), 0]))
            min_j = int(min([j1 + (ceil(rmin) - 1), nely - 1]))
            e1 = i1 * nely + j1
            for i2 in range(max_i, min_i + 1):
                for j2 in range(max_j, min_j + 1):
                    k += 1
                    e2 = i2 * nely + j2
                    iH[k] = e1
                    jH[k] = e2
                    sH[k] = max([0, rmin - sqrt((i1 - i2)**2 + (j1 - j2)**2)])
    H = coo_matrix((sH, (iH, jH)))
    Hs = sum(H.toarray(), 1)  # per-element weight sums used to normalise

    # Main loop: iterate until the largest per-element density change <= 0.1.
    iteration = 0
    change = 1
    move = 0.2  # maximum density change per element per iteration
    x = tile(volfrac, (nely, nelx))  # start from a uniform design
    xP = x * 1.
    nones = ones((ne)) * 0.001  # lower clamp to avoid division by ~0

    while change > 0.1:

        # FE: assemble K with SIMP-interpolated stiffness and solve for the
        # free-dof displacements.
        xrav = ravel(xP, order='F').transpose()
        sK = reshape(Ker * (Emin + xrav**penal * (E - Emin)), (64 * ne), order='F')
        K = coo_matrix((sK, (asarray(iK, dtype=int64), asarray(jK, dtype=int64)))).tocsr()
        Kind = (K.tocsc()[:, free]).tocsr()[free, :]
        U[free] = spsolve(Kind, Find)[:, newaxis]

        # Objective function: compliance c and its filtered sensitivity dc.
        ce = reshape(sum(dot(squeeze(U[edof]), Ke) * squeeze(U[edof]), 1), (nely, nelx), order='F')
        c = sum(sum((Emin + xP**penal * (E - Emin)) * ce))
        dc = -penal * (E - Emin) * xP**(penal - 1) * ce
        xdc = squeeze(H.dot(ravel(x * dc, order='F')[:, newaxis]))
        dc = reshape(xdc / Hs / maximum(nones, ravel(x, order='F')), (nely, nelx), order='F')

        # Lagrange multipliers: bisection on the volume constraint to find
        # the optimality-criteria update that hits the target volume.
        l1 = 0
        l2 = 10**9
        while (l2 - l1) / (l1 + l2) > 0.001:
            lmid = 0.5 * (l2 + l1)
            sdv = sqrt(-dc / dv / lmid)
            min1 = minimum(x + move, x * sdv)
            xn = maximum(0, maximum(x - move, minimum(1, min1)))
            xP = xn * 1.
            if sum(xP) > volfrac * ne:
                l1 = lmid
            else:
                l2 = lmid
        change = max(abs(xn - x))

        # Update
        x = xn * 1.
        iteration += 1

        print('Iteration: {0} Compliance: {1:.4g}'.format(iteration, c))

        if callback:
            callback(x)

    return x
| mit |
virneo/nupic | examples/opf/experiments/spatial_classification/category_0/description.py | 32 | 1598 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.

import os

from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription

# The sub-experiment configuration: overrides merged into the shared base
# description (../base/description.py).
config = \
{
  # CSV dataset for this category, resolved relative to this file.
  'dataSource': 'file://' + os.path.join(os.path.dirname(__file__),
                                         '../datasets/category_0.csv'),
  'errorMetric': 'avg_err',

  'modelParams': {
    'sensorParams': { 'verbosity': 0},
    'clParams': {
      'clVerbosity': 0,
    },
  }
}

# Load the base experiment description with our overrides applied, then
# re-export everything it defines from this module (the OPF framework reads
# the resulting module-level names).
mod = importBaseDescription('../base/description.py', config)
locals().update(mod.__dict__)
| agpl-3.0 |
DreamSourceLab/DSView | libsigrokdecode4DSL/decoders/ir_nec/lists.py | 5 | 1471 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Uwe Hermann <uwe@hermann-uwe.de>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
# Addresses/devices. Items that are not listed are reserved/unknown.
address = {
    0x40: 'Matsui TV',
}

# Digit buttons 0-9: [long name, short name] (both are the digit itself).
digits = dict((d, [str(d), str(d)]) for d in range(10))

# Commands. Items that are not listed are reserved/unknown.
# The Matsui TV command table is the digit table plus its own buttons.
_cmds = dict(digits)
_cmds.update({
    11: ['-/--', '-/--'],
    16: ['Mute', 'M'],
    18: ['Standby', 'StBy'],
    26: ['Volume up', 'Vol+'],
    27: ['Program up', 'P+'],
    30: ['Volume down', 'Vol-'],
    31: ['Program down', 'P-'],
    68: ['AV', 'AV'],
})
command = {
    0x40: _cmds,
}
del _cmds
| gpl-3.0 |
lsaffre/lino-welfare | lino_welfare/projects/gerd/tests/test_cbss.py | 1 | 8037 | # -*- coding: utf-8 -*-
# Copyright 2012-2013 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""
This module contains "quick" tests that are run on a demo database
without any fixture. You can run only these tests by issuing::
$ python manage.py test cbss.QuickTest
$ django-admin.py test --settings=lino_welfare.projects.eupen.settings.demo cbss.QuickTest
"""
from builtins import str
import datetime
import logging
logger = logging.getLogger(__name__)
#~ from django.utils import unittest
#~ from django.test.client import Client
from django.conf import settings
from django.core.exceptions import ValidationError
from lino.utils.djangotest import TestCase
from lino_welfare.modlib.cbss import models as cbss
from lino.utils import IncompleteDate
from lino.utils.instantiator import create_and_get
from lino.api import rt
NOW = datetime.datetime(2015, 5, 11, 18, 31, 1)
class QuickTest(TestCase):
    # Exercises the CBSS request models (IdentifyPersonRequest,
    # RetrieveTIGroupsRequest, ManageAccessRequest) against a demo database.
    # All requests run with simulate_response='Foo', so no live CBSS
    # connection is needed; we only verify the generated request XML.
    never_build_site_cache = False  # the site cache (wsdl files) is needed
    fixtures = 'sectors purposes democfg'.split()

    def test01(self):
        # print("20180502 test_cbss.test01()")
        settings.SITE.startup() # create cache/wsdl files
        root = create_and_get(settings.SITE.user_model, username='root')
        luc = create_and_get(
            rt.models.pcsw.Client, first_name='Luc', last_name='Saffre')

        # First IdentifyPersonRequest
        # Create an IPR with NISS just to have the XML validated.
        req = cbss.IdentifyPersonRequest(
            national_id="70100853190", user=root, person=luc)
        # A missing birth_date must be rejected by full_clean().
        try:
            req.full_clean()
            self.fail('Expected ValidationError "birth_date cannot be blank."')
        except ValidationError:
            pass
        req.birth_date = IncompleteDate(1938, 6, 1)
        try:
            req.validate_request()
        except Warning as e:
            self.assertEqual(str(e), "")
            pass
        # An incomplete (year-only) birth date is acceptable.
        req.birth_date = IncompleteDate(1938, 0, 0)
        req.validate_request()
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """\
<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>IdentifyPersonRequest # 1</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASIdentifyPerson</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<ipr:IdentifyPersonRequest xmlns:ipr="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/IdentifyPerson">
<ipr:SearchCriteria>
<ipr:SSIN>70100853190</ipr:SSIN>
<ipr:PhoneticCriteria>
<ipr:LastName></ipr:LastName>
<ipr:FirstName></ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-00-00</ipr:BirthDate>
</ipr:PhoneticCriteria>
</ipr:SearchCriteria>
<ipr:VerificationData>
<ipr:PersonData>
<ipr:LastName></ipr:LastName>
<ipr:FirstName></ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-00-00</ipr:BirthDate>
</ipr:PersonData>
</ipr:VerificationData>
</ipr:IdentifyPersonRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>"""
        self.assertEquivalent(expected, req.request_xml)

        ##
        req = cbss.IdentifyPersonRequest(
            last_name="MUSTERMANN",
            birth_date=IncompleteDate(1938, 0, 0))
        req.validate_request()

        # Create another one, this time a name search.
        # This time we also inspect the generated XML.
        req = cbss.IdentifyPersonRequest(
            user=root, person=luc,
            last_name="MUSTERMANN",
            first_name="Max",
            birth_date=IncompleteDate(1938, 6, 1))
        req.validate_request()
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """\
<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>IdentifyPersonRequest # 2</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASIdentifyPerson</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<ipr:IdentifyPersonRequest xmlns:ipr="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/IdentifyPerson">
<ipr:SearchCriteria>
<ipr:PhoneticCriteria>
<ipr:LastName>MUSTERMANN</ipr:LastName>
<ipr:FirstName>Max</ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-06-01</ipr:BirthDate>
</ipr:PhoneticCriteria>
</ipr:SearchCriteria>
</ipr:IdentifyPersonRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>"""
        self.assertEquivalent(expected, req.request_xml)

        if settings.SITE.plugins.cbss.cbss_environment != 'test':
            # Skip live tests unless we are in test environment.
            # Otherwise we would have to build /media/chache/wsdl files
            return

        # Execute a RetrieveTIGroupsRequest.
        req = cbss.RetrieveTIGroupsRequest(
            user=root, person=luc,
            national_id='12345678901', language='fr')
        # Try it without environment and see the XML.
        # Note that NewStyleRequests have no validate_request method.
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = ""
        self.assertEquivalent(expected, req.request_xml)

        # Now a ManageAccessRequest
        today = datetime.date(2012, 5, 24)
        kw = dict()
        # "dossier in onderzoek": file under investigation for a maximum
        # period of two months (that is what purpose 1 stands for).
        kw.update(purpose_id=1)
        kw.update(national_id='68060105329')
        kw.update(user=root)
        kw.update(person=luc)
        kw.update(start_date=today)
        kw.update(end_date=today)
        kw.update(action=cbss.ManageActions.REGISTER)
        kw.update(query_register=cbss.QueryRegisters.SECONDARY)
        #~ kw.update(id_card_no=)
        kw.update(last_name='SAFFRE')
        kw.update(first_name='LUC JOHANNES')
        kw.update(birth_date=IncompleteDate(1968, 6, 1))
        req = cbss.ManageAccessRequest(**kw)
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>ManageAccessRequest # 1</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASManageAccess</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<mar:ManageAccessRequest xmlns:mar="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/ManageAccess">
<mar:SSIN>68060105329</mar:SSIN>
<mar:Purpose>10</mar:Purpose>
<mar:Period>
<common:StartDate xmlns:common="http://www.ksz-bcss.fgov.be/XSD/SSDN/Common">2012-05-24</common:StartDate>
<common:EndDate xmlns:common="http://www.ksz-bcss.fgov.be/XSD/SSDN/Common">2012-05-24</common:EndDate>
</mar:Period>
<mar:Action>REGISTER</mar:Action>
<mar:Sector>17</mar:Sector>
<mar:QueryRegister>SECONDARY</mar:QueryRegister>
<mar:ProofOfAuthentication>
<mar:PersonData>
<mar:LastName>SAFFRE</mar:LastName>
<mar:FirstName>LUC JOHANNES</mar:FirstName>
<mar:BirthDate>1968-06-01</mar:BirthDate>
</mar:PersonData>
</mar:ProofOfAuthentication>
</mar:ManageAccessRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>
"""
        self.assertEquivalent(expected, req.request_xml)
| agpl-3.0 |
pekeler/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py | 17 | 20089 | # Color Editor originally by Neil Hodgson, but restructured by mh to integrate
# even tighter into Pythonwin.
import win32ui
import win32con
import win32api
import sys
import pywin.scintilla.keycodes
from pywin.scintilla import bindings
from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat
#from pywin.framework.editor import EditorPropertyPage
# Private message asking a view to re-check whether its file changed on disk.
MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and editor.py

# Define a few common markers (Scintilla margin marker numbers)
MARKER_BOOKMARK = 0
MARKER_BREAKPOINT = 1
MARKER_CURRENT = 2

# Debugger states - XXX - copied from debugger\dbgcon.py
DBGSTATE_NOT_DEBUGGING = 0
DBGSTATE_RUNNING = 1
DBGSTATE_BREAK = 2
from pywin.scintilla.document import CScintillaDocument
from pywin.framework.editor.document import EditorDocumentBase
from pywin.scintilla.scintillacon import * # For the marker definitions
import pywin.scintilla.view
class SyntEditDocument(EditorDocumentBase):
    "A SyntEdit document. "

    def OnDebuggerStateChange(self, state):
        # Forward the new debugger state (one of the DBGSTATE_* constants)
        # to every view that implements OnDebuggerStateChange.
        self._ApplyOptionalToViews("OnDebuggerStateChange", state)

    def HookViewNotifications(self, view):
        EditorDocumentBase.HookViewNotifications(self, view)
        # Turn on Scintilla's undo collection for this view.
        view.SCISetUndoCollection(1)

    def FinalizeViewCreation(self, view):
        EditorDocumentBase.FinalizeViewCreation(self, view)
        # Only the first view triggers the one-off IDLE menu setup.
        if view==self.GetFirstView():
            self.GetDocTemplate().CheckIDLEMenus(view.idle)
SyntEditViewParent=pywin.scintilla.view.CScintillaView
class SyntEditView(SyntEditViewParent):
"A view of a SyntEdit. Obtains data from document."
def __init__(self, doc):
SyntEditViewParent.__init__(self, doc)
self.bCheckingFile = 0
def OnInitialUpdate(self):
SyntEditViewParent.OnInitialUpdate(self)
self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
for id in [win32ui.ID_VIEW_FOLD_COLLAPSE, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL,
win32ui.ID_VIEW_FOLD_EXPAND, win32ui.ID_VIEW_FOLD_EXPAND_ALL]:
self.HookCommand(self.OnCmdViewFold, id)
self.HookCommandUpdate(self.OnUpdateViewFold, id)
self.HookCommand(self.OnCmdViewFoldTopLevel, win32ui.ID_VIEW_FOLD_TOPLEVEL)
# Define the markers
# self.SCIMarkerDeleteAll()
self.SCIMarkerDefineAll(MARKER_BOOKMARK, SC_MARK_ROUNDRECT, win32api.RGB(0x0, 0x0, 0x0), win32api.RGB(0, 0xff, 0xff))
self.SCIMarkerDefine(MARKER_CURRENT, SC_MARK_ARROW)
self.SCIMarkerSetBack(MARKER_CURRENT, win32api.RGB(0xff, 0xff, 0x00))
# Define the folding markers
if 1: #traditional markers
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPEN, SC_MARK_MINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDER, SC_MARK_PLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERSUB, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERTAIL, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEREND, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPENMID, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERMIDTAIL, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
else: # curved markers
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPEN, SC_MARK_CIRCLEMINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDER, SC_MARK_CIRCLEPLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERSUB, SC_MARK_VLINE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERTAIL, SC_MARK_LCORNERCURVE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEREND, SC_MARK_CIRCLEPLUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPENMID, SC_MARK_CIRCLEMINUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERMIDTAIL, SC_MARK_TCORNERCURVE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
self.SCIMarkerDefine(MARKER_BREAKPOINT, SC_MARK_CIRCLE)
# Marker background depends on debugger state
self.SCIMarkerSetFore(MARKER_BREAKPOINT, win32api.RGB(0x0, 0, 0))
# Get the current debugger state.
try:
import pywin.debugger
if pywin.debugger.currentDebugger is None:
state = DBGSTATE_NOT_DEBUGGING
else:
state = pywin.debugger.currentDebugger.debuggerState
except ImportError:
state = DBGSTATE_NOT_DEBUGGING
self.OnDebuggerStateChange(state)
def _GetSubConfigNames(self):
return ["editor"] # Allow [Keys:Editor] sections to be specific to us
def DoConfigChange(self):
SyntEditViewParent.DoConfigChange(self)
tabSize = GetEditorOption("Tab Size", 4, 2)
indentSize = GetEditorOption("Indent Size", 4, 2)
bUseTabs = GetEditorOption("Use Tabs", 0)
bSmartTabs = GetEditorOption("Smart Tabs", 1)
ext = self.idle.IDLEExtension("AutoIndent") # Required extension.
self.SCISetViewWS( GetEditorOption("View Whitespace", 0) )
self.SCISetViewEOL( GetEditorOption("View EOL", 0) )
self.SCISetIndentationGuides( GetEditorOption("View Indentation Guides", 0) )
if GetEditorOption("Right Edge Enabled", 0):
mode = EDGE_BACKGROUND
else:
mode = EDGE_NONE
self.SCISetEdgeMode(mode)
self.SCISetEdgeColumn( GetEditorOption("Right Edge Column", 75) )
self.SCISetEdgeColor( GetEditorOption("Right Edge Color", win32api.RGB(0xef, 0xef, 0xef)))
width = GetEditorOption("Marker Margin Width", 16)
self.SCISetMarginWidthN(1, width)
width = GetEditorOption("Folding Margin Width", 12)
self.SCISetMarginWidthN(2, width)
width = GetEditorOption("Line Number Margin Width", 0)
self.SCISetMarginWidthN(0, width)
self.bFolding = GetEditorOption("Enable Folding", 1)
fold_flags = 0
self.SendScintilla(SCI_SETMODEVENTMASK, SC_MOD_CHANGEFOLD);
if self.bFolding:
if GetEditorOption("Fold Lines", 1):
fold_flags = 16
self.SCISetProperty("fold", self.bFolding)
self.SCISetFoldFlags(fold_flags)
tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xff, 0, 0))
self.SendScintilla(SCI_INDICSETFORE, 1, tt_color)
tt_use = GetEditorOption("Use Tab Timmy", 1)
if tt_use:
self.SCISetProperty("tab.timmy.whinge.level", "1")
# Auto-indent has very complicated behaviour. In a nutshell, the only
# way to get sensible behaviour from it is to ensure tabwidth != indentsize.
# Further, usetabs will only ever go from 1->0, never 0->1.
# This is _not_ the behaviour Pythonwin wants:
# * Tab width is arbitary, so should have no impact on smarts.
# * bUseTabs setting should reflect how new files are created, and
# if Smart Tabs disabled, existing files are edited
# * If "Smart Tabs" is enabled, bUseTabs should have no bearing
# for existing files (unless of course no context can be determined)
#
# So for smart tabs we configure the widget with completely dummy
# values (ensuring tabwidth != indentwidth), ask it to guess, then
# look at the values it has guessed, and re-configure
if bSmartTabs:
ext.config(usetabs=1, tabwidth=5, indentwidth=4)
ext.set_indentation_params(1)
if ext.indentwidth==5:
# Either 5 literal spaces, or a single tab character. Assume a tab
usetabs = 1
indentwidth = tabSize
else:
# Either Indented with spaces, and indent size has been guessed or
# an empty file (or no context found - tough!)
if self.GetTextLength()==0: # emtpy
usetabs = bUseTabs
indentwidth = indentSize
else: # guessed.
indentwidth = ext.indentwidth
usetabs = 0
# Tab size can never be guessed - set at user preference.
ext.config(usetabs=usetabs, indentwidth=indentwidth, tabwidth=tabSize)
else:
# Dont want smart-tabs - just set the options!
ext.config(usetabs=bUseTabs, tabwidth=tabSize, indentwidth=indentSize)
self.SCISetIndent(indentSize)
self.SCISetTabWidth(tabSize)
def OnDebuggerStateChange(self, state):
if state == DBGSTATE_NOT_DEBUGGING:
# Indicate breakpoints arent really usable.
# Not quite white - useful when no marker margin, so set as background color.
self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xef, 0xef, 0xef))
else:
# A light-red, so still readable when no marker margin.
self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xff, 0x80, 0x80))
def HookDocumentHandlers(self):
SyntEditViewParent.HookDocumentHandlers(self)
self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE)
def HookHandlers(self):
SyntEditViewParent.HookHandlers(self)
self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
def _PrepareUserStateChange(self):
return self.GetSel(), self.GetFirstVisibleLine()
def _EndUserStateChange(self, info):
scrollOff = info[1] - self.GetFirstVisibleLine()
if scrollOff:
self.LineScroll(scrollOff)
# Make sure we dont reset the cursor beyond the buffer.
max = self.GetTextLength()
newPos = min(info[0][0], max), min(info[0][1], max)
self.SetSel(newPos)
#######################################
# The Windows Message or Notify handlers.
#######################################
def OnMarginClick(self, std, extra):
notify = self.SCIUnpackNotifyMessage(extra)
if notify.margin==2: # Our fold margin
line_click = self.LineFromChar(notify.position)
# max_line = self.GetLineCount()
if self.SCIGetFoldLevel(line_click) & SC_FOLDLEVELHEADERFLAG:
# If a fold point.
self.SCIToggleFold(line_click)
return 1
def OnSetFocus(self,msg):
# Even though we use file change notifications, we should be very sure about it here.
self.OnCheckExternalDocumentUpdated(msg)
return 1
def OnCheckExternalDocumentUpdated(self, msg):
if self.bCheckingFile: return
self.bCheckingFile = 1
self.GetDocument().CheckExternalDocumentUpdated()
self.bCheckingFile = 0
def OnRClick(self,params):
menu = win32ui.CreatePopupMenu()
self.AppendMenu(menu, "&Locate module", "LocateModule")
self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
self.AppendMenu(menu, "&Undo", "EditUndo")
self.AppendMenu(menu, '&Redo', 'EditRedo')
self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
self.AppendMenu(menu, 'Cu&t', 'EditCut')
self.AppendMenu(menu, '&Copy', 'EditCopy')
self.AppendMenu(menu, '&Paste', 'EditPaste')
self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
self.AppendMenu(menu, '&Select all', 'EditSelectAll')
self.AppendMenu(menu, 'View &Whitespace', 'ViewWhitespace', checked=self.SCIGetViewWS())
self.AppendMenu(menu, "&Fixed Font", "ViewFixedFont", checked = self._GetColorizer().bUseFixed)
self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
self.AppendMenu(menu, "&Goto line...", "GotoLine")
submenu = win32ui.CreatePopupMenu()
newitems = self.idle.GetMenuItems("edit")
for text, event in newitems:
self.AppendMenu(submenu, text, event)
flags=win32con.MF_STRING|win32con.MF_ENABLED|win32con.MF_POPUP
menu.AppendMenu(flags, submenu.GetHandle(), "&Source code")
flags = win32con.TPM_LEFTALIGN|win32con.TPM_LEFTBUTTON|win32con.TPM_RIGHTBUTTON
menu.TrackPopupMenu(params[5], flags, self)
return 0
def OnCmdViewFold(self, cid, code): # Handle the menu command
if cid == win32ui.ID_VIEW_FOLD_EXPAND_ALL:
self.FoldExpandAllEvent(None)
elif cid == win32ui.ID_VIEW_FOLD_EXPAND:
self.FoldExpandEvent(None)
elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE_ALL:
self.FoldCollapseAllEvent(None)
elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE:
self.FoldCollapseEvent(None)
else:
print "Unknown collapse/expand ID"
def OnUpdateViewFold(self, cmdui): # Update the tick on the UI.
if not self.bFolding:
cmdui.Enable(0)
return
id = cmdui.m_nID
if id in [win32ui.ID_VIEW_FOLD_EXPAND_ALL, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL]:
cmdui.Enable()
else:
enable = 0
lineno = self.LineFromChar(self.GetSel()[0])
foldable = self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG
is_expanded = self.SCIGetFoldExpanded(lineno)
if id == win32ui.ID_VIEW_FOLD_EXPAND:
if foldable and not is_expanded:
enable = 1
elif id == win32ui.ID_VIEW_FOLD_COLLAPSE:
if foldable and is_expanded:
enable = 1
cmdui.Enable(enable)
def OnCmdViewFoldTopLevel(self, cid, code): # Handle the menu command
self.FoldTopLevelEvent(None)
#######################################
# The Events
#######################################
def ToggleBookmarkEvent(self, event, pos = -1):
"""Toggle a bookmark at the specified or current position
"""
if pos==-1:
pos, end = self.GetSel()
startLine = self.LineFromChar(pos)
self.GetDocument().MarkerToggle(startLine+1, MARKER_BOOKMARK)
return 0
def GotoNextBookmarkEvent(self, event, fromPos=-1):
""" Move to the next bookmark
"""
if fromPos==-1:
fromPos, end = self.GetSel()
startLine = self.LineFromChar(fromPos)+1 # Zero based line to start
nextLine = self.GetDocument().MarkerGetNext(startLine+1, MARKER_BOOKMARK)-1
if nextLine<0:
nextLine = self.GetDocument().MarkerGetNext(0, MARKER_BOOKMARK)-1
if nextLine <0 or nextLine == startLine-1:
win32api.MessageBeep()
else:
self.SCIEnsureVisible(nextLine)
self.SCIGotoLine(nextLine)
return 0
def TabKeyEvent(self, event):
"""Insert an indent. If no selection, a single indent, otherwise a block indent
"""
# Handle auto-complete first.
if self.SCIAutoCActive():
self.SCIAutoCComplete()
return 0
# Call the IDLE event.
return self.bindings.fire("<<smart-indent>>", event)
def EnterKeyEvent(self, event):
"""Handle the enter key with special handling for auto-complete
"""
# Handle auto-complete first.
if self.SCIAutoCActive():
self.SCIAutoCCancel()
# Call the IDLE event.
return self.bindings.fire("<<newline-and-indent>>", event)
def ShowInteractiveWindowEvent(self, event):
import pywin.framework.interact
pywin.framework.interact.ShowInteractiveWindow()
def FoldTopLevelEvent(self, event = None):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
try:
self.Colorize()
maxLine = self.GetLineCount()
# Find the first line, and check out its state.
for lineSeek in xrange(maxLine):
if self.SCIGetFoldLevel(lineSeek) & SC_FOLDLEVELHEADERFLAG:
expanding = not self.SCIGetFoldExpanded(lineSeek)
break
else:
# no folds here!
return
for lineSeek in xrange(lineSeek, maxLine):
level = self.SCIGetFoldLevel(lineSeek)
level_no = level & SC_FOLDLEVELNUMBERMASK - SC_FOLDLEVELBASE
is_header = level & SC_FOLDLEVELHEADERFLAG
# print lineSeek, level_no, is_header
if level_no == 0 and is_header:
if (expanding and not self.SCIGetFoldExpanded(lineSeek)) or \
(not expanding and self.SCIGetFoldExpanded(lineSeek)):
self.SCIToggleFold(lineSeek)
finally:
win32ui.DoWaitCursor(-1)
def FoldExpandSecondLevelEvent(self, event):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
## I think this is needed since Scintilla may not have
## already formatted parts of file outside visible window.
self.Colorize()
levels=[SC_FOLDLEVELBASE]
## Scintilla's level number is based on amount of whitespace indentation
for lineno in xrange(self.GetLineCount()):
level = self.SCIGetFoldLevel(lineno)
if not level & SC_FOLDLEVELHEADERFLAG:
continue
curr_level = level & SC_FOLDLEVELNUMBERMASK
if curr_level > levels[-1]:
levels.append(curr_level)
try:
level_ind=levels.index(curr_level)
except ValueError:
## probably syntax error in source file, bail
break
levels=levels[:level_ind+1]
if level_ind == 1 and not self.SCIGetFoldExpanded(lineno):
self.SCIToggleFold(lineno)
win32ui.DoWaitCursor(-1)
def FoldCollapseSecondLevelEvent(self, event):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
## I think this is needed since Scintilla may not have
## already formatted parts of file outside visible window.
self.Colorize()
levels=[SC_FOLDLEVELBASE]
## Scintilla's level number is based on amount of whitespace indentation
for lineno in xrange(self.GetLineCount()):
level = self.SCIGetFoldLevel(lineno)
if not level & SC_FOLDLEVELHEADERFLAG:
continue
curr_level = level & SC_FOLDLEVELNUMBERMASK
if curr_level > levels[-1]:
levels.append(curr_level)
try:
level_ind=levels.index(curr_level)
except ValueError:
## probably syntax error in source file, bail
break
levels=levels[:level_ind+1]
if level_ind == 1 and self.SCIGetFoldExpanded(lineno):
self.SCIToggleFold(lineno)
win32ui.DoWaitCursor(-1)
def FoldExpandEvent(self, event):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
lineno = self.LineFromChar(self.GetSel()[0])
if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
not self.SCIGetFoldExpanded(lineno):
self.SCIToggleFold(lineno)
win32ui.DoWaitCursor(-1)
def FoldExpandAllEvent(self, event):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
for lineno in xrange(0, self.GetLineCount()):
if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
not self.SCIGetFoldExpanded(lineno):
self.SCIToggleFold(lineno)
win32ui.DoWaitCursor(-1)
def FoldCollapseEvent(self, event):
if not self.bFolding:
return 1
win32ui.DoWaitCursor(1)
lineno = self.LineFromChar(self.GetSel()[0])
if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
self.SCIGetFoldExpanded(lineno):
self.SCIToggleFold(lineno)
win32ui.DoWaitCursor(-1)
def FoldCollapseAllEvent(self, event):
    """Collapse every expanded fold header in the document."""
    if not self.bFolding:
        return 1
    win32ui.DoWaitCursor(1)
    # Colorize first so fold levels exist for lines Scintilla has not
    # yet styled (e.g. outside the visible window).
    self.Colorize()
    for line in xrange(0, self.GetLineCount()):
        state = self.SCIGetFoldLevel(line)
        if state & SC_FOLDLEVELHEADERFLAG and self.SCIGetFoldExpanded(line):
            self.SCIToggleFold(line)
    win32ui.DoWaitCursor(-1)
from pywin.framework.editor.frame import EditorFrame
class SplitterFrame(EditorFrame):
    """Editor frame that adds keyboard-initiated window splitting."""
    def OnCreate(self, cs):
        """Hook the Window/Split menu command when the frame is created."""
        self.HookCommand(self.OnWindowSplit, win32ui.ID_WINDOW_SPLIT)
        return 1
    def OnWindowSplit(self, id, code):
        """Begin an interactive keyboard split of the first pane."""
        self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST).DoKeyboardSplit()
        return 1
from pywin.framework.editor.template import EditorTemplateBase
class SyntEditTemplate(EditorTemplateBase):
    """Document template for the syntax-highlighting editor.

    Wires together the document, frame and view classes, and lazily
    appends the IDLE-extension commands to the shared Edit menu.
    """
    def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
        # Fall back to the default doc/view/frame classes when not given.
        if makeDoc is None: makeDoc = SyntEditDocument
        if makeView is None: makeView = SyntEditView
        if makeFrame is None: makeFrame = SplitterFrame
        # Guard so the IDLE menus are only appended once (see CheckIDLEMenus).
        self.bSetMenus = 0
        EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)
    def CheckIDLEMenus(self, idle):
        """Append IDLE's "edit" extension commands to the Edit menu (once)."""
        if self.bSetMenus: return
        self.bSetMenus = 1
        submenu = win32ui.CreatePopupMenu()
        newitems = idle.GetMenuItems("edit")
        flags=win32con.MF_STRING|win32con.MF_ENABLED
        for text, event in newitems:
            # Only add items whose IDLE event maps to a command id.
            id = bindings.event_to_commands.get(event)
            if id is not None:
                # Show the configured keyboard accelerator, if any,
                # after a tab in the menu text (standard Windows style).
                keyname = pywin.scintilla.view.configManager.get_key_binding( event, ["editor"] )
                if keyname is not None:
                    text = text + "\t" + keyname
                submenu.AppendMenu(flags, id, text)
        # Attach the new popup as "&Source Code" at the end of the
        # Edit menu (sub-menu index 1 of the shared menu bar).
        mainMenu = self.GetSharedMenu()
        editMenu = mainMenu.GetSubMenu(1)
        editMenu.AppendMenu(win32con.MF_SEPARATOR, 0, "")
        editMenu.AppendMenu(win32con.MF_STRING | win32con.MF_POPUP | win32con.MF_ENABLED, submenu.GetHandle(), "&Source Code")
    def _CreateDocTemplate(self, resourceId):
        """Create the underlying win32ui document template."""
        return win32ui.CreateDocTemplate(resourceId)
    def CreateWin32uiDocument(self):
        """Create a new document via the framework."""
        return self.DoCreateDoc()
    def GetPythonPropertyPages(self):
        """Returns a list of property pages
        """
        from pywin.scintilla import configui
        return EditorTemplateBase.GetPythonPropertyPages(self) + [configui.ScintillaFormatPropertyPage()]
# For debugging purposes, when this module may be reloaded many times.
# On first import 'editorTemplate' is not yet bound, so the NameError is
# expected and ignored; on reload the stale template is removed first so
# the application never holds two templates for the same document type.
try:
    win32ui.GetApp().RemoveDocTemplate(editorTemplate)
except NameError:
    pass
editorTemplate = SyntEditTemplate()
win32ui.GetApp().AddDocTemplate(editorTemplate)
| apache-2.0 |
tycho01/gpudb | SQL2XML/antlr3/__init__.py | 15 | 5718 | """ @package antlr3
@brief ANTLR3 runtime package
This module contains all support classes, which are needed to use recognizers
generated by ANTLR3.
@mainpage
\note Please be warned that the line numbers in the API documentation do not
match the real locations in the source code of the package. This is an
unintended artifact of doxygen, which I could only convince to use the
correct module names by concatenating all files from the package into a single
module file...
Here is a little overview over the most commonly used classes provided by
this runtime:
@section recognizers Recognizers
These recognizers are baseclasses for the code which is generated by ANTLR3.
- BaseRecognizer: Base class with common recognizer functionality.
- Lexer: Base class for lexers.
- Parser: Base class for parsers.
- tree.TreeParser: Base class for %tree parser.
@section streams Streams
Each recognizer pulls its input from one of the stream classes below. Streams
handle stuff like buffering, look-ahead and seeking.
A character stream is usually the first element in the pipeline of a typical
ANTLR3 application. It is used as the input for a Lexer.
- ANTLRStringStream: Reads from a string objects. The input should be a unicode
object, or ANTLR3 will have trouble decoding non-ascii data.
- ANTLRFileStream: Opens a file and read the contents, with optional character
decoding.
- ANTLRInputStream: Reads the data from a file-like object, with optional
character decoding.
A Parser needs a TokenStream as input (which in turn is usually fed by a
Lexer):
- CommonTokenStream: A basic and most commonly used TokenStream
implementation.
- TokenRewriteStream: A modification of CommonTokenStream that allows the
stream to be altered (by the Parser). See the 'tweak' example for a usecase.
And tree.TreeParser finally fetches its input from a tree.TreeNodeStream:
- tree.CommonTreeNodeStream: A basic and most commonly used tree.TreeNodeStream
implementation.
@section tokenstrees Tokens and Trees
A Lexer emits Token objects which are usually buffered by a TokenStream. A
Parser can build a Tree, if the output=AST option has been set in the grammar.
The runtime provides these Token implementations:
- CommonToken: A basic and most commonly used Token implementation.
- ClassicToken: A Token object as used in ANTLR 2.x, used for %tree
construction.
Tree objects are wrapper for Token objects.
- tree.CommonTree: A basic and most commonly used Tree implementation.
A tree.TreeAdaptor is used by the parser to create tree.Tree objects for the
input Token objects.
- tree.CommonTreeAdaptor: A basic and most commonly used tree.TreeAdaptor
implementation.
@section Exceptions
RecognitionException are generated, when a recognizer encounters incorrect
or unexpected input.
- RecognitionException
- MismatchedRangeException
- MismatchedSetException
- MismatchedNotSetException
.
- MismatchedTokenException
- MismatchedTreeNodeException
- NoViableAltException
- EarlyExitException
- FailedPredicateException
.
.
A tree.RewriteCardinalityException is raised when the parser hits a
cardinality mismatch during AST construction. Although this is basically a
bug in your grammar, it can only be detected at runtime.
- tree.RewriteCardinalityException
- tree.RewriteEarlyExitException
- tree.RewriteEmptyStreamException
.
.
"""
# tree.RewriteRuleElementStream
# tree.RewriteRuleSubtreeStream
# tree.RewriteRuleTokenStream
# CharStream
# DFA
# TokenSource
# [The "BSD licence"]
# Copyright (c) 2005-2008 Terence Parr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = '3.1.3'
def version_str_to_tuple(version_str):
    """Convert an ANTLR runtime version string into a comparable tuple.

    Returns a ``(major, minor, patch, beta)`` 4-tuple of ints so that
    version strings can be compared with ordinary tuple comparison.
    'HEAD' sorts after every released version, and a final release
    (no ``b<N>`` suffix) sorts after any beta of the same version.

    Raises ValueError if *version_str* is not a recognized version string.
    """
    import re
    import sys

    # sys.maxsize (available on Python 2.6+ and 3.x) replaces the
    # Python-2-only sys.maxint as the "sorts last" sentinel.
    if version_str == 'HEAD':
        return (sys.maxsize, sys.maxsize, sys.maxsize, sys.maxsize)

    m = re.match(r'(\d+)\.(\d+)(\.(\d+))?(b(\d+))?', version_str)
    if m is None:
        raise ValueError("Bad version string %r" % version_str)

    major = int(m.group(1))
    minor = int(m.group(2))
    patch = int(m.group(4) or 0)
    # Missing beta marker == final release, which outranks any beta.
    beta = int(m.group(6) or sys.maxsize)

    return (major, minor, patch, beta)
# Version of this runtime library, exposed both as the raw string and as
# a tuple suitable for ordered comparison (see version_str_to_tuple).
runtime_version_str = __version__
runtime_version = version_str_to_tuple(runtime_version_str)
from constants import *
from dfa import *
from exceptions import *
from recognizers import *
from streams import *
from tokens import *
| apache-2.0 |
rogalski/pylint | pylint/test/functional/arguments.py | 1 | 5401 | # pylint: disable=too-few-public-methods, no-absolute-import,missing-docstring,import-error,wrong-import-position
# pylint: disable=wrong-import-order
def decorator(fun):
"""Decorator"""
return fun
class DemoClass(object):
"""Test class for method invocations."""
@staticmethod
def static_method(arg):
"""static method."""
return arg + arg
@classmethod
def class_method(cls, arg):
"""class method"""
return arg + arg
def method(self, arg):
"""method."""
return (self, arg)
@decorator
def decorated_method(self, arg):
"""decorated method."""
return (self, arg)
def function_1_arg(first_argument):
"""one argument function"""
return first_argument
def function_3_args(first_argument, second_argument, third_argument):
"""three arguments function"""
return first_argument, second_argument, third_argument
def function_default_arg(one=1, two=2):
"""fonction with default value"""
return two, one
function_1_arg(420)
function_1_arg() # [no-value-for-parameter]
function_1_arg(1337, 347) # [too-many-function-args]
function_3_args(420, 789) # [no-value-for-parameter]
# +1:[no-value-for-parameter,no-value-for-parameter,no-value-for-parameter]
function_3_args()
function_3_args(1337, 347, 456)
function_3_args('bab', 'bebe', None, 5.6) # [too-many-function-args]
function_default_arg(1, two=5)
function_default_arg(two=5)
function_1_arg(bob=4) # [unexpected-keyword-arg,no-value-for-parameter]
function_default_arg(1, 4, coin="hello") # [unexpected-keyword-arg]
function_default_arg(1, one=5) # [redundant-keyword-arg]
# Remaining tests are for coverage of correct names in messages.
LAMBDA = lambda arg: 1
LAMBDA() # [no-value-for-parameter]
def method_tests():
"""Method invocations."""
demo = DemoClass()
demo.static_method() # [no-value-for-parameter]
DemoClass.static_method() # [no-value-for-parameter]
demo.class_method() # [no-value-for-parameter]
DemoClass.class_method() # [no-value-for-parameter]
demo.method() # [no-value-for-parameter]
DemoClass.method(demo) # [no-value-for-parameter]
demo.decorated_method() # [no-value-for-parameter]
DemoClass.decorated_method(demo) # [no-value-for-parameter]
# Test a regression (issue #234)
import sys
class Text(object):
""" Regression """
if sys.version_info > (3,):
def __new__(cls):
""" empty """
return object.__new__(cls)
else:
def __new__(cls):
""" empty """
return object.__new__(cls)
Text()
class TestStaticMethod(object):
@staticmethod
def test(first, second=None, **kwargs):
return first, second, kwargs
def func(self):
self.test(42)
self.test(42, second=34)
self.test(42, 42)
self.test() # [no-value-for-parameter]
self.test(42, 42, 42) # [too-many-function-args]
class TypeCheckConstructor(object):
def __init__(self, first, second):
self.first = first
self.second = second
def test(self):
type(self)(1, 2, 3) # [too-many-function-args]
# +1: [no-value-for-parameter,no-value-for-parameter]
type(self)()
type(self)(1, lala=2) # [no-value-for-parameter,unexpected-keyword-arg]
type(self)(1, 2)
type(self)(first=1, second=2)
class Test(object):
""" lambda needs Test instance as first argument """
lam = lambda self, icon: (self, icon)
def test(self):
self.lam(42)
self.lam() # [no-value-for-parameter]
self.lam(1, 2, 3) # [too-many-function-args]
Test().lam() # [no-value-for-parameter]
# Don't emit a redundant-keyword-arg for this example,
# it's perfectly valid
class Issue642(object):
attr = 0
def __str__(self):
return "{self.attr}".format(self=self)
# These should not emit anything regarding the number of arguments,
# since they have something invalid.
from ala_bala_portocola import unknown
# pylint: disable=not-a-mapping,not-an-iterable
function_1_arg(*unknown)
function_1_arg(1, *2)
function_1_arg(1, 2, 3, **unknown)
function_1_arg(4, 5, **1)
function_1_arg(5, 6, **{unknown: 1})
function_1_arg(**{object: 1})
function_1_arg(**{1: 2})
def no_context_but_redefined(*args):
args = [1]
#+1: [no-value-for-parameter, no-value-for-parameter]
expect_three(*list(args))
def no_context_one_elem(*args):
expect_three(args) # [no-value-for-parameter, no-value-for-parameter]
# Don't emit no-value-for-parameter for this, since we
# don't have the context at our disposal.
def expect_three(one, two, three):
return one + two + three
def no_context(*args):
expect_three(*args)
def no_context_two(*args):
expect_three(*list(args))
def no_context_three(*args):
expect_three(*set(args))
def compare_prices(arg):
return set((arg, ))
def find_problems2(prob_dates):
for fff in range(10):
prob_dates |= compare_prices(fff)
from collections import namedtuple
def namedtuple_replace_issue_1036():
cls = namedtuple('cls', 'a b c')
new_instance = cls(1, 2, 3)._replace(
a=24,
b=24,
c=42
)
# +1: [unexpected-keyword-arg, unexpected-keyword-arg]
new_bad_instance = cls(1, 2, 3)._replace(
d=24,
e=32,
)
return new_instance, new_bad_instance
| gpl-2.0 |
pcm17/tensorflow | tensorflow/contrib/distributions/python/ops/operator_pd.py | 79 | 30499 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base class for symmetric positive definite operator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
@six.add_metaclass(abc.ABCMeta)
class OperatorPDBase(object):
"""Class representing a (batch) of positive definite matrices `A`.
This class provides access to functions of a (batch) symmetric positive
definite (PD) matrix, without the need to materialize them. In other words,
this provides means to do "matrix free" computations.
### Basics
For example, `my_operator.matmul(x)` computes the result of matrix
multiplication, and this class is free to do this computation with or without
ever materializing a matrix.
In practice, this operator represents a (batch) matrix `A` with shape
`[N1,...,Nn, k, k]` for some `n >= 0`. The first `n` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,in, : :]` is
a `k x k` matrix. Again, this matrix `A` may not be materialized, but for
purposes of broadcasting this shape will be relevant.
Since `A` is (batch) positive definite, it has a (or several) square roots `S`
such that `A = SS^T`.
For example, if `MyOperator` inherits from `OperatorPDBase`, the user can do
```python
operator = MyOperator(...) # Initialize with some tensors.
operator.log_det()
# Compute the quadratic form x^T A^{-1} x for vector x.
x = ... # some shape [M1,...,Mm, N1,...,Nn, k] tensor
operator.inv_quadratic_form_on_vectors(x)
# Matrix multiplication by the square root, S w.
# If w is iid normal, S w has covariance A.
w = ... # some shape [N1,...,Nn, k, r] tensor, r >= 1
operator.sqrt_matmul(w)
```
The above three methods, `log_det`, `inv_quadratic_form_on_vectors`, and
`sqrt_matmul` provide "all" that is necessary to use a covariance matrix
in a multi-variate normal distribution. See the class `MVNOperatorPD`.
### Details about shape requirements
The `Operator` classes operate on batch vectors and batch matrices with
compatible shapes. `matrix` is a batch matrix with compatible shape if
```
operator.shape = [N1,...,Nn] + [j, k]
matrix.shape = [N1,...,Nn] + [k, r]
```
This is the same requirement as `tf.matmul`. `vec` is a batch vector with
compatible shape if
```
operator.shape = [N1,...,Nn] + [j, k]
vec.shape = [N1,...,Nn] + [k]
OR
vec.shape = [M1,...,Mm] + [N1,...,Nn] + [k]
```
We are strict with the matrix shape requirements since we do not want to
require `Operator` broadcasting. The `Operator` may be defined by large
tensors (thus broadcasting is expensive), or the `Operator` may be matrix
free, in which case there is no guarantee that the underlying implementation
will broadcast.
We are more flexible with vector shapes since extra leading dimensions can
be "flipped" to the end to change the vector to a compatible matrix.
"""
@abc.abstractproperty
def name(self):
"""String name identifying this `Operator`."""
return self._name
@abc.abstractproperty
def verify_pd(self):
"""Whether to verify that this `Operator` is positive definite."""
# return self._verify_pd
pass
@abc.abstractproperty
def dtype(self):
"""Data type of matrix elements of `A`."""
pass
def add_to_tensor(self, mat, name="add_to_tensor"):
"""Add matrix represented by this operator to `mat`. Equiv to `A + mat`.
Args:
mat: `Tensor` with same `dtype` and shape broadcastable to `self`.
name: A name to give this `Op`.
Returns:
A `Tensor` with broadcast shape and same `dtype` as `self`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs + [mat]):
mat = ops.convert_to_tensor(mat, name="mat")
return self._add_to_tensor(mat)
def _add_to_tensor(self, mat):
# Re-implement in derived class if a more efficient method is available.
return self.to_dense() + mat
def _dispatch_based_on_batch(self, batch_method, singleton_method, **args):
"""Helper to automatically call batch or singleton operation."""
if self.get_shape().ndims is not None:
is_batch = self.get_shape().ndims > 2
if is_batch:
return batch_method(**args)
else:
return singleton_method(**args)
else:
is_batch = self.rank() > 2
return control_flow_ops.cond(
is_batch,
lambda: batch_method(**args),
lambda: singleton_method(**args)
)
def inv_quadratic_form_on_vectors(
self, x, name="inv_quadratic_form_on_vectors"):
"""Compute the quadratic form: `x^T A^{-1} x` where `x` is a batch vector.
`x` is a batch vector with compatible shape if
```
self.shape = [N1,...,Nn] + [k, k]
x.shape = [M1,...,Mm] + [N1,...,Nn] + [k]
```
Args:
x: `Tensor` with compatible batch vector shape and same `dtype` as self.
name: A name scope to use for ops added by this method.
Returns:
`Tensor` with shape `[M1,...,Mm] + [N1,...,Nn]` and same `dtype`
as `self`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=[x] + self.inputs):
x = ops.convert_to_tensor(x, name="x")
return self._inv_quadratic_form_on_vectors(x)
def _inv_quadratic_form_on_vectors(self, x):
# Implement in derived class to enable self.inv_quadratic_form_on_vectors().
#
# To implement,
# Depending on which is more efficient, derived class should be a one-liner
# calling either
# return self._iqfov_via_sqrt_solve(x)
# OR
# return self._iqfov_via_solve(x)
# both of which are written in this base class.
raise NotImplementedError(
"inv_quadratic_form_on_vectors not implemented")
def _iqfov_via_sqrt_solve(self, x):
"""Get the inverse quadratic form on vectors via a sqrt_solve."""
# x^T A^{-1} x = || S^{-1}x ||^2,
# where S is a square root of A (A = SS^T).
# Steps:
# 1. Convert x to a matrix, flipping all extra dimensions in `x` to the
# final dimension of x_matrix.
x_matrix = flip_vector_to_matrix(
x, self.batch_shape(), self.get_batch_shape())
# 2. Get soln_matrix = S^{-1} x_matrix
soln_matrix = self.sqrt_solve(x_matrix)
# 3. Reshape back to a vector.
soln = flip_matrix_to_vector(
soln_matrix, extract_batch_shape(x, 1), x.get_shape()[:-1])
# 4. L2 (batch) vector norm squared.
result = math_ops.reduce_sum(
math_ops.square(soln), reduction_indices=[-1])
result.set_shape(x.get_shape()[:-1])
return result
def _iqfov_via_solve(self, x):
"""Get the inverse quadratic form on vectors via a solve."""
# x^T A^{-1} x
# 1. Convert x to a matrix, flipping all extra dimensions in `x` to the
# final dimension of x_matrix.
x_matrix = flip_vector_to_matrix(
x, self.batch_shape(), self.get_batch_shape())
# 2. Get x_whitened_matrix = A^{-1} x_matrix
soln_matrix = self.solve(x_matrix)
# 3. Reshape back to a vector.
soln = flip_matrix_to_vector(
soln_matrix, extract_batch_shape(x, 1), x.get_shape()[:-1])
# 4. Compute the dot product: x^T soln
result = math_ops.reduce_sum(x * soln, reduction_indices=[-1])
result.set_shape(x.get_shape()[:-1])
return result
def det(self, name="det"):
"""Determinant for every batch member.
Args:
name: A name scope to use for ops added by this method.
Returns:
Determinant for every batch member.
"""
# Derived classes are encouraged to implement log_det() (since it is
# usually more stable), and then det() comes for free.
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return self._det()
def _det(self):
return math_ops.exp(self.log_det())
def log_det(self, name="log_det"):
"""Log of the determinant for every batch member.
Args:
name: A name scope to use for ops added by this method.
Returns:
Logarithm of determinant for every batch member.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return self._dispatch_based_on_batch(self._batch_log_det, self._log_det)
def _batch_log_det(self):
# Implement in derived class to enable self.log_det(x).
raise NotImplementedError("Log determinant (log_det) not implemented.")
def _log_det(self):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_log_det()
def sqrt_log_abs_det(self, name="sqrt_log_det"):
"""Log absolute value determinant of the sqrt `S` for every batch member.
In most cases, this will be the same as `sqrt_log_det`, but for certain
operators defined by a square root, this might be implemented slightly
differently.
Args:
name: A name scope to use for ops added by this method.
Returns:
Logarithm of absolute value determinant of the square root `S` for
every batch member.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return self._dispatch_based_on_batch(
self._batch_sqrt_log_abs_det, self._sqrt_log_abs_det)
def sqrt_log_det(self, name="sqrt_log_det"):
"""Log of the determinant of the sqrt `S` for every batch member.
Args:
name: A name scope to use for ops added by this method.
Returns:
Logarithm of determinant of the square root `S` for every batch member.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return self._dispatch_based_on_batch(
self._batch_sqrt_log_det, self._sqrt_log_det)
def _batch_sqrt_log_det(self):
# Over-ride in derived class if it can be done more efficiently.
return 0.5 * self._log_det()
def _sqrt_log_det(self):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_sqrt_log_det()
def _batch_sqrt_log_abs_det(self):
# Over-ride in derived class if it can be done more efficiently.
return self._sqrt_log_det()
def _sqrt_log_abs_det(self):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_sqrt_log_abs_det()
@abc.abstractproperty
def inputs(self):
"""List of tensors that were provided as initialization inputs."""
pass
@abc.abstractmethod
def get_shape(self):
"""Static `TensorShape` of entire operator.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, then this returns
`TensorShape([N1,...,Nn, k, k])`
Returns:
`TensorShape`, statically determined, may be undefined.
"""
pass
def get_batch_shape(self):
"""`TensorShape` with batch shape. Statically determined if possible.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, then this returns `TensorShape([N1,...,Nn])`
Returns:
`TensorShape`, statically determined, may be undefined.
"""
# Derived classes get this "for free" once .get_shape() is implemented.
return self.get_shape()[:-2]
def get_vector_shape(self):
"""`TensorShape` of vectors this operator will work with.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, then this returns
`TensorShape([N1,...,Nn, k])`
Returns:
`TensorShape`, statically determined, may be undefined.
"""
# Derived classes get this "for free" once .get_shape() is implemented.
return self.get_shape()[:-1]
def shape(self, name="shape"):
"""Equivalent to `tf.shape(A).` Equal to `[N1,...,Nn, k, k]`, `n >= 0`.
Args:
name: A name scope to use for ops added by this method.
Returns:
`int32` `Tensor`
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return self._shape()
@abc.abstractmethod
def _shape(self):
# Implement in derived class to enable .shape().
pass
def rank(self, name="rank"):
"""Tensor rank. Equivalent to `tf.rank(A)`. Will equal `n + 2`.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, the `rank` is `n + 2`.
Args:
name: A name scope to use for ops added by this method.
Returns:
`int32` `Tensor`
"""
# Derived classes get this "for free" once .shape() is implemented.
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return array_ops.size(self.shape())
def batch_shape(self, name="batch_shape"):
"""Shape of batches associated with this operator.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, the `batch_shape` is `[N1,...,Nn]`.
Args:
name: A name scope to use for ops added by this method.
Returns:
`int32` `Tensor`
"""
# Derived classes get this "for free" once .shape() is implemented.
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return array_ops.strided_slice(self.shape(), [0], [self.rank() - 2])
def vector_shape(self, name="vector_shape"):
"""Shape of (batch) vectors that this (batch) matrix will multiply.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, the `vector_shape` is `[N1,...,Nn, k]`.
Args:
name: A name scope to use for ops added by this method.
Returns:
`int32` `Tensor`
"""
# Derived classes get this "for free" once .shape() is implemented.
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return array_ops.concat(
(self.batch_shape(), [self.vector_space_dimension()]), 0)
def vector_space_dimension(self, name="vector_space_dimension"):
"""Dimension of vector space on which this acts. The `k` in `R^k`.
If this operator represents the batch matrix `A` with
`A.shape = [N1,...,Nn, k, k]`, the `vector_space_dimension` is `k`.
Args:
name: A name scope to use for ops added by this method.
Returns:
`int32` `Tensor`
"""
# Derived classes get this "for free" once .shape() is implemented.
with ops.name_scope(self.name):
with ops.name_scope(name, values=self.inputs):
return array_ops.gather(self.shape(), self.rank() - 1)
def matmul(self, x, transpose_x=False, name="matmul"):
"""Left (batch) matmul `x` by this matrix: `Ax`.
`x` is a batch matrix with compatible shape if
```
self.shape = [N1,...,Nn] + [k, k]
x.shape = [N1,...,Nn] + [k, r]
```
Args:
x: `Tensor` with shape `self.batch_shape + [k, r]` and same `dtype` as
this `Operator`.
transpose_x: If `True`, `x` is transposed before multiplication.
name: A name to give this `Op`.
Returns:
A result equivalent to `tf.matmul(self.to_dense(), x)`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=[x] + self.inputs):
x = ops.convert_to_tensor(x, name="x")
return self._dispatch_based_on_batch(
self._batch_matmul, self._matmul, x=x, transpose_x=transpose_x)
def _batch_matmul(self, x, transpose_x=False):
# Implement in derived class to enable self.matmul(x).
raise NotImplementedError("This operator has no batch matmul Op.")
def _matmul(self, x, transpose_x=False):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_matmul(x, transpose_x=transpose_x)
def sqrt_matmul(self, x, transpose_x=False, name="sqrt_matmul"):
"""Left (batch) matmul `x` by a sqrt of this matrix: `Sx` where `A = S S^T`.
`x` is a batch matrix with compatible shape if
```
self.shape = [N1,...,Nn] + [k, k]
x.shape = [N1,...,Nn] + [k, r]
```
Args:
x: `Tensor` with shape `self.batch_shape + [k, r]` and same `dtype` as
this `Operator`.
transpose_x: If `True`, `x` is transposed before multiplication.
name: A name scope to use for ops added by this method.
Returns:
A result equivalent to `tf.matmul(self.sqrt_to_dense(), x)`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=[x] + self.inputs):
x = ops.convert_to_tensor(x, name="x")
return self._dispatch_based_on_batch(
self._batch_sqrt_matmul, self._sqrt_matmul, x=x,
transpose_x=transpose_x)
def _batch_sqrt_matmul(self, x, transpose_x=False):
# Implement in derived class to enable self.sqrt_matmul(x).
raise NotImplementedError("This operator has no batch_sqrt_matmul Op.")
def _sqrt_matmul(self, x, transpose_x=False):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_sqrt_matmul(x, transpose_x=transpose_x)
def solve(self, rhs, name="solve"):
"""Solve `r` batch systems: `A X = rhs`.
`rhs` is a batch matrix with compatible shape if
```python
self.shape = [N1,...,Nn] + [k, k]
rhs.shape = [N1,...,Nn] + [k, r]
```
For every batch member, this is done in `O(r*k^2)` complexity using back
substitution.
```python
# Solve one linear system (r = 1) for every member of the length 10 batch.
A = ... # shape 10 x 2 x 2
RHS = ... # shape 10 x 2 x 1
operator.shape # = 10 x 2 x 2
X = operator.solve(RHS)  # shape 10 x 2 x 1
# operator.matmul(X) ~ RHS
X[3, :, 0] # Solution to the linear system A[3, :, :] x = RHS[3, :, 0]
# Solve five linear systems (r = 5) for every member of the length 10 batch.
operator.shape # = 10 x 2 x 2
RHS = ... # shape 10 x 2 x 5
...
X[3, :, 2] # Solution to the linear system A[3, :, :] x = RHS[3, :, 2]
```
Args:
rhs: `Tensor` with same `dtype` as this operator and compatible shape,
`rhs.shape = self.shape[:-1] + [r]` for `r >= 1`.
name: A name scope to use for ops added by this method.
Returns:
`Tensor` with same `dtype` and shape as `x`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=[rhs] + self.inputs):
rhs = ops.convert_to_tensor(rhs, name="rhs")
return self._dispatch_based_on_batch(
self._batch_solve, self._solve, rhs=rhs)
def _solve(self, rhs):
# As implemented here, this just calls the batch version. If a more
# efficient non-batch version is available, override in the derived class.
return self._batch_solve(rhs)
def _batch_solve(self, rhs):
# Implement in derived class to enable self.solve().
raise NotImplementedError("batch_solve not implemented for this Operator.")
def sqrt_solve(self, rhs, name="sqrt_solve"):
"""Solve `r` batch systems involving sqrt: `S X = rhs` where `A = SS^T`.
`rhs` is a batch matrix with compatible shape if
```python
self.shape = [N1,...,Nn] + [k, k]
rhs.shape = [N1,...,Nn] + [k, r]
```
For every batch member, this is done in `O(r*k^2)` complexity using back
substitution.
```python
# Solve one linear system (r = 1) for every member of the length 10 batch.
A = ... # shape 10 x 2 x 2
RHS = ... # shape 10 x 2 x 1
operator.shape # = 10 x 2 x 2
X = operator.sqrt_solve(RHS)  # shape 10 x 2 x 1
# operator.sqrt_matmul(X) ~ RHS
X[3, :, 0] # Solution to the linear system S[3, :, :] x = RHS[3, :, 0]
# Solve five linear systems (r = 5) for every member of the length 10 batch.
operator.shape # = 10 x 2 x 2
RHS = ... # shape 10 x 2 x 5
...
X[3, :, 2] # Solution to the linear system S[3, :, :] x = RHS[3, :, 2]
```
Args:
rhs: `Tensor` with same `dtype` as this operator and compatible shape,
`rhs.shape = self.shape[:-1] + [r]` for `r >= 1`.
name: A name scope to use for ops added by this method.
Returns:
`Tensor` with same `dtype` and shape as `x`.
"""
with ops.name_scope(self.name):
with ops.name_scope(name, values=[rhs] + self.inputs):
rhs = ops.convert_to_tensor(rhs, name="rhs")
return self._dispatch_based_on_batch(
self._batch_sqrt_solve, self._sqrt_solve, rhs=rhs)
  def _sqrt_solve(self, rhs):
    """Solve a single (non-batch) sqrt system; see `sqrt_solve`."""
    # As implemented here, this just calls the batch version. If a more
    # efficient non-batch version is available, override in the derived class.
    return self._batch_sqrt_solve(rhs)
  def _batch_sqrt_solve(self, rhs):
    """Batch solve `S X = rhs` where `A = SS^T`; derived classes override."""
    # Implement in derived class to enable self.sqrt_solve()
    raise NotImplementedError(
        "batch sqrt_solve not implemented for this Operator.")
  def to_dense(self, name="to_dense"):
    """Return a dense (batch) matrix representing this operator.
    Args:
      name: A name scope to use for ops added by this method.
    Returns:
      Dense (batch) matrix `Tensor`.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=self.inputs):
        return self._to_dense()
  def _to_dense(self):
    """Dense representation; derived classes must override this."""
    # Implement in derived class to enable self.to_dense().
    raise NotImplementedError("This operator has no dense representation.")
  def sqrt_to_dense(self, name="sqrt_to_dense"):
    """Return a dense (batch) matrix representing sqrt of this operator.
    Args:
      name: A name scope to use for ops added by this method.
    Returns:
      Dense (batch) matrix `Tensor` representing `S` where `A = SS^T`.
    """
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=self.inputs):
        return self._sqrt_to_dense()
  def _sqrt_to_dense(self):
    """Dense sqrt representation; derived classes must override this."""
    # Implement in derived class to enable self.sqrt_to_dense().
    raise NotImplementedError("This operator has no dense sqrt representation.")
def flip_matrix_to_vector(mat, batch_shape, static_batch_shape):
  """Flip dims to reshape batch matrix `mat` to a vector with given batch shape.
  ```python
  mat = tf.random_normal(2, 3, 4, 6)
  # Flip the trailing dimension around to the front.
  flip_matrix_to_vector(mat, [6, 2, 3], [6, 3, 2]) # Shape [6, 2, 3, 4]
  # Flip the trailing dimension around then reshape batch indices to batch_shape
  flip_matrix_to_vector(mat, [6, 3, 2], [6, 3, 2]) # Shape [6, 3, 2, 4]
  flip_matrix_to_vector(mat, [2, 3, 2, 3], [2,3,2,3]) # Shape [2, 3, 2, 3, 4]
  ```
  Assume `mat.shape = matrix_batch_shape + [k, M]`. The returned vector is
  generated in two steps:
  1. Flip the final dimension to the front, giving a shape
    `[M] + matrix_batch_shape + [k]`.
  2. Reshape the leading dimensions, giving final shape = `batch_shape + [k]`.
  The reshape in step 2 will fail if the number of elements is not equal, i.e.
  `M*prod(matrix_batch_shape) != prod(batch_shape)`.
  See also: flip_vector_to_matrix.
  Args:
    mat: `Tensor` with rank `>= 2`.
    batch_shape: `int32` `Tensor` giving leading "batch" shape of result.
    static_batch_shape: `TensorShape` object giving batch shape of result.
  Returns:
    `Tensor` with same elements as `mat` but with shape `batch_shape + [k]`.
  """
  mat = ops.convert_to_tensor(mat, name="mat")
  # Use the cheaper static-shape path only when both the target batch shape
  # and the input's shape are fully known at graph construction time.
  if (static_batch_shape.is_fully_defined()
      and mat.get_shape().is_fully_defined()):
    return _flip_matrix_to_vector_static(mat, static_batch_shape)
  else:
    return _flip_matrix_to_vector_dynamic(mat, batch_shape)
def _flip_matrix_to_vector_static(mat, static_batch_shape):
  """Flip matrix to vector with static shapes."""
  rank = mat.get_shape().ndims
  # mat.shape = matrix_batch_shape + [k, M]; k is the second-to-last dim.
  k = mat.get_shape()[-2]
  # Move the trailing M dimension to the front, giving
  # [M] + matrix_batch_shape + [k].
  permutation = [rank - 1] + list(range(rank - 1))
  flipped = array_ops.transpose(mat, perm=permutation)
  # Collapse the leading dimensions into the requested static batch shape.
  target_shape = static_batch_shape.concatenate(k)
  return array_ops.reshape(flipped, target_shape)
def _flip_matrix_to_vector_dynamic(mat, batch_shape):
  """Flip matrix to vector with dynamic shapes."""
  mat_rank = array_ops.rank(mat)
  # k is the second-to-last dimension of mat, fetched at graph run time.
  k = array_ops.gather(array_ops.shape(mat), mat_rank - 2)
  final_shape = array_ops.concat((batch_shape, [k]), 0)
  # mat.shape = matrix_batch_shape + [k, M]
  # Permutation corresponding to [M] + matrix_batch_shape + [k]
  perm = array_ops.concat(([mat_rank - 1], math_ops.range(0, mat_rank - 1)), 0)
  mat_with_end_at_beginning = array_ops.transpose(mat, perm=perm)
  vector = array_ops.reshape(mat_with_end_at_beginning, final_shape)
  return vector
def flip_vector_to_matrix(vec, batch_shape, static_batch_shape):
  """Flip dims to reshape batch vector `x` to a matrix with given batch shape.
  ```python
  vec = tf.random_normal(2, 3, 4, 5)
  # Flip the leading dimension to the end.
  flip_vector_to_matrix(vec, [3, 4], [3, 4]) # Shape [3, 4, 5, 2]
  # Flip nothing, just extend with a singleton dimension.
  flip_vector_to_matrix(vec, [2, 3, 4], [2, 3, 4]) # Shape [2, 3, 4, 5, 1]
  # Flip leading dimension to the end and reshape the batch indices to
  # batch_shape.
  flip_vector_to_matrix(vec, [4, 3], [4, 3]) # Shape [4, 3, 5, 2]
  ```
  Suppose `batch_shape` is length `n`. Then...
  Given `vec.shape = [M1,...,Mm] + [N1,...,Nn] + [k]`, for some
  `m > 0` we reshape to a batch matrix with shape `batch_shape + [k, M]`
  where `M = M1*...*Mm`. This is done by "flipping" the leading dimensions to
  the end and possibly reshaping `[N1,...,Nn]` to `batch_shape`.
  In the case `vec.shape = [N1,...,Nn] + [k]`, we reshape to
  `batch_shape + [k, 1]` by extending the tensor with a singleton dimension and
  possibly reshaping `[N1,...,Nn]` to `batch_shape`.
  See also: flip_matrix_to_vector.
  Args:
    vec: `Tensor` with shape `[M1,...,Mm] + [N1,...,Nn] + [k]`
    batch_shape: `int32` `Tensor`.
    static_batch_shape: `TensorShape` with statically determined batch shape.
  Returns:
    `Tensor` with same `dtype` as `vec` and new shape.
  """
  vec = ops.convert_to_tensor(vec, name="vec")
  # Use the static path only when both shapes are fully known at graph
  # construction time; otherwise fall back to dynamic shape ops.
  if (
      vec.get_shape().is_fully_defined()
      and static_batch_shape.is_fully_defined()):
    return _flip_vector_to_matrix_static(vec, static_batch_shape)
  else:
    return _flip_vector_to_matrix_dynamic(vec, batch_shape)
def _flip_vector_to_matrix_dynamic(vec, batch_shape):
  """flip_vector_to_matrix with dynamic shapes."""
  # Shapes associated with batch_shape
  batch_rank = array_ops.size(batch_shape)
  # Shapes associated with vec.
  vec = ops.convert_to_tensor(vec, name="vec")
  vec_shape = array_ops.shape(vec)
  vec_rank = array_ops.rank(vec)
  vec_batch_rank = vec_rank - 1
  # m = number of leading "extra" dims [M1,...,Mm] to be flipped to the end.
  m = vec_batch_rank - batch_rank
  # vec_shape_left = [M1,...,Mm] or [].
  vec_shape_left = array_ops.strided_slice(vec_shape, [0], [m])
  # If vec_shape_left = [], then condensed_shape = [1] since reduce_prod([]) = 1
  # If vec_shape_left = [M1,...,Mm], condensed_shape = [M1*...*Mm]
  condensed_shape = [math_ops.reduce_prod(vec_shape_left)]
  k = array_ops.gather(vec_shape, vec_rank - 1)
  new_shape = array_ops.concat((batch_shape, [k], condensed_shape), 0)
  def _flip_front_dims_to_back():
    # Permutation corresponding to [N1,...,Nn] + [k, M1,...,Mm]
    perm = array_ops.concat((math_ops.range(m, vec_rank), math_ops.range(0, m)),
                            0)
    return array_ops.transpose(vec, perm=perm)
  # If there are no leading extra dims (m == 0), just append a singleton
  # column instead of transposing; m is only known at graph run time here,
  # hence the cond.
  x_flipped = control_flow_ops.cond(
      math_ops.less(0, m),
      _flip_front_dims_to_back,
      lambda: array_ops.expand_dims(vec, -1))
  return array_ops.reshape(x_flipped, new_shape)
def _flip_vector_to_matrix_static(vec, batch_shape):
  """flip_vector_to_matrix with static shapes."""
  # Shapes associated with batch_shape
  batch_rank = batch_shape.ndims
  # Shapes associated with vec.
  vec = ops.convert_to_tensor(vec, name="vec")
  vec_shape = vec.get_shape()
  vec_rank = len(vec_shape)
  vec_batch_rank = vec_rank - 1
  # m = number of leading "extra" dims [M1,...,Mm] to be flipped to the end.
  m = vec_batch_rank - batch_rank
  # vec_shape_left = [M1,...,Mm] or [].
  vec_shape_left = vec_shape[:m]
  # If vec_shape_left = [], then condensed_shape = [1] since reduce_prod([]) = 1
  # If vec_shape_left = [M1,...,Mm], condensed_shape = [M1*...*Mm]
  condensed_shape = [np.prod(vec_shape_left)]
  k = vec_shape[-1]
  new_shape = batch_shape.concatenate(k).concatenate(condensed_shape)
  def _flip_front_dims_to_back():
    # Permutation corresponding to [N1,...,Nn] + [k, M1,...,Mm]
    perm = array_ops.concat((math_ops.range(m, vec_rank), math_ops.range(0, m)),
                            0)
    return array_ops.transpose(vec, perm=perm)
  # m is known statically here, so the branch is resolved at graph build time.
  if 0 < m:
    x_flipped = _flip_front_dims_to_back()
  else:
    x_flipped = array_ops.expand_dims(vec, -1)
  return array_ops.reshape(x_flipped, new_shape)
def extract_batch_shape(x, num_event_dims, name="extract_batch_shape"):
  """Extract the batch shape from `x`.
  Assuming `x.shape = batch_shape + event_shape`, where `event_shape` has
  `num_event_dims` dimensions, this `Op` returns the batch shape `Tensor`.
  Args:
    x: `Tensor` with rank at least `num_event_dims`. If rank is not high enough
      this `Op` will fail.
    num_event_dims: `int32` scalar `Tensor`. The number of trailing dimensions
      in `x` to be considered as part of `event_shape`.
    name: A name to prepend to created `Ops`.
  Returns:
    batch_shape: `1-D` `int32` `Tensor`
  """
  with ops.name_scope(name, values=[x]):
    x = ops.convert_to_tensor(x, name="x")
    # Keep everything but the trailing num_event_dims entries of the shape.
    batch_rank = array_ops.rank(x) - num_event_dims
    return array_ops.strided_slice(array_ops.shape(x), [0], [batch_rank])
| apache-2.0 |
hofschroeer/gnuradio | gr-fec/python/fec/qa_fecapi_cc.py | 7 | 6822 | #!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import absolute_import
from gnuradio import gr, gr_unittest
from gnuradio import fec
from _qa_helper import _qa_helper
class test_fecapi_cc(gr_unittest.TestCase):
    """QA tests for the FECAPI convolutional-code encoder/decoder.

    Every case runs the same round trip: data pushed through a cc_encoder
    and cc_decoder (via _qa_helper) must come out unchanged.  The cases
    differ only in threading model, coder parallelism, and (for the last
    three) termination mode.
    """

    # Convolutional-code parameters shared by every test case.
    frame_size = 30
    k = 7
    rate = 2
    polys = [109, 79]

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def _make_coders(self, n=None, mode=None):
        """Build a matching encoder/decoder pair.

        Returns single objects when ``n`` is None (parallelism 0), or lists
        of ``n`` coders each (parallelism 1).  ``mode``, when given, selects
        the termination mode (CC_TERMINATED/CC_TRUNCATED/CC_TAILBITING).
        """
        kwargs = {}
        if mode is not None:
            kwargs['mode'] = mode
        def make_enc():
            return fec.cc_encoder_make(self.frame_size * 8, self.k,
                                       self.rate, self.polys, **kwargs)
        def make_dec():
            return fec.cc_decoder.make(self.frame_size * 8, self.k,
                                       self.rate, self.polys, **kwargs)
        if n is None:
            return make_enc(), make_dec()
        return ([make_enc() for _ in range(n)],
                [make_dec() for _ in range(n)])

    def _run_and_check(self, enc, dec, threading, nframes):
        """Run the flowgraph and assert the decoded data equals the input."""
        self.test = _qa_helper(nframes * self.frame_size, enc, dec, threading)
        self.tb.connect(self.test)
        self.tb.run()
        data_out = self.test.snk_output.data()
        data_in = self.test.snk_input.data()[0:len(data_out)]
        self.assertEqual(data_in, data_out)

    def test_parallelism0_00(self):
        enc, dec = self._make_coders()
        self._run_and_check(enc, dec, None, 4)

    def test_parallelism0_01(self):
        enc, dec = self._make_coders()
        self._run_and_check(enc, dec, 'ordinary', 5)

    def test_parallelism0_02(self):
        enc, dec = self._make_coders()
        self._run_and_check(enc, dec, 'capillary', 5)

    def test_parallelism1_00(self):
        enc, dec = self._make_coders(n=1)
        self._run_and_check(enc, dec, None, 5)

    def test_parallelism1_01(self):
        enc, dec = self._make_coders(n=1)
        self._run_and_check(enc, dec, 'ordinary', 5)

    def test_parallelism1_02(self):
        enc, dec = self._make_coders(n=1)
        self._run_and_check(enc, dec, 'capillary', 5)

    def test_parallelism1_03(self):
        enc, dec = self._make_coders(n=4, mode=fec.CC_TERMINATED)
        self._run_and_check(enc, dec, 'capillary', 4)

    def test_parallelism1_04(self):
        enc, dec = self._make_coders(n=4, mode=fec.CC_TRUNCATED)
        self._run_and_check(enc, dec, 'capillary', 4)

    def test_parallelism1_05(self):
        enc, dec = self._make_coders(n=4, mode=fec.CC_TAILBITING)
        self._run_and_check(enc, dec, 'capillary', 4)
if __name__ == '__main__':
    # Stand-alone run: write JUnit-style results to test_fecapi_cc.xml.
    gr_unittest.run(test_fecapi_cc, "test_fecapi_cc.xml")
| gpl-3.0 |
ppizarror/Hero-of-Antair | data/images/pil/PalmImagePlugin.py | 2 | 9579 | #
# The Python Imaging Library.
# $Id$
#
##
# Image plugin for Palm pixmap images (output only).
##
__version__ = "1.0"
import Image, ImageFile
# RGB triples for the standard Palm OS 8-bit colormap.  The trailing
# (0, 0, 0) entries pad the table; build_prototype_image() below pads it
# further to a full 256-entry palette.
_Palm8BitColormapValues = (
( 255, 255, 255 ), ( 255, 204, 255 ), ( 255, 153, 255 ), ( 255, 102, 255 ),
( 255, 51, 255 ), ( 255, 0, 255 ), ( 255, 255, 204 ), ( 255, 204, 204 ),
( 255, 153, 204 ), ( 255, 102, 204 ), ( 255, 51, 204 ), ( 255, 0, 204 ),
( 255, 255, 153 ), ( 255, 204, 153 ), ( 255, 153, 153 ), ( 255, 102, 153 ),
( 255, 51, 153 ), ( 255, 0, 153 ), ( 204, 255, 255 ), ( 204, 204, 255 ),
( 204, 153, 255 ), ( 204, 102, 255 ), ( 204, 51, 255 ), ( 204, 0, 255 ),
( 204, 255, 204 ), ( 204, 204, 204 ), ( 204, 153, 204 ), ( 204, 102, 204 ),
( 204, 51, 204 ), ( 204, 0, 204 ), ( 204, 255, 153 ), ( 204, 204, 153 ),
( 204, 153, 153 ), ( 204, 102, 153 ), ( 204, 51, 153 ), ( 204, 0, 153 ),
( 153, 255, 255 ), ( 153, 204, 255 ), ( 153, 153, 255 ), ( 153, 102, 255 ),
( 153, 51, 255 ), ( 153, 0, 255 ), ( 153, 255, 204 ), ( 153, 204, 204 ),
( 153, 153, 204 ), ( 153, 102, 204 ), ( 153, 51, 204 ), ( 153, 0, 204 ),
( 153, 255, 153 ), ( 153, 204, 153 ), ( 153, 153, 153 ), ( 153, 102, 153 ),
( 153, 51, 153 ), ( 153, 0, 153 ), ( 102, 255, 255 ), ( 102, 204, 255 ),
( 102, 153, 255 ), ( 102, 102, 255 ), ( 102, 51, 255 ), ( 102, 0, 255 ),
( 102, 255, 204 ), ( 102, 204, 204 ), ( 102, 153, 204 ), ( 102, 102, 204 ),
( 102, 51, 204 ), ( 102, 0, 204 ), ( 102, 255, 153 ), ( 102, 204, 153 ),
( 102, 153, 153 ), ( 102, 102, 153 ), ( 102, 51, 153 ), ( 102, 0, 153 ),
( 51, 255, 255 ), ( 51, 204, 255 ), ( 51, 153, 255 ), ( 51, 102, 255 ),
( 51, 51, 255 ), ( 51, 0, 255 ), ( 51, 255, 204 ), ( 51, 204, 204 ),
( 51, 153, 204 ), ( 51, 102, 204 ), ( 51, 51, 204 ), ( 51, 0, 204 ),
( 51, 255, 153 ), ( 51, 204, 153 ), ( 51, 153, 153 ), ( 51, 102, 153 ),
( 51, 51, 153 ), ( 51, 0, 153 ), ( 0, 255, 255 ), ( 0, 204, 255 ),
( 0, 153, 255 ), ( 0, 102, 255 ), ( 0, 51, 255 ), ( 0, 0, 255 ),
( 0, 255, 204 ), ( 0, 204, 204 ), ( 0, 153, 204 ), ( 0, 102, 204 ),
( 0, 51, 204 ), ( 0, 0, 204 ), ( 0, 255, 153 ), ( 0, 204, 153 ),
( 0, 153, 153 ), ( 0, 102, 153 ), ( 0, 51, 153 ), ( 0, 0, 153 ),
( 255, 255, 102 ), ( 255, 204, 102 ), ( 255, 153, 102 ), ( 255, 102, 102 ),
( 255, 51, 102 ), ( 255, 0, 102 ), ( 255, 255, 51 ), ( 255, 204, 51 ),
( 255, 153, 51 ), ( 255, 102, 51 ), ( 255, 51, 51 ), ( 255, 0, 51 ),
( 255, 255, 0 ), ( 255, 204, 0 ), ( 255, 153, 0 ), ( 255, 102, 0 ),
( 255, 51, 0 ), ( 255, 0, 0 ), ( 204, 255, 102 ), ( 204, 204, 102 ),
( 204, 153, 102 ), ( 204, 102, 102 ), ( 204, 51, 102 ), ( 204, 0, 102 ),
( 204, 255, 51 ), ( 204, 204, 51 ), ( 204, 153, 51 ), ( 204, 102, 51 ),
( 204, 51, 51 ), ( 204, 0, 51 ), ( 204, 255, 0 ), ( 204, 204, 0 ),
( 204, 153, 0 ), ( 204, 102, 0 ), ( 204, 51, 0 ), ( 204, 0, 0 ),
( 153, 255, 102 ), ( 153, 204, 102 ), ( 153, 153, 102 ), ( 153, 102, 102 ),
( 153, 51, 102 ), ( 153, 0, 102 ), ( 153, 255, 51 ), ( 153, 204, 51 ),
( 153, 153, 51 ), ( 153, 102, 51 ), ( 153, 51, 51 ), ( 153, 0, 51 ),
( 153, 255, 0 ), ( 153, 204, 0 ), ( 153, 153, 0 ), ( 153, 102, 0 ),
( 153, 51, 0 ), ( 153, 0, 0 ), ( 102, 255, 102 ), ( 102, 204, 102 ),
( 102, 153, 102 ), ( 102, 102, 102 ), ( 102, 51, 102 ), ( 102, 0, 102 ),
( 102, 255, 51 ), ( 102, 204, 51 ), ( 102, 153, 51 ), ( 102, 102, 51 ),
( 102, 51, 51 ), ( 102, 0, 51 ), ( 102, 255, 0 ), ( 102, 204, 0 ),
( 102, 153, 0 ), ( 102, 102, 0 ), ( 102, 51, 0 ), ( 102, 0, 0 ),
( 51, 255, 102 ), ( 51, 204, 102 ), ( 51, 153, 102 ), ( 51, 102, 102 ),
( 51, 51, 102 ), ( 51, 0, 102 ), ( 51, 255, 51 ), ( 51, 204, 51 ),
( 51, 153, 51 ), ( 51, 102, 51 ), ( 51, 51, 51 ), ( 51, 0, 51 ),
( 51, 255, 0 ), ( 51, 204, 0 ), ( 51, 153, 0 ), ( 51, 102, 0 ),
( 51, 51, 0 ), ( 51, 0, 0 ), ( 0, 255, 102 ), ( 0, 204, 102 ),
( 0, 153, 102 ), ( 0, 102, 102 ), ( 0, 51, 102 ), ( 0, 0, 102 ),
( 0, 255, 51 ), ( 0, 204, 51 ), ( 0, 153, 51 ), ( 0, 102, 51 ),
( 0, 51, 51 ), ( 0, 0, 51 ), ( 0, 255, 0 ), ( 0, 204, 0 ),
( 0, 153, 0 ), ( 0, 102, 0 ), ( 0, 51, 0 ), ( 17, 17, 17 ),
( 34, 34, 34 ), ( 68, 68, 68 ), ( 85, 85, 85 ), ( 119, 119, 119 ),
( 136, 136, 136 ), ( 170, 170, 170 ), ( 187, 187, 187 ), ( 221, 221, 221 ),
( 238, 238, 238 ), ( 192, 192, 192 ), ( 128, 0, 0 ), ( 128, 0, 128 ),
( 0, 128, 0 ), ( 0, 128, 128 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ),
( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ))
# so build a prototype image to be used for palette resampling
def build_prototype_image():
    """Build a 1x256 "P"-mode image carrying the standard Palm colormap.

    The result is used as a quantization target when resampling images to
    the Palm palette.
    """
    image = Image.new("L", (1, len(_Palm8BitColormapValues),))
    image.putdata(range(len(_Palm8BitColormapValues)))
    # Flatten the (r, g, b) triples into one flat sequence and pad the
    # palette out to 256 entries with black.  Building a list avoids the
    # quadratic cost of the old repeated tuple concatenation.
    palettedata = []
    for rgb in _Palm8BitColormapValues:
        palettedata.extend(rgb)
    palettedata.extend((0, 0, 0) * (256 - len(_Palm8BitColormapValues)))
    image.putpalette(palettedata)
    return image

Palm8BitColormapImage = build_prototype_image()
# OK, we now have in Palm8BitColormapImage, a "P"-mode image with the right palette
#
# --------------------------------------------------------------------
# Bit values OR-ed into the 16-bit "flags" field of the Palm header by _save.
_FLAGS = {
    "custom-colormap": 0x4000,
    "is-compressed": 0x8000,
    "has-transparent": 0x2000,
    }
# Values written to the header's compression-type byte.
_COMPRESSION_TYPES = {
    "none": 0xFF,
    "rle": 0x01,
    "scanline": 0x00,
    }
def o16b(i):
    """Pack *i* as a big-endian 16-bit value (2-character byte string)."""
    return chr(i>>8&255) + chr(i&255)
#
# --------------------------------------------------------------------
##
# (Internal) Image save plugin for the Palm format.
def _save(im, fp, filename, check=0):
if im.mode == "P":
# we assume this is a color Palm image with the standard colormap,
# unless the "info" dict has a "custom-colormap" field
rawmode = "P"
bpp = 8
version = 1
elif im.mode == "L" and im.encoderinfo.has_key("bpp") and im.encoderinfo["bpp"] in (1, 2, 4):
# this is 8-bit grayscale, so we shift it to get the high-order bits, and invert it because
# Palm does greyscale from white (0) to black (1)
bpp = im.encoderinfo["bpp"]
im = im.point(lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift))
# we ignore the palette here
im.mode = "P"
rawmode = "P;" + str(bpp)
version = 1
elif im.mode == "L" and im.info.has_key("bpp") and im.info["bpp"] in (1, 2, 4):
# here we assume that even though the inherent mode is 8-bit grayscale, only
# the lower bpp bits are significant. We invert them to match the Palm.
bpp = im.info["bpp"]
im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval))
# we ignore the palette here
im.mode = "P"
rawmode = "P;" + str(bpp)
version = 1
elif im.mode == "1":
# monochrome -- write it inverted, as is the Palm standard
rawmode = "1;I"
bpp = 1
version = 0
else:
raise IOError, "cannot write mode %s as Palm" % im.mode
if check:
return check
#
# make sure image data is available
im.load()
# write header
cols = im.size[0]
rows = im.size[1]
rowbytes = ((cols + (16/bpp - 1)) / (16 / bpp)) * 2;
transparent_index = 0
compression_type = _COMPRESSION_TYPES["none"]
flags = 0;
if im.mode == "P" and im.info.has_key("custom-colormap"):
flags = flags & _FLAGS["custom-colormap"]
colormapsize = 4 * 256 + 2;
colormapmode = im.palette.mode
colormap = im.getdata().getpalette()
else:
colormapsize = 0
if im.info.has_key("offset"):
offset = (rowbytes * rows + 16 + 3 + colormapsize) / 4;
else:
offset = 0
fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags))
fp.write(chr(bpp))
fp.write(chr(version))
fp.write(o16b(offset))
fp.write(chr(transparent_index))
fp.write(chr(compression_type))
fp.write(o16b(0)) # reserved by Palm
# now write colormap if necessary
if colormapsize > 0:
fp.write(o16b(256))
for i in range(256):
fp.write(chr(i))
if colormapmode == 'RGB':
fp.write(chr(colormap[3 * i]) + chr(colormap[3 * i + 1]) + chr(colormap[3 * i + 2]))
elif colormapmode == 'RGBA':
fp.write(chr(colormap[4 * i]) + chr(colormap[4 * i + 1]) + chr(colormap[4 * i + 2]))
# now convert data to raw form
ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, rowbytes, 1))])
fp.flush()
#
# --------------------------------------------------------------------
# Palm is an output-only format: register just the save handler plus the
# file extension and MIME type.
Image.register_save("Palm", _save)
Image.register_extension("Palm", ".palm")
Image.register_mime("Palm", "image/palm")
| gpl-2.0 |
zhaochao/fuel-main | fuelweb_test/helpers/fuel_actions.py | 4 | 5877 | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
import re
from proboscis.asserts import assert_equal
from fuelweb_test import logger
class FuelActions(object):
    """Namespace grouping helpers that run commands inside the Docker
    containers of a Fuel master node over an SSH connection."""
    class BaseActions(object):
        """Common remote-execution plumbing for a dockerctl-managed container."""
        def __init__(self, admin_remote):
            # SSH connection to the Fuel admin (master) node.
            self.admin_remote = admin_remote
            # Subclasses set this to the dockerctl container name.
            self.container = None
        def execute_in_container(self, command, container=None, exit_code=None,
                                 stdin=None):
            """Run *command* inside *container* via ``dockerctl shell``.

            :param command: shell command to execute in the container
            :param container: container name (defaults to self.container)
            :param exit_code: if given, assert the command exits with it
            :param stdin: if given, echo this string into the command's stdin
            :returns: the command's stdout joined and stripped
            """
            if not container:
                container = self.container
            cmd = 'dockerctl shell {0} {1}'.format(container, command)
            if stdin is not None:
                cmd = 'echo "{0}" | {1}'.format(stdin, cmd)
            result = self.admin_remote.execute(cmd)
            if exit_code is not None:
                assert_equal(exit_code,
                             result['exit_code'],
                             ('Command {cmd} returned exit code "{e}", but '
                              'expected "{c}". Output: {out}; {err} ').format(
                                 cmd=cmd,
                                 e=result['exit_code'],
                                 c=exit_code,
                                 out=result['stdout'],
                                 err=result['stderr']
                             ))
            return ''.join(result['stdout']).strip()
    class Nailgun(BaseActions):
        """Helpers targeting the 'nailgun' container."""
        def __init__(self, admin_remote):
            super(FuelActions.Nailgun, self).__init__(admin_remote)
            self.container = 'nailgun'
        def update_nailgun_settings_once(self, settings):
            # temporary change Nailgun settings (until next container restart)
            cfg_file = '/etc/nailgun/settings.yaml'
            ng_settings = yaml.load(self.execute_in_container(
                'cat {0}'.format(cfg_file), exit_code=0))
            ng_settings.update(settings)
            logger.debug('Uploading new nailgun settings: {}'.format(
                ng_settings))
            self.execute_in_container('tee {0}'.format(cfg_file),
                                      stdin=yaml.dump(ng_settings),
                                      exit_code=0)
        def set_collector_address(self, host, port, ssl=False):
            """Point Nailgun's statistics-collector URLs at host:port."""
            cmd = ("awk '/COLLECTOR.*URL/' /usr/lib/python2.6"
                   "/site-packages/nailgun/settings.yaml")
            protocol = 'http' if not ssl else 'https'
            parameters = {}
            # Rewrite every COLLECTOR*URL setting to use the custom endpoint.
            for p in self.execute_in_container(cmd, exit_code=0).split('\n'):
                parameters[p.split(': ')[0]] = re.sub(
                    r'https?://\{collector_server\}',
                    '{0}://{1}:{2}'.format(protocol, host, port),
                    p.split(': ')[1])[1:-1]
            logger.debug('Custom collector parameters: {0}'.format(parameters))
            self.update_nailgun_settings_once(parameters)
            if ssl:
                # if test collector server doesn't have trusted SSL cert
                # installed we have to use this hack in order to disable cert
                # verification and allow using of self-signed SSL certificate
                cmd = ("sed -i '/elf.verify/ s/True/False/' /usr/lib/python2.6"
                       "/site-packages/requests/sessions.py")
                self.execute_in_container(cmd, exit_code=0)
        def force_fuel_stats_sending(self):
            """Restart statsenderd and fail if its fresh log contains ERRORs."""
            log_file = '/var/log/nailgun/statsenderd.log'
            # Rotate logs on restart in order to get rid of old errors
            cmd = 'mv {0}{{,.backup_$(date +%s)}}'.format(log_file)
            self.execute_in_container(cmd)
            cmd = 'supervisorctl restart statsenderd'
            self.execute_in_container(cmd, exit_code=0)
            cmd = 'grep -sw "ERROR" {0}'.format(log_file)
            try:
                # grep exit code 1 means "no ERROR lines found" == success.
                self.execute_in_container(cmd, exit_code=1)
            except AssertionError:
                logger.error(("Fuel stats were sent with errors! Check its log"
                              "s in {0} for details.").format(log_file))
                raise
    class Postgres(BaseActions):
        """Helpers targeting the 'postgres' container."""
        def __init__(self, admin_remote):
            super(FuelActions.Postgres, self).__init__(admin_remote)
            self.container = 'postgres'
        def run_query(self, db, query):
            """Run a single SQL *query* against *db* via psql; return stdout."""
            cmd = "su - postgres -c 'psql -qt -d {0} -c \"{1};\"'".format(
                db, query)
            return self.execute_in_container(cmd, exit_code=0)
        def action_logs_contain(self, action, group=False,
                                table='action_logs'):
            """Return True if *action* appears in the given action-log table.

            When *group* is True, match on action_group instead of
            action_name.
            """
            logger.info("Checking that '{0}' action was logged..".format(
                action))
            log_filter = "action_name" if not group else "action_group"
            q = "select id from {0} where {1} = '\"'\"'{2}'\"'\"'".format(
                table, log_filter, action)
            logs = [i.strip() for i in self.run_query('nailgun', q).split('\n')
                    if re.compile(r'\d+').match(i.strip())]
            logger.info("Found log records with ids: {0}".format(logs))
            return len(logs) > 0
        def count_sent_action_logs(self, table='action_logs'):
            """Return the number of rows in *table* already marked as sent."""
            q = "select count(id) from {0} where is_sent = True".format(table)
            return int(self.run_query('nailgun', q))
| apache-2.0 |
DanilMarchyshyn/python_traning | test/test_add_pozov.py | 1 | 2732 | # -*- coding: utf-8 -*-
from selenium.webdriver.firefox.webdriver import WebDriver
import unittest
def is_alert_present(wd):
    """Return True if a JavaScript alert is currently open in *wd*.

    Probes the driver by reading the alert text; any error while switching
    to the alert means no alert is present.
    """
    try:
        wd.switch_to_alert().text
        return True
    except Exception:
        # Narrowed from a bare ``except:`` which would also swallow
        # KeyboardInterrupt/SystemExit; Exception still covers the various
        # WebDriver "no alert" errors.
        return False
class add_pozov(unittest.TestCase):
    """Recorded Selenium scenario: log in to OSOP and create a claim record."""
    def setUp(self):
        # Fresh Firefox session per test; generous implicit wait because the
        # ExtJS UI renders elements asynchronously.
        self.wd = WebDriver()
        self.wd.implicitly_wait(60)
    def test_add_pozov(self):
        success = True
        wd = self.wd
        # open home page
        wd.get("http://10.100.7.71:57775/osop/LoginForm.csp")
        # login
        wd.find_element_by_name("username").click()
        wd.find_element_by_name("username").click()
        wd.find_element_by_name("username").clear()
        wd.find_element_by_name("username").send_keys("test17")
        # NOTE(review): the password field is filled twice below, first with a
        # partial value -- appears to be recorder noise; the second value wins.
        wd.find_element_by_name("password").click()
        wd.find_element_by_name("password").clear()
        wd.find_element_by_name("password").send_keys("bMS")
        wd.find_element_by_name("password").click()
        wd.find_element_by_name("password").clear()
        wd.find_element_by_name("password").send_keys("bMS$2016")
        wd.find_element_by_css_selector("input.btn.btn-primary").click()
        # navigate the ExtJS menus to the claim-creation form
        wd.find_element_by_id("button-1015-btnInnerEl").click()
        wd.find_element_by_id("menuitem-1017-textEl").click()
        wd.find_element_by_id("menuitem-1044-textEl").click()
        wd.find_element_by_id("button-1087-btnInnerEl").click()
        # fill in the claim form (combo boxes, category tree, text fields)
        wd.find_element_by_id("combo-1195-trigger-picker").click()
        wd.find_element_by_id("ext-element-16").click()
        wd.find_element_by_id("combo-1196-trigger-picker").click()
        wd.find_element_by_id("ext-element-17").click()
        wd.find_element_by_id("treecombo-1199-trigger-picker").click()
        wd.find_element_by_xpath("//table[@id='treeview-1203-record-92']//span[.='З інших питань']").click()
        wd.find_element_by_xpath("//table[@id='treeview-1203-record-91']/tbody/tr/td/div/span").click()
        wd.find_element_by_id("textfield-1215-inputEl").click()
        wd.find_element_by_id("textfield-1215-inputEl").clear()
        wd.find_element_by_id("textfield-1215-inputEl").send_keys("1")
        wd.find_element_by_id("textfield-1216-inputEl").click()
        wd.find_element_by_id("textfield-1216-inputEl").clear()
        wd.find_element_by_id("textfield-1216-inputEl").send_keys("2")
        # save the record and close the dialogs
        wd.find_element_by_id("button-1302-btnInnerEl").click()
        wd.find_element_by_id("button-1005-btnInnerEl").click()
        wd.find_element_by_id("button-1038-btnInnerEl").click()
        wd.find_element_by_id("button-1006-btnInnerEl").click()
        # success is only flipped implicitly by exceptions above
        self.assertTrue(success)
    def tearDown(self):
        # Always close the browser, even on failure.
        self.wd.quit()
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| apache-2.0 |
EUDAT-B2SHARE/invenio-old | modules/miscutil/lib/paginationutils.py | 7 | 1948 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio pagination helper class.
"""
from math import ceil
class Pagination(object):
    """Helps with rendering pagination list."""

    def __init__(self, page, per_page, total_count):
        """Create pagination state.

        :param page: current page number (1-based)
        :param per_page: number of items shown per page
        :param total_count: total number of items
        """
        self.page = page
        self.per_page = per_page
        self.total_count = total_count

    @property
    def pages(self):
        """Returns number of pages."""
        return int(ceil(self.total_count / float(self.per_page)))

    @property
    def has_prev(self):
        """Returns true if it has previous page."""
        return self.page > 1

    @property
    def has_next(self):
        """Returns true if it has next page."""
        return self.page < self.pages

    def iter_pages(self, left_edge=1, left_current=1,
                   right_current=3, right_edge=1):
        """Yield page numbers to display, with ``None`` marking skipped runs.

        The first ``left_edge`` and last ``right_edge`` pages are always
        shown, together with a window around the current page; ``None`` is
        yielded once for each gap between shown pages.
        """
        last = 0
        # ``range`` instead of the Python-2-only ``xrange`` keeps this
        # working on both Python 2 and Python 3.
        for num in range(1, self.pages + 1):
            if num <= left_edge or \
               (num > self.page - left_current - 1 and
                    num < self.page + right_current) or \
               num > self.pages - right_edge:
                if last + 1 != num:
                    yield None
                yield num
                last = num
| gpl-2.0 |
kostaspl/SpiderMonkey38 | testing/mozbase/mozrunner/mozrunner/base/runner.py | 4 | 7303 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from abc import ABCMeta, abstractproperty
import os
import subprocess
import traceback
from mozlog.structured import get_default_logger
from mozprocess import ProcessHandler
import mozcrash
from ..application import DefaultContext
from ..errors import RunnerNotStartedError
class BaseRunner(object):
    """
    The base runner class for all mozrunner objects, both local and remote.

    Subclasses must implement the abstract ``command`` property; this class
    then drives the full lifecycle of the launched process: start, wait,
    stop, crash checking and cleanup.
    """
    # Python-2-style abstract base class declaration (ignored on Python 3).
    __metaclass__ = ABCMeta

    # Name reported in crash logs before any real test has run.
    last_test = 'mozrunner-startup'
    # Handle to the managed process; None until start() is called.
    process_handler = None
    timeout = None
    output_timeout = None

    def __init__(self, app_ctx=None, profile=None, clean_profile=True, env=None,
                 process_class=None, process_args=None, symbols_path=None, dump_save_path=None):
        """Store the run configuration; does not launch anything.

        :param app_ctx: application context; defaults to DefaultContext()
        :param profile: a profile object, or a path string to an existing profile
        :param clean_profile: whether the profile should be reset on cleanup
        :param env: process environment; defaults to a copy of os.environ
        :param process_class: class managing the process; defaults to ProcessHandler
        :param process_args: extra keyword arguments passed to process_class
        :param symbols_path: breakpad symbols used for crash stack walking
        :param dump_save_path: default directory to save minidump files to
        """
        self.app_ctx = app_ctx or DefaultContext()

        # NOTE(review): basestring is Python-2-only; a plain path string is
        # wrapped into a profile object via the app context's profile class.
        if isinstance(profile, basestring):
            self.profile = self.app_ctx.profile_class(profile=profile)
        else:
            self.profile = profile or self.app_ctx.profile_class(**getattr(self.app_ctx, 'profile_args', {}))

        # process environment
        if env is None:
            self.env = os.environ.copy()
        else:
            self.env = env.copy()

        self.clean_profile = clean_profile
        self.process_class = process_class or ProcessHandler
        self.process_args = process_args or {}
        self.symbols_path = symbols_path
        self.dump_save_path = dump_save_path

        # Running total of crashes detected by check_for_crashes().
        self.crashed = 0

    def __del__(self):
        # Best-effort cleanup when the runner is garbage collected.
        self.cleanup()

    @abstractproperty
    def command(self):
        """Returns the command list to run."""
        pass

    @property
    def returncode(self):
        """
        The returncode of the process_handler. A value of None
        indicates the process is still running. A negative
        value indicates the process was killed with the
        specified signal.

        :raises: RunnerNotStartedError
        """
        if self.process_handler:
            return self.process_handler.poll()
        else:
            raise RunnerNotStartedError("returncode accessed before runner started")

    def start(self, debug_args=None, interactive=False, timeout=None, outputTimeout=None):
        """
        Run self.command in the proper environment.

        :param debug_args: arguments for a debugger
        :param interactive: uses subprocess.Popen directly
        :param timeout: see process_handler.run()
        :param outputTimeout: see process_handler.run()
        :returns: the process id
        """
        self.timeout = timeout
        self.output_timeout = outputTimeout
        cmd = self.command

        # ensure the runner is stopped
        self.stop()

        # attach a debugger, if specified
        if debug_args:
            cmd = list(debug_args) + cmd

        if interactive:
            # Interactive mode bypasses the managed handler so the process
            # shares the console with the user.
            self.process_handler = subprocess.Popen(cmd, env=self.env)
            # TODO: other arguments
        else:
            # this run uses the managed processhandler
            self.process_handler = self.process_class(cmd, env=self.env, **self.process_args)
            self.process_handler.run(self.timeout, self.output_timeout)

        # Reset the crash counter for the new process.
        self.crashed = 0
        return self.process_handler.pid

    def wait(self, timeout=None):
        """
        Wait for the process to exit.

        :param timeout: if not None, will return after timeout seconds.
                        Timeout is ignored if interactive was set to True.
        :returns: the process return code if process exited normally,
                  -<signal> if process was killed (Unix only),
                  None if timeout was reached and the process is still running.
        :raises: RunnerNotStartedError
        """
        if self.is_running():
            # The interactive mode uses directly a Popen process instance. It's
            # wait() method doesn't have any parameters. So handle it separately.
            if isinstance(self.process_handler, subprocess.Popen):
                self.process_handler.wait()
            else:
                self.process_handler.wait(timeout)

        elif not self.process_handler:
            raise RunnerNotStartedError("Wait() called before process started")

        return self.returncode

    def is_running(self):
        """
        Checks if the process is running.

        :returns: True if the process is active
        """
        return self.returncode is None

    def stop(self, sig=None):
        """
        Kill the process.

        :param sig: Signal used to kill the process, defaults to SIGKILL
                    (has no effect on Windows).
        :returns: the process return code if process was already stopped,
                  -<signal> if process was killed (Unix only)
        :raises: RunnerNotStartedError
        """
        try:
            if not self.is_running():
                return self.returncode
        except RunnerNotStartedError:
            # Nothing was ever started, so there is nothing to stop.
            return

        # The interactive mode uses directly a Popen process instance. It's
        # kill() method doesn't have any parameters. So handle it separately.
        if isinstance(self.process_handler, subprocess.Popen):
            self.process_handler.kill()
        else:
            self.process_handler.kill(sig=sig)

        return self.returncode

    def reset(self):
        """
        Reset the runner to its default state.
        """
        self.stop()
        self.process_handler = None

    def check_for_crashes(self, dump_directory=None, dump_save_path=None,
                          test_name=None, quiet=False):
        """
        Check for a possible crash and output stack trace.

        :param dump_directory: Directory to search for minidump files
        :param dump_save_path: Directory to save the minidump files to
        :param test_name: Name to use in the crash output
        :param quiet: If `True` don't print the PROCESS-CRASH message to stdout
        :returns: the cumulative number of crashes detected so far
                  (``self.crashed``); truthy when a crash was detected
        """
        if not dump_directory:
            dump_directory = os.path.join(self.profile.profile, 'minidumps')

        if not dump_save_path:
            dump_save_path = self.dump_save_path

        try:
            logger = get_default_logger()
            if logger is not None:
                # Structured logging path: mozcrash reports through mozlog.
                if test_name is None:
                    test_name = "runner.py"
                self.crashed += mozcrash.log_crashes(
                    logger,
                    dump_directory,
                    self.symbols_path,
                    dump_save_path=dump_save_path,
                    test=test_name)
            else:
                # Plain stdout path when no mozlog logger is configured.
                crashed = mozcrash.check_for_crashes(
                    dump_directory,
                    self.symbols_path,
                    dump_save_path=dump_save_path,
                    test_name=test_name,
                    quiet=quiet)
                if crashed:
                    self.crashed += 1
        except:
            # Deliberate best-effort: crash checking must never take down
            # the harness, so any failure is only printed.
            traceback.print_exc()

        return self.crashed

    def cleanup(self):
        """
        Cleanup all runner state
        """
        self.stop()
| mpl-2.0 |
blazek/QGIS | scripts/context_help_id.py | 25 | 2268 | #!/usr/bin/env python3
"""
/***************************************************************************
context_help_id.py
-------------------
begin : 2009-11-16
copyright : (C) 2009 by Gary E.Sherman
email : gsherman at mrcc.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
This script generates a unique context id based for use in the QGIS
context sensitive help system. It uses the SHA1 hash for the class name
and converts the first 12 characters to a unique integer.
To create a context id, pass the name of the QGIS class on the command line.
Example:
./context_help_id.py QgsAbout
This script requires Python 2.5 or higher (hashlib was introduced at 2.5).
NOTE: Due to a change in the way context ids are generated, ids
generated by the old method (Java hashCode function) will be different than
the id generated by the new method for the same class.
"""
import hashlib
import sys


def context_id(class_name):
    """Return the context-sensitive help id for a QGIS class name.

    The id is the first 12 hex digits of the SHA1 of the class name,
    interpreted as a decimal integer.

    :param class_name: name of the QGIS class (e.g. ``"QgsAbout"``)
    :returns: integer context id
    """
    # hashlib requires bytes on Python 3; class names are ASCII so
    # utf-8 encoding matches the historical raw-byte behavior.
    digest = hashlib.sha1(class_name.encode("utf-8"))
    # generate the context id by converting the first 12 characters of the hash
    # to decimal
    return int(digest.hexdigest()[:12], 16)


# check to see if a class name was specified and if so, create the context id
if __name__ == "__main__":
    if len(sys.argv) > 1:
        # print the result
        print(context_id(sys.argv[1]))
    else:
        # if no class name was specified, give a bit of help
        print("To generate a context sensitive help id, specify the QGIS class name on the command line")
| gpl-2.0 |
xaviercobain88/framework-python | build/lib.linux-i686-2.7/openerp/addons/sale_journal/sale_journal.py | 46 | 4031 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class sale_journal_invoice_type(osv.osv):
    """Configurable invoice type used to drive invoice grouping per partner."""
    _name = 'sale_journal.invoice.type'
    _description = 'Invoice Types'
    _columns = {
        'name': fields.char('Invoice Type', size=64, required=True),
        # 'active' hides the record instead of deleting it (standard OpenERP idiom).
        'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the invoice type without removing it."),
        'note': fields.text('Note'),
        # 'grouped' merges several orders/pickings into one invoice.
        'invoicing_method': fields.selection([('simple', 'Non grouped'), ('grouped', 'Grouped')], 'Invoicing method', required=True),
    }
    _defaults = {
        'active': True,
        'invoicing_method': 'simple'
    }
# Old-style OpenERP model registration.
sale_journal_invoice_type()
#==============================================
# sale journal inherit
#==============================================
class res_partner(osv.osv):
    """Extend partners with a default invoicing type (company-dependent property)."""
    _inherit = 'res.partner'
    _columns = {
        'property_invoice_type': fields.property(
            'sale_journal.invoice.type',
            type = 'many2one',
            relation = 'sale_journal.invoice.type',
            string = "Invoicing Type",
            view_load = True,
            group_name = "Accounting Properties",
            help = "This invoicing type will be used, by default, to invoice the current partner."),
    }

    def _commercial_fields(self, cr, uid, context=None):
        # Propagate the invoicing type from the commercial (parent) partner
        # to its contacts, like other accounting properties.
        return super(res_partner, self)._commercial_fields(cr, uid, context=context) + ['property_invoice_type']

# Old-style OpenERP model registration.
res_partner()
class picking(osv.osv):
    """Carry the sale order's invoice type on the base picking model."""
    _inherit = "stock.picking"
    _columns = {
        'invoice_type_id': fields.many2one('sale_journal.invoice.type', 'Invoice Type', readonly=True)
    }
picking()
class stock_picking_in(osv.osv):
    """Mirror the invoice-type field on incoming pickings (separate osv view model)."""
    _inherit = "stock.picking.in"
    _columns = {
        'invoice_type_id': fields.many2one('sale_journal.invoice.type', 'Invoice Type', readonly=True)
    }
stock_picking_in()
class stock_picking_out(osv.osv):
    """Mirror the invoice-type field on outgoing pickings (separate osv view model)."""
    _inherit = "stock.picking.out"
    _columns = {
        'invoice_type_id': fields.many2one('sale_journal.invoice.type', 'Invoice Type', readonly=True)
    }
stock_picking_out()
class sale(osv.osv):
    """Extend sale orders with an invoice type and propagate it to pickings."""
    _inherit = "sale.order"
    _columns = {
        'invoice_type_id': fields.many2one('sale_journal.invoice.type', 'Invoice Type', help="Generate invoice based on the selected option.")
    }

    def _prepare_order_picking(self, cr, uid, order, context=None):
        """Copy the order's invoice type onto the picking being created."""
        result = super(sale,self)._prepare_order_picking(cr, uid, order, context=context)
        result.update(invoice_type_id=order.invoice_type_id and order.invoice_type_id.id or False)
        return result

    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """On partner change, default the invoice type from the partner's property."""
        result = super(sale, self).onchange_partner_id(cr, uid, ids, part, context=context)
        if part:
            itype = self.pool.get('res.partner').browse(cr, uid, part, context=context).property_invoice_type
            if itype:
                result['value']['invoice_type_id'] = itype.id
        return result

# Old-style OpenERP model registration.
sale()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bitifirefly/edx-platform | cms/djangoapps/contentstore/tests/test_i18n.py | 133 | 3282 | from unittest import skip
from django.contrib.auth.models import User
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from contentstore.tests.utils import AjaxEnabledTestClient
class InternationalizationTest(ModuleStoreTestCase):
    """
    Tests to validate Internationalization.

    Verifies that the Studio home page renders the expected heading under
    the default language, an explicit ``Accept-Language: en`` header, and
    (disabled) the Esperanto-based dummy translation.
    """
    def setUp(self):
        """
        These tests need a user in the DB so that the django Test Client
        can log them in.
        They inherit from the ModuleStoreTestCase class so that the mongodb collection
        will be cleared out before each test case execution and deleted
        afterwards.
        """
        super(InternationalizationTest, self).setUp(create_user=False)

        self.uname = 'testuser'
        self.email = 'test+courses@edx.org'
        self.password = 'foo'

        # Create the user so we can log them in.
        self.user = User.objects.create_user(self.uname, self.email, self.password)

        # Note that we do not actually need to do anything
        # for registration if we directly mark them active.
        self.user.is_active = True
        # Staff has access to view all courses
        self.user.is_staff = True
        self.user.save()

        self.course_data = {
            'org': 'MITx',
            'number': '999',
            'display_name': 'Robot Super Course',
        }

    def test_course_plain_english(self):
        """Studio home renders the English heading with no language header."""
        self.client = AjaxEnabledTestClient()
        self.client.login(username=self.uname, password=self.password)
        resp = self.client.get_html('/home/')
        self.assertContains(resp,
                            '<h1 class="page-header">Studio Home</h1>',
                            status_code=200,
                            html=True)

    def test_course_explicit_english(self):
        """Studio home renders the English heading when Accept-Language is 'en'."""
        self.client = AjaxEnabledTestClient()
        self.client.login(username=self.uname, password=self.password)
        resp = self.client.get_html(
            '/home/',
            {},
            HTTP_ACCEPT_LANGUAGE='en',
        )
        self.assertContains(resp,
                            '<h1 class="page-header">Studio Home</h1>',
                            status_code=200,
                            html=True)

    # ****
    # NOTE:
    # ****
    #
    # This test will break when we replace this fake 'test' language
    # with actual Esperanto. This test will need to be updated with
    # actual Esperanto at that time.
    # Test temporarily disable since it depends on creation of dummy strings
    @skip
    def test_course_with_accents(self):
        """Studio home renders the dummy-translated heading under the 'eo' locale."""
        self.client = AjaxEnabledTestClient()
        self.client.login(username=self.uname, password=self.password)
        resp = self.client.get_html(
            '/home/',
            {},
            HTTP_ACCEPT_LANGUAGE='eo'
        )

        TEST_STRING = (
            u'<h1 class="title-1">'
            u'My \xc7\xf6\xfcrs\xe9s L#'
            u'</h1>'
        )

        self.assertContains(resp,
                            TEST_STRING,
                            status_code=200,
                            html=True)
| agpl-3.0 |
frac/lettuce | tests/functional/simple_features/2nd_feature_dir/step_definitions/sum_steps.py | 1 | 1202 | # -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010> Gabriel Falcão <gabriel@nacaolivre.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from nose.tools import assert_equals
from lettuce import step
from lettuce.terrain import world
from lettuce import after
# Raw string for the regex: '\d' in a plain literal is an invalid escape
# sequence (DeprecationWarning, future SyntaxError); the value is unchanged.
@step(r'I sum (\d+) and (\d+)')
def i_sum_x_and_y(step, x, y):
    """Store the sum of the two captured integers on the shared world."""
    world.sum = int(x) + int(y)
# Raw string for the regex: '\d' in a plain literal is an invalid escape
# sequence (DeprecationWarning, future SyntaxError); the value is unchanged.
@step(r'it should result in (\d+)')
def it_should_result_in_z(step, z):
    """Assert that the previously stored sum equals the captured integer."""
    assert_equals(world.sum, int(z))
@after.all
def clear_sum(total_results):
    """After the whole run, drop any sum left behind on the shared world."""
    try:
        del world.sum
    except AttributeError:
        # Nothing was stored; nothing to clear.
        pass
| gpl-3.0 |
argivaitv/argivaitv | plugin.video.phstreams/resources/lib/libraries/f4mproxy/utils/codec.py | 88 | 2613 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""Classes for reading/writing binary data (such as TLS records)."""
from .compat import *
class Writer(object):
    """Accumulates big-endian binary data (e.g. TLS record fields) in ``self.bytes``."""

    def __init__(self):
        # Growable output buffer.
        self.bytes = bytearray(0)

    def add(self, x, length):
        """Append *x* as a big-endian unsigned integer occupying *length* bytes.

        Bits of *x* that do not fit in *length* bytes are silently discarded.
        """
        chunk = bytearray(length)
        # Fill from the least-significant (rightmost) byte backwards.
        for pos in reversed(range(length)):
            chunk[pos] = x & 0xFF
            x >>= 8
        self.bytes += chunk

    def addFixSeq(self, seq, length):
        """Append every element of *seq*, each encoded in *length* bytes."""
        for element in seq:
            self.add(element, length)

    def addVarSeq(self, seq, length, lengthLength):
        """Append a length-prefixed sequence.

        The prefix (``lengthLength`` bytes) holds the total encoded byte
        count, followed by each element in *length* bytes.
        """
        self.add(len(seq) * length, lengthLength)
        for element in seq:
            self.add(element, length)
class Parser(object):
    """Sequential reader over a byte buffer, with TLS-style length bookkeeping.

    Raises ``SyntaxError`` whenever a read would run past the end of the
    buffer or a declared length is violated.
    """

    def __init__(self, bytes):
        self.bytes = bytes
        # Cursor of the next unread byte.
        self.index = 0

    def get(self, length):
        """Read and return a big-endian unsigned integer of *length* bytes."""
        end = self.index + length
        if end > len(self.bytes):
            raise SyntaxError()
        value = 0
        for offset in range(self.index, end):
            value = (value << 8) | self.bytes[offset]
        self.index = end
        return value

    def getFixBytes(self, lengthBytes):
        """Read and return the next *lengthBytes* raw bytes."""
        end = self.index + lengthBytes
        if end > len(self.bytes):
            raise SyntaxError()
        chunk = self.bytes[self.index:end]
        self.index = end
        return chunk

    def getVarBytes(self, lengthLength):
        """Read a *lengthLength*-byte length prefix, then that many raw bytes."""
        return self.getFixBytes(self.get(lengthLength))

    def getFixList(self, length, lengthList):
        """Read *lengthList* integers of *length* bytes each."""
        return [self.get(length) for _ in range(lengthList)]

    def getVarList(self, length, lengthLength):
        """Read a byte-count prefix, then that many bytes as *length*-byte integers."""
        totalBytes = self.get(lengthLength)
        if totalBytes % length != 0:
            # Declared byte count must be a whole number of elements.
            raise SyntaxError()
        return [self.get(length) for _ in range(totalBytes // length)]

    def startLengthCheck(self, lengthLength):
        """Read a length prefix and remember it for stop/atLengthCheck."""
        self.lengthCheck = self.get(lengthLength)
        self.indexCheck = self.index

    def setLengthCheck(self, length):
        """Set an expected length explicitly (without reading a prefix)."""
        self.lengthCheck = length
        self.indexCheck = self.index

    def stopLengthCheck(self):
        """Fail unless exactly the declared number of bytes was consumed."""
        if (self.index - self.indexCheck) != self.lengthCheck:
            raise SyntaxError()

    def atLengthCheck(self):
        """Return True when the declared span is fully consumed, False while
        inside it; raise ``SyntaxError`` if it was overrun."""
        consumed = self.index - self.indexCheck
        if consumed < self.lengthCheck:
            return False
        if consumed == self.lengthCheck:
            return True
        raise SyntaxError()
| gpl-2.0 |
knehez/edx-platform | openedx/core/djangoapps/user_api/tests/test_views.py | 16 | 67385 | """Tests for the user API at the HTTP request level. """
import datetime
import base64
import json
import re
from unittest import skipUnless, SkipTest
import ddt
import httpretty
from pytz import UTC
import mock
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.testcases import TransactionTestCase
from django.test.utils import override_settings
from social.apps.django_app.default.models import UserSocialAuth
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django_comment_common import models
from student.tests.factories import UserFactory
from third_party_auth.tests.testutil import simulate_running_pipeline, ThirdPartyAuthTestMixin
from third_party_auth.tests.utils import (
ThirdPartyOAuthTestMixin, ThirdPartyOAuthTestMixinFacebook, ThirdPartyOAuthTestMixinGoogle
)
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from ..accounts.api import get_account_settings
from ..accounts import (
NAME_MAX_LENGTH, EMAIL_MIN_LENGTH, EMAIL_MAX_LENGTH, PASSWORD_MIN_LENGTH, PASSWORD_MAX_LENGTH,
USERNAME_MIN_LENGTH, USERNAME_MAX_LENGTH
)
from ..models import UserOrgTag
from ..tests.factories import UserPreferenceFactory
from ..tests.test_constants import SORTED_COUNTRIES
TEST_API_KEY = "test_api_key"
USER_LIST_URI = "/user_api/v1/users/"
USER_PREFERENCE_LIST_URI = "/user_api/v1/user_prefs/"
ROLE_LIST_URI = "/user_api/v1/forum_roles/Moderator/users/"
@override_settings(EDX_API_KEY=TEST_API_KEY)
class ApiTestCase(TestCase):
    """Shared helpers for exercising the user API over HTTP.

    Provides API-key-authenticated request helpers plus custom assertions
    for response shapes and status codes.
    """

    LIST_URI = USER_LIST_URI

    def basic_auth(self, username, password):
        """Return the extra-headers dict for an HTTP Basic auth request."""
        return {'HTTP_AUTHORIZATION': 'Basic ' + base64.b64encode('%s:%s' % (username, password))}

    def request_with_auth(self, method, *args, **kwargs):
        """Issue a request of the given method to the given URI with the API key header"""
        return getattr(self.client, method)(*args, HTTP_X_EDX_API_KEY=TEST_API_KEY, **kwargs)

    def get_json(self, *args, **kwargs):
        """Make a request with the given args and return the parsed JSON response"""
        resp = self.request_with_auth("get", *args, **kwargs)
        self.assertHttpOK(resp)
        self.assertTrue(resp["Content-Type"].startswith("application/json"))
        return json.loads(resp.content)

    def get_uri_for_user(self, target_user):
        """Given a user object, get the URI for the corresponding resource"""
        users = self.get_json(USER_LIST_URI)["results"]
        for user in users:
            if user["id"] == target_user.id:
                return user["url"]
        # Fail loudly if the user is missing from the list endpoint.
        self.fail()

    def get_uri_for_pref(self, target_pref):
        """Given a user preference object, get the URI for the corresponding resource"""
        prefs = self.get_json(USER_PREFERENCE_LIST_URI)["results"]
        for pref in prefs:
            if (pref["user"]["id"] == target_pref.user.id and pref["key"] == target_pref.key):
                return pref["url"]
        # Fail loudly if the preference is missing from the list endpoint.
        self.fail()

    def assertAllowedMethods(self, uri, expected_methods):
        """Assert that the allowed methods for the given URI match the expected list"""
        resp = self.request_with_auth("options", uri)
        self.assertHttpOK(resp)
        allow_header = resp.get("Allow")
        self.assertIsNotNone(allow_header)
        allowed_methods = re.split('[^A-Z]+', allow_header)
        self.assertItemsEqual(allowed_methods, expected_methods)

    def assertSelfReferential(self, obj):
        """Assert that accessing the "url" entry in the given object returns the same object"""
        copy = self.get_json(obj["url"])
        self.assertEqual(obj, copy)

    def assertUserIsValid(self, user):
        """Assert that the given user result is valid"""
        self.assertItemsEqual(user.keys(), ["email", "id", "name", "username", "preferences", "url"])
        self.assertItemsEqual(
            user["preferences"].items(),
            [(pref.key, pref.value) for pref in self.prefs if pref.user.id == user["id"]]
        )
        self.assertSelfReferential(user)

    def assertPrefIsValid(self, pref):
        """Assert that the given preference result has the expected shape and links."""
        self.assertItemsEqual(pref.keys(), ["user", "key", "value", "url"])
        self.assertSelfReferential(pref)
        self.assertUserIsValid(pref["user"])

    def assertHttpOK(self, response):
        """Assert that the given response has the status code 200"""
        self.assertEqual(response.status_code, 200)

    def assertHttpForbidden(self, response):
        """Assert that the given response has the status code 403"""
        self.assertEqual(response.status_code, 403)

    def assertHttpBadRequest(self, response):
        """Assert that the given response has the status code 400"""
        self.assertEqual(response.status_code, 400)

    def assertHttpMethodNotAllowed(self, response):
        """Assert that the given response has the status code 405"""
        self.assertEqual(response.status_code, 405)

    def assertAuthDisabled(self, method, uri):
        """
        Assert that the Django rest framework does not interpret basic auth
        headers for views exposed to anonymous users as an attempt to authenticate.
        """
        # Django rest framework interprets basic auth headers
        # as an attempt to authenticate with the API.
        # We don't want this for views available to anonymous users.
        basic_auth_header = "Basic " + base64.b64encode('username:password')
        response = getattr(self.client, method)(uri, HTTP_AUTHORIZATION=basic_auth_header)
        self.assertNotEqual(response.status_code, 403)
class EmptyUserTestCase(ApiTestCase):
    """Sanity check: the user list endpoint handles an empty database."""

    def test_get_list_empty(self):
        """An empty DB yields a zero-count page with no results or links."""
        payload = self.get_json(self.LIST_URI)
        self.assertEqual(payload["count"], 0)
        self.assertIsNone(payload["next"])
        self.assertIsNone(payload["previous"])
        self.assertEqual(payload["results"], [])
class EmptyRoleTestCase(ApiTestCase):
    """Test that the endpoint supports empty result sets"""
    course_id = SlashSeparatedCourseKey.from_deprecated_string("org/course/run")
    LIST_URI = ROLE_LIST_URI + "?course_id=" + course_id.to_deprecated_string()

    def test_get_list_empty(self):
        """A course with no moderators yields a zero-count page."""
        payload = self.get_json(self.LIST_URI)
        self.assertEqual(payload["count"], 0)
        self.assertIsNone(payload["next"])
        self.assertIsNone(payload["previous"])
        self.assertEqual(payload["results"], [])
class UserApiTestCase(ApiTestCase):
    """Fixture base: five users, with two preferences on the first user and
    one on the second (keys "key0"/"key1")."""

    def setUp(self):
        super(UserApiTestCase, self).setUp()
        self.users = []
        for index in range(5):
            self.users.append(
                UserFactory.create(
                    email="test{0}@test.org".format(index),
                    profile__name="Test {0}".format(index)
                )
            )
        # Creation order matters: tests reference prefs by index.
        self.prefs = []
        for owner, key in ((self.users[0], "key0"),
                           (self.users[0], "key1"),
                           (self.users[1], "key0")):
            self.prefs.append(UserPreferenceFactory.create(user=owner, key=key))
class RoleTestCase(UserApiTestCase):
    """Exercise the forum-role list endpoint: auth, allowed methods, pagination."""

    course_id = SlashSeparatedCourseKey.from_deprecated_string("org/course/run")
    LIST_URI = ROLE_LIST_URI + "?course_id=" + course_id.to_deprecated_string()

    def setUp(self):
        """Give every fixture user the Moderator role for the test course."""
        super(RoleTestCase, self).setUp()
        (role, _) = models.Role.objects.get_or_create(
            name=models.FORUM_ROLE_MODERATOR,
            course_id=self.course_id
        )
        for user in self.users:
            user.roles.add(role)

    def test_options_list(self):
        """The list endpoint is read-only: OPTIONS/GET/HEAD only."""
        self.assertAllowedMethods(self.LIST_URI, ["OPTIONS", "GET", "HEAD"])

    def test_post_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("post", self.LIST_URI))

    def test_put_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("put", self.LIST_URI))

    def test_patch_list_not_allowed(self):
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_delete_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("delete", self.LIST_URI))

    def test_list_unauthorized(self):
        """Requests without the API key header are rejected."""
        self.assertHttpForbidden(self.client.get(self.LIST_URI))

    @override_settings(DEBUG=True)
    @override_settings(EDX_API_KEY=None)
    def test_debug_auth(self):
        """With DEBUG on and no API key configured, auth is bypassed."""
        self.assertHttpOK(self.client.get(self.LIST_URI))

    @override_settings(DEBUG=False)
    @override_settings(EDX_API_KEY=TEST_API_KEY)
    def test_basic_auth(self):
        # ensure that having basic auth headers in the mix does not break anything
        self.assertHttpOK(
            self.request_with_auth("get", self.LIST_URI,
                                   **self.basic_auth("someuser", "somepass")))
        self.assertHttpForbidden(
            self.client.get(self.LIST_URI, **self.basic_auth("someuser", "somepass")))

    def test_get_list_nonempty(self):
        """All five moderators come back in a single unpaginated page."""
        result = self.get_json(self.LIST_URI)
        users = result["results"]
        self.assertEqual(result["count"], len(self.users))
        self.assertEqual(len(users), len(self.users))
        self.assertIsNone(result["next"])
        self.assertIsNone(result["previous"])
        for user in users:
            self.assertUserIsValid(user)

    def test_required_parameter(self):
        """Omitting course_id is a 400 Bad Request."""
        response = self.request_with_auth("get", ROLE_LIST_URI)
        self.assertHttpBadRequest(response)

    def test_get_list_pagination(self):
        """page_size=3 splits the five users into a 3-page then a 2-page result,
        with working next/previous links."""
        first_page = self.get_json(self.LIST_URI, data={
            "page_size": 3,
            "course_id": self.course_id.to_deprecated_string(),
        })
        self.assertEqual(first_page["count"], 5)
        first_page_next_uri = first_page["next"]
        self.assertIsNone(first_page["previous"])
        first_page_users = first_page["results"]
        self.assertEqual(len(first_page_users), 3)

        second_page = self.get_json(first_page_next_uri)
        self.assertEqual(second_page["count"], 5)
        self.assertIsNone(second_page["next"])
        second_page_prev_uri = second_page["previous"]
        second_page_users = second_page["results"]
        self.assertEqual(len(second_page_users), 2)

        # The previous link of page 2 must reproduce page 1 exactly.
        self.assertEqual(self.get_json(second_page_prev_uri), first_page)
        for user in first_page_users + second_page_users:
            self.assertUserIsValid(user)
        # No user may appear on both pages.
        all_user_uris = [user["url"] for user in first_page_users + second_page_users]
        self.assertEqual(len(set(all_user_uris)), 5)
class UserViewSetTest(UserApiTestCase):
    """Exercise the user list and detail endpoints: auth, methods, pagination, shape."""

    LIST_URI = USER_LIST_URI

    def setUp(self):
        super(UserViewSetTest, self).setUp()
        # Detail URI of the first fixture user, resolved via the list endpoint.
        self.detail_uri = self.get_uri_for_user(self.users[0])

    # List view tests

    def test_options_list(self):
        """The list endpoint is read-only: OPTIONS/GET/HEAD only."""
        self.assertAllowedMethods(self.LIST_URI, ["OPTIONS", "GET", "HEAD"])

    def test_post_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("post", self.LIST_URI))

    def test_put_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("put", self.LIST_URI))

    def test_patch_list_not_allowed(self):
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_delete_list_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("delete", self.LIST_URI))

    def test_list_unauthorized(self):
        """Requests without the API key header are rejected."""
        self.assertHttpForbidden(self.client.get(self.LIST_URI))

    @override_settings(DEBUG=True)
    @override_settings(EDX_API_KEY=None)
    def test_debug_auth(self):
        """With DEBUG on and no API key configured, auth is bypassed."""
        self.assertHttpOK(self.client.get(self.LIST_URI))

    @override_settings(DEBUG=False)
    @override_settings(EDX_API_KEY=TEST_API_KEY)
    def test_basic_auth(self):
        # ensure that having basic auth headers in the mix does not break anything
        self.assertHttpOK(
            self.request_with_auth("get", self.LIST_URI,
                                   **self.basic_auth('someuser', 'somepass')))
        self.assertHttpForbidden(
            self.client.get(self.LIST_URI, **self.basic_auth('someuser', 'somepass')))

    def test_get_list_nonempty(self):
        """All five fixture users come back in a single unpaginated page."""
        result = self.get_json(self.LIST_URI)
        self.assertEqual(result["count"], 5)
        self.assertIsNone(result["next"])
        self.assertIsNone(result["previous"])
        users = result["results"]
        self.assertEqual(len(users), 5)
        for user in users:
            self.assertUserIsValid(user)

    def test_get_list_pagination(self):
        """page_size=2 pages correctly, and next/previous links round-trip."""
        first_page = self.get_json(self.LIST_URI, data={"page_size": 3})
        self.assertEqual(first_page["count"], 5)
        first_page_next_uri = first_page["next"]
        self.assertIsNone(first_page["previous"])
        first_page_users = first_page["results"]
        self.assertEqual(len(first_page_users), 3)

        second_page = self.get_json(first_page_next_uri)
        self.assertEqual(second_page["count"], 5)
        self.assertIsNone(second_page["next"])
        second_page_prev_uri = second_page["previous"]
        second_page_users = second_page["results"]
        self.assertEqual(len(second_page_users), 2)

        # The previous link of page 2 must reproduce page 1 exactly.
        self.assertEqual(self.get_json(second_page_prev_uri), first_page)
        for user in first_page_users + second_page_users:
            self.assertUserIsValid(user)
        # No user may appear on both pages.
        all_user_uris = [user["url"] for user in first_page_users + second_page_users]
        self.assertEqual(len(set(all_user_uris)), 5)

    # Detail view tests

    def test_options_detail(self):
        """The detail endpoint is read-only: OPTIONS/GET/HEAD only."""
        self.assertAllowedMethods(self.detail_uri, ["OPTIONS", "GET", "HEAD"])

    def test_post_detail_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("post", self.detail_uri))

    def test_put_detail_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("put", self.detail_uri))

    def test_patch_detail_not_allowed(self):
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_delete_detail_not_allowed(self):
        self.assertHttpMethodNotAllowed(self.request_with_auth("delete", self.detail_uri))

    def test_get_detail_unauthorized(self):
        self.assertHttpForbidden(self.client.get(self.detail_uri))

    def test_get_detail(self):
        """The detail payload contains exactly the expected fields and the
        user's own preferences."""
        user = self.users[1]
        uri = self.get_uri_for_user(user)
        self.assertEqual(
            self.get_json(uri),
            {
                "email": user.email,
                "id": user.id,
                "name": user.profile.name,
                "username": user.username,
                "preferences": dict([
                    (user_pref.key, user_pref.value)
                    for user_pref in self.prefs
                    if user_pref.user == user
                ]),
                "url": uri
            }
        )
class UserPreferenceViewSetTest(UserApiTestCase):
LIST_URI = USER_PREFERENCE_LIST_URI
def setUp(self):
super(UserPreferenceViewSetTest, self).setUp()
self.detail_uri = self.get_uri_for_pref(self.prefs[0])
# List view tests
def test_options_list(self):
self.assertAllowedMethods(self.LIST_URI, ["OPTIONS", "GET", "HEAD"])
def test_put_list_not_allowed(self):
self.assertHttpMethodNotAllowed(self.request_with_auth("put", self.LIST_URI))
def test_patch_list_not_allowed(self):
raise SkipTest("Django 1.4's test client does not support patch")
def test_delete_list_not_allowed(self):
self.assertHttpMethodNotAllowed(self.request_with_auth("delete", self.LIST_URI))
def test_list_unauthorized(self):
self.assertHttpForbidden(self.client.get(self.LIST_URI))
@override_settings(DEBUG=True)
@override_settings(EDX_API_KEY=None)
def test_debug_auth(self):
self.assertHttpOK(self.client.get(self.LIST_URI))
def test_get_list_nonempty(self):
result = self.get_json(self.LIST_URI)
self.assertEqual(result["count"], 3)
self.assertIsNone(result["next"])
self.assertIsNone(result["previous"])
prefs = result["results"]
self.assertEqual(len(prefs), 3)
for pref in prefs:
self.assertPrefIsValid(pref)
def test_get_list_filter_key_empty(self):
result = self.get_json(self.LIST_URI, data={"key": "non-existent"})
self.assertEqual(result["count"], 0)
self.assertEqual(result["results"], [])
def test_get_list_filter_key_nonempty(self):
result = self.get_json(self.LIST_URI, data={"key": "key0"})
self.assertEqual(result["count"], 2)
prefs = result["results"]
self.assertEqual(len(prefs), 2)
for pref in prefs:
self.assertPrefIsValid(pref)
self.assertEqual(pref["key"], "key0")
def test_get_list_filter_user_empty(self):
def test_id(user_id):
result = self.get_json(self.LIST_URI, data={"user": user_id})
self.assertEqual(result["count"], 0)
self.assertEqual(result["results"], [])
test_id(self.users[2].id)
# TODO: If the given id does not match a user, then the filter is a no-op
# test_id(42)
# test_id("asdf")
def test_get_list_filter_user_nonempty(self):
user_id = self.users[0].id
result = self.get_json(self.LIST_URI, data={"user": user_id})
self.assertEqual(result["count"], 2)
prefs = result["results"]
self.assertEqual(len(prefs), 2)
for pref in prefs:
self.assertPrefIsValid(pref)
self.assertEqual(pref["user"]["id"], user_id)
def test_get_list_pagination(self):
    """Walk the preference list two entries per page and verify page linkage."""
    page_one = self.get_json(self.LIST_URI, data={"page_size": 2})
    self.assertEqual(page_one["count"], 3)
    self.assertIsNone(page_one["previous"])
    prefs_one = page_one["results"]
    self.assertEqual(len(prefs_one), 2)
    page_two = self.get_json(page_one["next"])
    self.assertEqual(page_two["count"], 3)
    self.assertIsNone(page_two["next"])
    prefs_two = page_two["results"]
    self.assertEqual(len(prefs_two), 1)
    # Following "previous" from the second page must reproduce the first page.
    self.assertEqual(self.get_json(page_two["previous"]), page_one)
    for pref in prefs_one + prefs_two:
        self.assertPrefIsValid(pref)
    # The three entries across both pages must all be distinct.
    distinct_uris = {pref["url"] for pref in prefs_one + prefs_two}
    self.assertEqual(len(distinct_uris), 3)
# Detail view tests
def test_options_detail(self):
    """OPTIONS on the detail endpoint advertises only read-only methods."""
    self.assertAllowedMethods(self.detail_uri, ["OPTIONS", "GET", "HEAD"])
def test_post_detail_not_allowed(self):
    """POST on the detail endpoint is rejected even with valid credentials."""
    response = self.request_with_auth("post", self.detail_uri)
    self.assertHttpMethodNotAllowed(response)
def test_put_detail_not_allowed(self):
    """PUT on the detail endpoint is rejected even with valid credentials."""
    response = self.request_with_auth("put", self.detail_uri)
    self.assertHttpMethodNotAllowed(response)
def test_patch_detail_not_allowed(self):
    """Skipped: PATCH is unsupported by the Django 1.4 test client."""
    raise SkipTest("Django 1.4's test client does not support patch")
def test_delete_detail_not_allowed(self):
    """DELETE on the detail endpoint is rejected even with valid credentials."""
    response = self.request_with_auth("delete", self.detail_uri)
    self.assertHttpMethodNotAllowed(response)
def test_detail_unauthorized(self):
    """An unauthenticated GET on the detail endpoint is forbidden."""
    response = self.client.get(self.detail_uri)
    self.assertHttpForbidden(response)
def test_get_detail(self):
    """GET on a preference's detail URI returns its full serialization."""
    pref = self.prefs[1]
    uri = self.get_uri_for_pref(pref)
    # All preferences belonging to the same user, keyed by preference key.
    owner_prefs = {
        user_pref.key: user_pref.value
        for user_pref in self.prefs
        if user_pref.user == pref.user
    }
    expected = {
        "user": {
            "email": pref.user.email,
            "id": pref.user.id,
            "name": pref.user.profile.name,
            "username": pref.user.username,
            "preferences": owner_prefs,
            "url": self.get_uri_for_user(pref.user),
        },
        "key": pref.key,
        "value": pref.value,
        "url": uri,
    }
    self.assertEqual(self.get_json(uri), expected)
class PreferenceUsersListViewTest(UserApiTestCase):
    """Tests listing the users that have the preference "key0" set."""

    LIST_URI = "/user_api/v1/preferences/key0/users/"

    def test_options(self):
        """OPTIONS advertises only read-only methods."""
        self.assertAllowedMethods(self.LIST_URI, ["OPTIONS", "GET", "HEAD"])

    def test_put_not_allowed(self):
        """PUT is rejected even with valid credentials."""
        response = self.request_with_auth("put", self.LIST_URI)
        self.assertHttpMethodNotAllowed(response)

    def test_patch_not_allowed(self):
        """Skipped: PATCH is unsupported by the Django 1.4 test client."""
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_delete_not_allowed(self):
        """DELETE is rejected even with valid credentials."""
        response = self.request_with_auth("delete", self.LIST_URI)
        self.assertHttpMethodNotAllowed(response)

    def test_unauthorized(self):
        """Unauthenticated requests are forbidden."""
        self.assertHttpForbidden(self.client.get(self.LIST_URI))

    @override_settings(DEBUG=True)
    @override_settings(EDX_API_KEY=None)
    def test_debug_auth(self):
        """With DEBUG enabled and no API key configured, access is permitted."""
        self.assertHttpOK(self.client.get(self.LIST_URI))

    def test_get_basic(self):
        """Both users holding key0 come back in a single, unpaginated page."""
        payload = self.get_json(self.LIST_URI)
        self.assertEqual(payload["count"], 2)
        self.assertIsNone(payload["next"])
        self.assertIsNone(payload["previous"])
        users = payload["results"]
        self.assertEqual(len(users), 2)
        for user in users:
            self.assertUserIsValid(user)

    def test_get_pagination(self):
        """Walk the user list one entry per page and verify page linkage."""
        page_one = self.get_json(self.LIST_URI, data={"page_size": 1})
        self.assertEqual(page_one["count"], 2)
        self.assertIsNone(page_one["previous"])
        users_one = page_one["results"]
        self.assertEqual(len(users_one), 1)
        page_two = self.get_json(page_one["next"])
        self.assertEqual(page_two["count"], 2)
        self.assertIsNone(page_two["next"])
        users_two = page_two["results"]
        self.assertEqual(len(users_two), 1)
        # Following "previous" from the second page must reproduce the first page.
        self.assertEqual(self.get_json(page_two["previous"]), page_one)
        for user in users_one + users_two:
            self.assertUserIsValid(user)
        # The two pages must contain two distinct users.
        distinct_uris = {user["url"] for user in users_one + users_two}
        self.assertEqual(len(distinct_uris), 2)
@ddt.ddt
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class LoginSessionViewTest(ApiTestCase):
    """Tests for the login end-points of the user API. """

    USERNAME = "bob"
    EMAIL = "bob@example.com"
    PASSWORD = "password"

    def setUp(self):
        super(LoginSessionViewTest, self).setUp()
        self.url = reverse("user_api_login_session")

    @ddt.data("get", "post")
    def test_auth_disabled(self, method):
        """API-key authentication must be disabled on the session endpoint."""
        self.assertAuthDisabled(method, self.url)

    def test_allowed_methods(self):
        """The endpoint supports form retrieval (GET) and login (POST)."""
        self.assertAllowedMethods(self.url, ["GET", "POST", "HEAD", "OPTIONS"])

    def test_put_not_allowed(self):
        """PUT requests are rejected."""
        response = self.client.put(self.url)
        self.assertHttpMethodNotAllowed(response)

    def test_delete_not_allowed(self):
        """DELETE requests are rejected."""
        response = self.client.delete(self.url)
        self.assertHttpMethodNotAllowed(response)

    def test_patch_not_allowed(self):
        """Skipped: PATCH is unsupported by the Django 1.4 test client."""
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_login_form(self):
        """GET returns a form description with email, password and remember fields."""
        # Retrieve the login form
        response = self.client.get(self.url, content_type="application/json")
        self.assertHttpOK(response)

        # Verify that the form description matches what we expect
        form_desc = json.loads(response.content)
        self.assertEqual(form_desc["method"], "post")
        self.assertEqual(form_desc["submit_url"], self.url)
        self.assertEqual(form_desc["fields"], [
            {
                "name": "email",
                "defaultValue": "",
                "type": "email",
                "required": True,
                "label": "Email",
                "placeholder": "username@domain.com",
                "instructions": "The email address you used to register with {platform_name}".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "restrictions": {
                    "min_length": EMAIL_MIN_LENGTH,
                    "max_length": EMAIL_MAX_LENGTH
                },
                "errorMessages": {},
            },
            {
                "name": "password",
                "defaultValue": "",
                "type": "password",
                "required": True,
                "label": "Password",
                "placeholder": "",
                "instructions": "",
                "restrictions": {
                    "min_length": PASSWORD_MIN_LENGTH,
                    "max_length": PASSWORD_MAX_LENGTH
                },
                "errorMessages": {},
            },
            {
                "name": "remember",
                "defaultValue": False,
                "type": "checkbox",
                "required": False,
                "label": "Remember me",
                "placeholder": "",
                "instructions": "",
                "restrictions": {},
                "errorMessages": {},
            },
        ])

    def test_login(self):
        """A POST with valid credentials establishes an authenticated session."""
        # Create a test user
        UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)

        # Login
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "password": self.PASSWORD,
        })
        self.assertHttpOK(response)

        # Verify that we logged in successfully by accessing
        # a page that requires authentication.
        response = self.client.get(reverse("dashboard"))
        self.assertHttpOK(response)

    @ddt.data(
        (json.dumps(True), False),
        (json.dumps(False), True),
        (None, True),
    )
    @ddt.unpack
    def test_login_remember_me(self, remember_value, expire_at_browser_close):
        """The "remember" flag controls whether the session outlives the browser."""
        # Create a test user
        UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)

        # Login and remember me
        data = {
            "email": self.EMAIL,
            "password": self.PASSWORD,
        }
        # A missing "remember" key behaves like remember=False (session expires).
        if remember_value is not None:
            data["remember"] = remember_value

        response = self.client.post(self.url, data)
        self.assertHttpOK(response)

        # Verify that the session expiration was set correctly
        self.assertEqual(
            self.client.session.get_expire_at_browser_close(),
            expire_at_browser_close
        )

    def test_invalid_credentials(self):
        """A wrong password or unknown email is rejected with 403."""
        # Create a test user
        UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)

        # Invalid password
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "password": "invalid"
        })
        self.assertHttpForbidden(response)

        # Invalid email address
        response = self.client.post(self.url, {
            "email": "invalid@example.com",
            "password": self.PASSWORD,
        })
        self.assertHttpForbidden(response)

    def test_missing_login_params(self):
        """Requests missing email and/or password are rejected with 400."""
        # Create a test user
        UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)

        # Missing password
        response = self.client.post(self.url, {
            "email": self.EMAIL,
        })
        self.assertHttpBadRequest(response)

        # Missing email
        response = self.client.post(self.url, {
            "password": self.PASSWORD,
        })
        self.assertHttpBadRequest(response)

        # Missing both email and password
        response = self.client.post(self.url, {})
        # Bug fix: this response was previously never asserted on, so the
        # "missing both" case was not actually verified.
        self.assertHttpBadRequest(response)
@ddt.ddt
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class PasswordResetViewTest(ApiTestCase):
    """Tests of the user API's password reset endpoint. """

    def setUp(self):
        super(PasswordResetViewTest, self).setUp()
        self.url = reverse("user_api_password_reset")

    @ddt.data("get", "post")
    def test_auth_disabled(self, method):
        """API-key authentication must be disabled on this endpoint."""
        self.assertAuthDisabled(method, self.url)

    def test_allowed_methods(self):
        """Only safe HTTP methods are advertised."""
        self.assertAllowedMethods(self.url, ["GET", "HEAD", "OPTIONS"])

    def test_put_not_allowed(self):
        """PUT requests are rejected."""
        resp = self.client.put(self.url)
        self.assertHttpMethodNotAllowed(resp)

    def test_delete_not_allowed(self):
        """DELETE requests are rejected."""
        resp = self.client.delete(self.url)
        self.assertHttpMethodNotAllowed(resp)

    def test_patch_not_allowed(self):
        """Skipped: PATCH is unsupported by the Django 1.4 test client."""
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_password_reset_form(self):
        """GET returns the expected password-reset form description."""
        # Retrieve the password reset form
        resp = self.client.get(self.url, content_type="application/json")
        self.assertHttpOK(resp)

        # Verify that the form description matches what we expect
        form_desc = json.loads(resp.content)
        self.assertEqual(form_desc["method"], "post")
        self.assertEqual(form_desc["submit_url"], reverse("password_change_request"))
        expected_email_field = {
            "name": "email",
            "defaultValue": "",
            "type": "email",
            "required": True,
            "label": "Email",
            "placeholder": "username@domain.com",
            "instructions": "The email address you used to register with {platform_name}".format(
                platform_name=settings.PLATFORM_NAME
            ),
            "restrictions": {
                "min_length": EMAIL_MIN_LENGTH,
                "max_length": EMAIL_MAX_LENGTH
            },
            "errorMessages": {},
        }
        self.assertEqual(form_desc["fields"], [expected_email_field])
@ddt.ddt
@skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class RegistrationViewTest(ThirdPartyAuthTestMixin, ApiTestCase):
    """Tests for the registration end-points of the User API. """

    # Show full diffs on failure; the expected form payloads below are large.
    maxDiff = None

    # Canned registration values shared across the tests below.
    USERNAME = "bob"
    EMAIL = "bob@example.com"
    PASSWORD = "password"
    NAME = "Bob Smith"
    EDUCATION = "m"
    YEAR_OF_BIRTH = "1998"
    ADDRESS = "123 Fake Street"
    CITY = "Springfield"
    COUNTRY = "us"
    GOALS = "Learn all the things!"

    def setUp(self):
        super(RegistrationViewTest, self).setUp()
        self.url = reverse("user_api_registration")

    @ddt.data("get", "post")
    def test_auth_disabled(self, method):
        """API-key authentication must be disabled on the registration endpoint."""
        self.assertAuthDisabled(method, self.url)

    def test_allowed_methods(self):
        """The endpoint supports form retrieval (GET) and submission (POST)."""
        self.assertAllowedMethods(self.url, ["GET", "POST", "HEAD", "OPTIONS"])

    def test_put_not_allowed(self):
        """PUT requests are rejected."""
        response = self.client.put(self.url)
        self.assertHttpMethodNotAllowed(response)

    def test_delete_not_allowed(self):
        """DELETE requests are rejected."""
        response = self.client.delete(self.url)
        self.assertHttpMethodNotAllowed(response)

    def test_patch_not_allowed(self):
        """Skipped: PATCH is unsupported by the Django 1.4 test client."""
        raise SkipTest("Django 1.4's test client does not support patch")

    def test_register_form_default_fields(self):
        """With no extra fields configured, the form has email/name/username/password."""
        no_extra_fields_setting = {}

        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"email",
                u"type": u"email",
                u"required": True,
                u"label": u"Email",
                u"placeholder": u"username@domain.com",
                u"restrictions": {
                    "min_length": EMAIL_MIN_LENGTH,
                    "max_length": EMAIL_MAX_LENGTH
                },
            }
        )

        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"name",
                u"type": u"text",
                u"required": True,
                u"label": u"Full name",
                u"placeholder": u"Jane Doe",
                u"instructions": u"Needed for any certificates you may earn",
                u"restrictions": {
                    "max_length": 255
                },
            }
        )

        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"name": u"username",
                u"type": u"text",
                u"required": True,
                u"label": u"Public username",
                u"placeholder": u"JaneDoe",
                u"instructions": u"The name that will identify you in your courses - <strong>(cannot be changed later)</strong>",
                u"restrictions": {
                    "min_length": USERNAME_MIN_LENGTH,
                    "max_length": USERNAME_MAX_LENGTH
                },
            }
        )

        self._assert_reg_field(
            no_extra_fields_setting,
            {
                u"placeholder": "",
                u"name": u"password",
                u"type": u"password",
                u"required": True,
                u"label": u"Password",
                u"restrictions": {
                    'min_length': PASSWORD_MIN_LENGTH,
                    'max_length': PASSWORD_MAX_LENGTH
                    # 'min_length': account_api.PASSWORD_MIN_LENGTH,
                    # 'max_length': account_api.PASSWORD_MAX_LENGTH
                },
            }
        )

    def test_register_form_third_party_auth_running(self):
        """While a third-party-auth pipeline runs, the form pre-fills from the provider."""
        no_extra_fields_setting = {}
        self.configure_google_provider(enabled=True)
        with simulate_running_pipeline(
            "openedx.core.djangoapps.user_api.views.third_party_auth.pipeline",
            "google-oauth2", email="bob@example.com",
            fullname="Bob", username="Bob123"
        ):
            # Password field should be hidden
            self._assert_reg_field(
                no_extra_fields_setting,
                {
                    "name": "password",
                    "type": "hidden",
                    "required": False,
                }
            )

            # Email should be filled in
            self._assert_reg_field(
                no_extra_fields_setting,
                {
                    u"name": u"email",
                    u"defaultValue": u"bob@example.com",
                    u"type": u"email",
                    u"required": True,
                    u"label": u"Email",
                    u"placeholder": u"username@domain.com",
                    u"restrictions": {
                        "min_length": EMAIL_MIN_LENGTH,
                        "max_length": EMAIL_MAX_LENGTH
                    },
                }
            )

            # Full name should be filled in
            self._assert_reg_field(
                no_extra_fields_setting,
                {
                    u"name": u"name",
                    u"defaultValue": u"Bob",
                    u"type": u"text",
                    u"required": True,
                    u"label": u"Full name",
                    u"placeholder": u"Jane Doe",
                    u"instructions": u"Needed for any certificates you may earn",
                    u"restrictions": {
                        "max_length": NAME_MAX_LENGTH,
                    }
                }
            )

            # Username should be filled in
            self._assert_reg_field(
                no_extra_fields_setting,
                {
                    u"name": u"username",
                    u"defaultValue": u"Bob123",
                    u"type": u"text",
                    u"required": True,
                    u"label": u"Public username",
                    u"placeholder": u"JaneDoe",
                    u"instructions": u"The name that will identify you in your courses - <strong>(cannot be changed later)</strong>",
                    u"restrictions": {
                        "min_length": USERNAME_MIN_LENGTH,
                        "max_length": USERNAME_MAX_LENGTH
                    }
                }
            )

    def test_register_form_level_of_education(self):
        """An optional level-of-education select renders with the full option list."""
        self._assert_reg_field(
            {"level_of_education": "optional"},
            {
                "name": "level_of_education",
                "type": "select",
                "required": False,
                "label": "Highest level of education completed",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "p", "name": "Doctorate"},
                    {"value": "m", "name": "Master's or professional degree"},
                    {"value": "b", "name": "Bachelor's degree"},
                    {"value": "a", "name": "Associate degree"},
                    {"value": "hs", "name": "Secondary/high school"},
                    {"value": "jhs", "name": "Junior secondary/junior high/middle school"},
                    {"value": "el", "name": "Elementary/primary school"},
                    {"value": "none", "name": "None"},
                    {"value": "other", "name": "Other"},
                ],
            }
        )

    def test_register_form_gender(self):
        """An optional gender select renders with the expected options."""
        self._assert_reg_field(
            {"gender": "optional"},
            {
                "name": "gender",
                "type": "select",
                "required": False,
                "label": "Gender",
                "options": [
                    {"value": "", "name": "--", "default": True},
                    {"value": "m", "name": "Male"},
                    {"value": "f", "name": "Female"},
                    {"value": "o", "name": "Other"},
                ],
            }
        )

    def test_register_form_year_of_birth(self):
        """The year-of-birth select offers the last 120 years, newest first."""
        this_year = datetime.datetime.now(UTC).year  # pylint: disable=maybe-no-member
        year_options = (
            [{"value": "", "name": "--", "default": True}] + [
                {"value": unicode(year), "name": unicode(year)}
                for year in range(this_year, this_year - 120, -1)
            ]
        )
        self._assert_reg_field(
            {"year_of_birth": "optional"},
            {
                "name": "year_of_birth",
                "type": "select",
                "required": False,
                "label": "Year of birth",
                "options": year_options,
            }
        )

    def test_registration_form_mailing_address(self):
        """An optional mailing-address textarea renders."""
        self._assert_reg_field(
            {"mailing_address": "optional"},
            {
                "name": "mailing_address",
                "type": "textarea",
                "required": False,
                "label": "Mailing address",
            }
        )

    def test_registration_form_goals(self):
        """An optional goals textarea renders with a platform-specific label."""
        self._assert_reg_field(
            {"goals": "optional"},
            {
                "name": "goals",
                "type": "textarea",
                "required": False,
                "label": "Tell us why you're interested in {platform_name}".format(
                    platform_name=settings.PLATFORM_NAME
                )
            }
        )

    def test_registration_form_city(self):
        """An optional city text field renders."""
        self._assert_reg_field(
            {"city": "optional"},
            {
                "name": "city",
                "type": "text",
                "required": False,
                "label": "City",
            }
        )

    def test_registration_form_country(self):
        """A required country select renders with the sorted country list."""
        country_options = (
            [{"name": "--", "value": "", "default": True}] +
            [
                {"value": country_code, "name": unicode(country_name)}
                for country_code, country_name in SORTED_COUNTRIES
            ]
        )
        self._assert_reg_field(
            {"country": "required"},
            {
                "label": "Country",
                "name": "country",
                "type": "select",
                "required": True,
                "options": country_options,
                "errorMessages": {
                    "required": "Please select your Country."
                },
            }
        )

    @override_settings(
        MKTG_URLS={"ROOT": "https://www.test.com/", "HONOR": "honor"},
    )
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": True})
    def test_registration_honor_code_mktg_site_enabled(self):
        """With the marketing site on, the honor-code link uses the absolute MKTG URL."""
        self._assert_reg_field(
            {"honor_code": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"https://www.test.com/honor\">Terms of Service and Honor Code</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"https://www.test.com/honor\">Terms of Service and Honor Code</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

    @override_settings(MKTG_URLS_LINK_MAP={"HONOR": "honor"})
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": False})
    def test_registration_honor_code_mktg_site_disabled(self):
        """With the marketing site off, the honor-code link falls back to a relative URL."""
        self._assert_reg_field(
            {"honor_code": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"/honor\">Terms of Service and Honor Code</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"/honor\">Terms of Service and Honor Code</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

    @override_settings(MKTG_URLS={
        "ROOT": "https://www.test.com/",
        "HONOR": "honor",
        "TOS": "tos",
    })
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": True})
    def test_registration_separate_terms_of_service_mktg_site_enabled(self):
        """With separate ToS configured (marketing site on), two checkboxes render."""
        # Honor code field should say ONLY honor code,
        # not "terms of service and honor code"
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"https://www.test.com/honor\">Honor Code</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"https://www.test.com/honor\">Honor Code</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

        # Terms of service field should also be present
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"https://www.test.com/tos\">Terms of Service</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "terms_of_service",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"https://www.test.com/tos\">Terms of Service</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

    @override_settings(MKTG_URLS_LINK_MAP={"HONOR": "honor", "TOS": "tos"})
    @mock.patch.dict(settings.FEATURES, {"ENABLE_MKTG_SITE": False})
    def test_registration_separate_terms_of_service_mktg_site_disabled(self):
        """With separate ToS configured (marketing site off), relative links are used."""
        # Honor code field should say ONLY honor code,
        # not "terms of service and honor code"
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"/honor\">Honor Code</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "honor_code",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"/honor\">Honor Code</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

        # Terms of service field should also be present
        self._assert_reg_field(
            {"honor_code": "required", "terms_of_service": "required"},
            {
                "label": "I agree to the {platform_name} <a href=\"/tos\">Terms of Service</a>.".format(
                    platform_name=settings.PLATFORM_NAME
                ),
                "name": "terms_of_service",
                "defaultValue": False,
                "type": "checkbox",
                "required": True,
                "errorMessages": {
                    "required": "You must agree to the {platform_name} <a href=\"/tos\">Terms of Service</a>.".format(
                        platform_name=settings.PLATFORM_NAME
                    )
                }
            }
        )

    @override_settings(REGISTRATION_EXTRA_FIELDS={
        "level_of_education": "optional",
        "gender": "optional",
        "year_of_birth": "optional",
        "mailing_address": "optional",
        "goals": "optional",
        "city": "optional",
        "country": "required",
        "honor_code": "required",
    })
    def test_field_order(self):
        """Fields render in the fixed order: account fields first, then extras."""
        response = self.client.get(self.url)
        self.assertHttpOK(response)

        # Verify that all fields render in the correct order
        form_desc = json.loads(response.content)
        field_names = [field["name"] for field in form_desc["fields"]]
        self.assertEqual(field_names, [
            "email",
            "name",
            "username",
            "password",
            "city",
            "country",
            "gender",
            "year_of_birth",
            "level_of_education",
            "mailing_address",
            "goals",
            "honor_code",
        ])

    def test_register(self):
        """A minimal valid POST creates an inactive account and logs the user in."""
        # Create a new registration
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)
        # Registration sets the marketing login/user-info cookies.
        self.assertIn(settings.EDXMKTG_LOGGED_IN_COOKIE_NAME, self.client.cookies)
        self.assertIn(settings.EDXMKTG_USER_INFO_COOKIE_NAME, self.client.cookies)

        user = User.objects.get(username=self.USERNAME)
        account_settings = get_account_settings(user)
        self.assertEqual(self.USERNAME, account_settings["username"])
        self.assertEqual(self.EMAIL, account_settings["email"])
        # Account stays inactive until the activation email is confirmed.
        self.assertFalse(account_settings["is_active"])
        self.assertEqual(self.NAME, account_settings["name"])

        # Verify that we've been logged in
        # by trying to access a page that requires authentication
        response = self.client.get(reverse("dashboard"))
        self.assertHttpOK(response)

    @override_settings(REGISTRATION_EXTRA_FIELDS={
        "level_of_education": "optional",
        "gender": "optional",
        "year_of_birth": "optional",
        "mailing_address": "optional",
        "goals": "optional",
        "country": "required",
    })
    def test_register_with_profile_info(self):
        """Optional demographic fields submitted at registration land on the account."""
        # Register, providing lots of demographic info
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "level_of_education": self.EDUCATION,
            "mailing_address": self.ADDRESS,
            "year_of_birth": self.YEAR_OF_BIRTH,
            "goals": self.GOALS,
            "country": self.COUNTRY,
            "honor_code": "true",
        })
        self.assertHttpOK(response)

        # Verify the user's account
        user = User.objects.get(username=self.USERNAME)
        account_settings = get_account_settings(user)
        self.assertEqual(account_settings["level_of_education"], self.EDUCATION)
        self.assertEqual(account_settings["mailing_address"], self.ADDRESS)
        # The year is submitted as a string but stored as an integer.
        self.assertEqual(account_settings["year_of_birth"], int(self.YEAR_OF_BIRTH))
        self.assertEqual(account_settings["goals"], self.GOALS)
        self.assertEqual(account_settings["country"], self.COUNTRY)

    def test_activation_email(self):
        """Registering sends exactly one activation email to the new address."""
        # Register, which should trigger an activation email
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)

        # Verify that the activation email was sent
        self.assertEqual(len(mail.outbox), 1)
        sent_email = mail.outbox[0]
        self.assertEqual(sent_email.to, [self.EMAIL])
        self.assertEqual(sent_email.subject, "Activate Your edX Account")
        self.assertIn(
            u"activating your {platform} account".format(platform=settings.PLATFORM_NAME),
            sent_email.body
        )

    @ddt.data(
        {"email": ""},
        {"email": "invalid"},
        {"name": ""},
        {"username": ""},
        {"username": "a"},
        {"password": ""},
    )
    def test_register_invalid_input(self, invalid_fields):
        """Any single invalid field turns an otherwise-valid request into a 400."""
        # Initially, the field values are all valid
        data = {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
        }

        # Override the valid fields, making the input invalid
        data.update(invalid_fields)

        # Attempt to create the account, expecting an error response
        response = self.client.post(self.url, data)
        self.assertHttpBadRequest(response)

    @override_settings(REGISTRATION_EXTRA_FIELDS={"country": "required"})
    @ddt.data("email", "name", "username", "password", "country")
    def test_register_missing_required_field(self, missing_field):
        """Omitting any required field (including a required extra) yields a 400."""
        data = {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "country": self.COUNTRY,
        }

        del data[missing_field]

        # Send a request missing a field
        response = self.client.post(self.url, data)
        self.assertHttpBadRequest(response)

    def test_register_duplicate_email(self):
        """Re-using an email yields a 409 with a user-facing conflict message."""
        # Register the first user
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)

        # Try to create a second user with the same email address
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": "Someone Else",
            "username": "someone_else",
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertEqual(response.status_code, 409)
        response_json = json.loads(response.content)
        self.assertEqual(
            response_json,
            {
                "email": [{
                    "user_message": (
                        "It looks like {} belongs to an existing account. "
                        "Try again with a different email address."
                    ).format(
                        self.EMAIL
                    )
                }]
            }
        )

    def test_register_duplicate_username(self):
        """Re-using a username yields a 409 with a user-facing conflict message."""
        # Register the first user
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)

        # Try to create a second user with the same username
        response = self.client.post(self.url, {
            "email": "someone+else@example.com",
            "name": "Someone Else",
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertEqual(response.status_code, 409)
        response_json = json.loads(response.content)
        self.assertEqual(
            response_json,
            {
                "username": [{
                    "user_message": (
                        "It looks like {} belongs to an existing account. "
                        "Try again with a different username."
                    ).format(
                        self.USERNAME
                    )
                }]
            }
        )

    def test_register_duplicate_username_and_email(self):
        """Conflicts on both username and email are both reported in the 409 body."""
        # Register the first user
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": self.NAME,
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertHttpOK(response)

        # Try to create a second user with the same username
        response = self.client.post(self.url, {
            "email": self.EMAIL,
            "name": "Someone Else",
            "username": self.USERNAME,
            "password": self.PASSWORD,
            "honor_code": "true",
        })
        self.assertEqual(response.status_code, 409)
        response_json = json.loads(response.content)
        self.assertEqual(
            response_json,
            {
                "username": [{
                    "user_message": (
                        "It looks like {} belongs to an existing account. "
                        "Try again with a different username."
                    ).format(
                        self.USERNAME
                    )
                }],
                "email": [{
                    "user_message": (
                        "It looks like {} belongs to an existing account. "
                        "Try again with a different email address."
                    ).format(
                        self.EMAIL
                    )
                }]
            }
        )

    def test_missing_fields(self):
        """Missing username and password produce per-field messages in a 400 body."""
        response = self.client.post(
            self.url,
            {
                "email": self.EMAIL,
                "name": self.NAME,
                "honor_code": "true",
            }
        )
        self.assertEqual(response.status_code, 400)
        response_json = json.loads(response.content)
        self.assertEqual(
            response_json,
            {
                "username": [{"user_message": "Username must be minimum of two characters long"}],
                "password": [{"user_message": "A valid password is required"}],
            }
        )

    def _assert_reg_field(self, extra_fields_setting, expected_field):
        """Retrieve the registration form description from the server and
        verify that it contains the expected field.

        Args:
            extra_fields_setting (dict): Override the Django setting controlling
                which extra fields are displayed in the form.
            expected_field (dict): The field definition we expect to find in the form.

        Raises:
            AssertionError
        """
        # Add in fields that are always present
        defaults = [
            ("label", ""),
            ("instructions", ""),
            ("placeholder", ""),
            ("defaultValue", ""),
            ("restrictions", {}),
            ("errorMessages", {}),
        ]
        for key, value in defaults:
            if key not in expected_field:
                expected_field[key] = value

        # Retrieve the registration form description
        with override_settings(REGISTRATION_EXTRA_FIELDS=extra_fields_setting):
            response = self.client.get(self.url)
            self.assertHttpOK(response)

        # Verify that the form description matches what we'd expect
        form_desc = json.loads(response.content)

        # Search the form for this field
        actual_field = None
        for field in form_desc["fields"]:
            if field["name"] == expected_field["name"]:
                actual_field = field
                break

        self.assertIsNot(
            actual_field, None,
            msg="Could not find field {name}".format(name=expected_field["name"])
        )

        # Compare each expected key individually for readable failure messages.
        for key, value in expected_field.iteritems():
            self.assertEqual(
                expected_field[key], actual_field[key],
                msg=u"Expected {expected} for {key} but got {actual} instead".format(
                    key=key,
                    expected=expected_field[key],
                    actual=actual_field[key]
                )
            )
@httpretty.activate
@ddt.ddt
class ThirdPartyRegistrationTestMixin(ThirdPartyOAuthTestMixin):
"""
Tests for the User API registration endpoint with 3rd party authentication.
"""
def setUp(self):
super(ThirdPartyRegistrationTestMixin, self).setUp(create_user=False)
self.url = reverse('user_api_registration')
def data(self, user=None):
"""Returns the request data for the endpoint."""
return {
"provider": self.BACKEND,
"access_token": self.access_token,
"client_id": self.client_id,
"honor_code": "true",
"country": "US",
"username": user.username if user else "test_username",
"name": user.first_name if user else "test name",
"email": user.email if user else "test@test.com",
}
def _assert_existing_user_error(self, response):
"""Assert that the given response was an error with the given status_code and error code."""
self.assertEqual(response.status_code, 409)
errors = json.loads(response.content)
for conflict_attribute in ["username", "email"]:
self.assertIn(conflict_attribute, errors)
self.assertIn("belongs to an existing account", errors[conflict_attribute][0]["user_message"])
self.assertNotIn("partial_pipeline", self.client.session)
def _assert_access_token_error(self, response, expected_error_message):
"""Assert that the given response was an error for the access_token field with the given error message."""
self.assertEqual(response.status_code, 400)
response_json = json.loads(response.content)
self.assertEqual(
response_json,
{"access_token": [{"user_message": expected_error_message}]}
)
self.assertNotIn("partial_pipeline", self.client.session)
def _verify_user_existence(self, user_exists, social_link_exists, user_is_active=None, username=None):
"""Verifies whether the user object exists."""
users = User.objects.filter(username=(username if username else "test_username"))
self.assertEquals(users.exists(), user_exists)
if user_exists:
self.assertEquals(users[0].is_active, user_is_active)
self.assertEqual(
UserSocialAuth.objects.filter(user=users[0], provider=self.BACKEND).exists(),
social_link_exists
)
else:
self.assertEquals(UserSocialAuth.objects.count(), 0)
def test_success(self):
self._verify_user_existence(user_exists=False, social_link_exists=False)
self._setup_provider_response(success=True)
response = self.client.post(self.url, self.data())
self.assertEqual(response.status_code, 200)
self._verify_user_existence(user_exists=True, social_link_exists=True, user_is_active=False)
def test_unlinked_active_user(self):
user = UserFactory()
response = self.client.post(self.url, self.data(user))
self._assert_existing_user_error(response)
self._verify_user_existence(
user_exists=True, social_link_exists=False, user_is_active=True, username=user.username
)
def test_unlinked_inactive_user(self):
user = UserFactory(is_active=False)
response = self.client.post(self.url, self.data(user))
self._assert_existing_user_error(response)
self._verify_user_existence(
user_exists=True, social_link_exists=False, user_is_active=False, username=user.username
)
def test_user_already_registered(self):
self._setup_provider_response(success=True)
user = UserFactory()
UserSocialAuth.objects.create(user=user, provider=self.BACKEND, uid=self.social_uid)
response = self.client.post(self.url, self.data(user))
self._assert_existing_user_error(response)
self._verify_user_existence(
user_exists=True, social_link_exists=True, user_is_active=True, username=user.username
)
def test_social_user_conflict(self):
    """A token whose uid is already linked to another account is rejected."""
    self._setup_provider_response(success=True)
    other = UserFactory()
    UserSocialAuth.objects.create(user=other, provider=self.BACKEND, uid=self.social_uid)

    resp = self.client.post(self.url, self.data())

    self._assert_access_token_error(resp, "The provided access_token is already associated with another user.")
    # The conflicting account keeps its link; no new user is created.
    self._verify_user_existence(
        user_exists=True,
        social_link_exists=True,
        user_is_active=True,
        username=other.username,
    )
def test_invalid_token(self):
    """A token the provider rejects must not create any user."""
    self._setup_provider_response(success=False)
    resp = self.client.post(self.url, self.data())
    self._assert_access_token_error(resp, "The provided access_token is not valid.")
    self._verify_user_existence(user_exists=False, social_link_exists=False)
def test_missing_token(self):
    """Omitting access_token entirely yields the dedicated error message."""
    payload = self.data()
    del payload["access_token"]

    resp = self.client.post(self.url, payload)

    self._assert_access_token_error(
        resp,
        "An access_token is required when passing value ({}) for provider.".format(self.BACKEND)
    )
    self._verify_user_existence(user_exists=False, social_link_exists=False)
@skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class TestFacebookRegistrationView(
    ThirdPartyRegistrationTestMixin, ThirdPartyOAuthTestMixinFacebook, TransactionTestCase
):
    """Registration endpoint tests exercising the Facebook OAuth backend."""

    def test_social_auth_exception(self):
        """
        Facebook's API can answer with a bare JSON ``false`` body (see the
        do_auth method in social.backends.facebook.py); the endpoint must
        treat that as an invalid token rather than crash.
        """
        self._setup_provider_response_with_body(200, json.dumps("false"))
        resp = self.client.post(self.url, self.data())
        self._assert_access_token_error(resp, "The provided access_token is not valid.")
        self._verify_user_existence(user_exists=False, social_link_exists=False)
@skipUnless(settings.FEATURES.get("ENABLE_THIRD_PARTY_AUTH"), "third party auth not enabled")
class TestGoogleRegistrationView(
    ThirdPartyRegistrationTestMixin, ThirdPartyOAuthTestMixinGoogle, TransactionTestCase
):
    """Registration endpoint tests exercising the Google OAuth backend."""
    # The mixins supply every test case; the docstring serves as the class
    # body, so no redundant `pass` is needed.
@ddt.ddt
class UpdateEmailOptInTestCase(ApiTestCase, ModuleStoreTestCase):
    """Tests the UpdateEmailOptInPreference view."""

    USERNAME = "steve"
    EMAIL = "steve@isawesome.com"
    PASSWORD = "steveopolis"

    def setUp(self):
        """Create a course and user, then log in."""
        super(UpdateEmailOptInTestCase, self).setUp()
        self.course = CourseFactory.create()
        self.user = UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.url = reverse("preferences_email_opt_in")

    @ddt.data(
        (u"True", u"True"),
        (u"true", u"True"),
        (u"TrUe", u"True"),
        (u"Banana", u"False"),
        (u"strawberries", u"False"),
        (u"False", u"False"),
    )
    @ddt.unpack
    def test_update_email_opt_in(self, opt, result):
        """Any value other than a case-insensitive "true" is stored as "False"."""
        response = self.client.post(self.url, {
            "course_id": unicode(self.course.id),
            "email_opt_in": opt
        })
        self.assertHttpOK(response)
        preference = UserOrgTag.objects.get(
            user=self.user, org=self.course.id.org, key="email-optin"
        )
        # assertEquals is a deprecated alias; use assertEqual consistently.
        self.assertEqual(preference.value, result)

    @ddt.data(
        (True, False),
        (False, True),
        (False, False)
    )
    @ddt.unpack
    def test_update_email_opt_in_wrong_params(self, use_course_id, use_opt_in):
        """Requests missing either required parameter are a 400."""
        params = {}
        if use_course_id:
            params["course_id"] = unicode(self.course.id)
        if use_opt_in:
            params["email_opt_in"] = u"True"
        response = self.client.post(self.url, params)
        self.assertHttpBadRequest(response)

    def test_update_email_opt_in_inactive_user(self):
        """Test that an inactive user can still update their email optin preference."""
        self.user.is_active = False
        self.user.save()
        response = self.client.post(self.url, {
            "course_id": unicode(self.course.id),
            "email_opt_in": u"True"
        })
        self.assertHttpOK(response)
        preference = UserOrgTag.objects.get(
            user=self.user, org=self.course.id.org, key="email-optin"
        )
        # assertEquals is a deprecated alias; use assertEqual consistently.
        self.assertEqual(preference.value, u"True")

    def test_update_email_opt_with_invalid_course_key(self):
        """
        Test that with invalid key it returns bad request
        and does not update the email optin preference.
        """
        response = self.client.post(self.url, {
            "course_id": 'invalid',
            "email_opt_in": u"True"
        })
        self.assertHttpBadRequest(response)
        with self.assertRaises(UserOrgTag.DoesNotExist):
            UserOrgTag.objects.get(user=self.user, org=self.course.id.org, key="email-optin")
| agpl-3.0 |
navigator8972/pylqr | pylqr_trajctrl.py | 1 | 8885 | """
LQR based trajectory controller
"""
from __future__ import print_function
try:
import jax.numpy as np
except ImportError:
import numpy as np
import pylqr
class PyLQR_TrajCtrl():
    """
    LQR-based trajectory controller.

    The plant is a second-order linear system (positions + velocities) driven
    by acceleration commands.  The desired trajectory is supplied either as a
    set of reference waypoints with tracking weight matrices, or as a general
    cost function (derivatives via finite differences or autograd).
    """
    def __init__(self, R=.01, dt=0.01, use_autograd=False):
        # control penalty, smoothness of the trajectory
        self.R_ = R
        # integration step of the double-integrator dynamics
        self.dt_ = dt
        # relative weight of the terminal velocity regularization
        self.Q_vel_ratio_ = 10

        # callbacks for plant dynamics and cost, handed to the iLQR solver
        self.plant_dyn_ = None
        self.plant_dyn_dx_ = None
        self.plant_dyn_du_ = None
        self.cost_ = None
        self.cost_dx_ = None
        self.cost_du_ = None
        self.cost_dxx_ = None
        self.cost_duu_ = None
        self.cost_dux_ = None

        self.ilqr_ = None
        self.use_autograd = use_autograd
        return

    def build_ilqr_general_solver(self, cost_func, n_dims=2, T=100):
        """Prepare an iLQR solver for an arbitrary cost function.

        Args:
            cost_func: callable (x, u, t, aux) -> scalar cost.
            n_dims: dimensionality of the position space.
            T: horizon length (number of time steps).
        """
        self.T_ = T
        self.n_dims_ = n_dims

        # build dynamics: second-order linear dynamical system
        # x_{t+1} = A x_t + B u_t with state [pos; vel] and input accel
        self.A_ = np.eye(self.n_dims_*2)
        self.A_[0:self.n_dims_, self.n_dims_:] = np.eye(self.n_dims_) * self.dt_
        self.B_ = np.zeros((self.n_dims_*2, self.n_dims_))
        self.B_[self.n_dims_:, :] = np.eye(self.n_dims_) * self.dt_

        self.plant_dyn_ = lambda x, u, t, aux: np.dot(self.A_, x) + np.dot(self.B_, u)
        self.plant_dyn_dx_ = lambda x, u, t, aux: self.A_
        self.plant_dyn_du_ = lambda x, u, t, aux: self.B_
        self.cost_ = cost_func

        # build an iLQR solver based on given functions...
        self.ilqr_ = pylqr.PyLQR_iLQRSolver(T=self.T_-1, plant_dyn=self.plant_dyn_, cost=self.cost_, use_autograd=self.use_autograd)
        return

    def build_ilqr_tracking_solver(self, ref_pnts, weight_mats):
        """Prepare an iLQR solver that tracks the given reference waypoints.

        Args:
            ref_pnts: sequence of T reference points (each of length n_dims).
            weight_mats: tracking weight matrices; the last one is repeated
                if fewer than T are given.
        """
        self.T_ = len(ref_pnts)
        self.n_dims_ = len(ref_pnts[0])

        self.ref_array = np.copy(ref_pnts)
        self.weight_array = [mat for mat in weight_mats]
        # clone weight mats if there are not enough weight mats
        for i in range(self.T_ - len(self.weight_array)):
            self.weight_array.append(self.weight_array[-1])

        # build dynamics: second-order linear dynamical system
        self.A_ = np.eye(self.n_dims_*2)
        self.A_[0:self.n_dims_, self.n_dims_:] = np.eye(self.n_dims_) * self.dt_
        self.B_ = np.zeros((self.n_dims_*2, self.n_dims_))
        self.B_[self.n_dims_:, :] = np.eye(self.n_dims_) * self.dt_

        self.plant_dyn_ = lambda x, u, t, aux: np.dot(self.A_, x) + np.dot(self.B_, u)

        # build cost functions, quadratic ones
        def tmp_cost_func(x, u, t, aux):
            err = x[0:self.n_dims_] - self.ref_array[t]
            # autograd does not allow A.dot(B)
            cost = np.dot(np.dot(err, self.weight_array[t]), err) + np.sum(u**2) * self.R_
            # BUGFIX: t runs over 0..T_-1, so the terminal step is t == T_-1;
            # the original condition `t > self.T_-1` could never be true and
            # the terminal velocity regularization was dead code.
            if t >= self.T_-1:
                # regularize velocity for the termination point
                # autograd does not allow self increment
                cost = cost + np.sum(x[self.n_dims_:]**2) * self.R_ * self.Q_vel_ratio_
            return cost

        self.cost_ = tmp_cost_func
        self.ilqr_ = pylqr.PyLQR_iLQRSolver(T=self.T_-1, plant_dyn=self.plant_dyn_, cost=self.cost_, use_autograd=self.use_autograd)

        if not self.use_autograd:
            # supply analytic derivatives when autograd is not used
            self.plant_dyn_dx_ = lambda x, u, t, aux: self.A_
            self.plant_dyn_du_ = lambda x, u, t, aux: self.B_

            def tmp_cost_func_dx(x, u, t, aux):
                err = x[0:self.n_dims_] - self.ref_array[t]
                grad = np.concatenate([2*err.dot(self.weight_array[t]), np.zeros(self.n_dims_)])
                if t >= self.T_-1:
                    # BUGFIX: x is 1-D; the original `x[self.n_dims_, :]`
                    # tuple-indexed it and raised IndexError.
                    grad[self.n_dims_:] = grad[self.n_dims_:] + 2 * self.R_ * self.Q_vel_ratio_ * x[self.n_dims_:]
                return grad

            self.cost_dx_ = tmp_cost_func_dx
            self.cost_du_ = lambda x, u, t, aux: 2 * self.R_ * u

            def tmp_cost_func_dxx(x, u, t, aux):
                hessian = np.zeros((2*self.n_dims_, 2*self.n_dims_))
                hessian[0:self.n_dims_, 0:self.n_dims_] = 2 * self.weight_array[t]
                if t >= self.T_-1:
                    hessian[self.n_dims_:, self.n_dims_:] = 2 * np.eye(self.n_dims_) * self.R_ * self.Q_vel_ratio_
                return hessian

            self.cost_dxx_ = tmp_cost_func_dxx
            self.cost_duu_ = lambda x, u, t, aux: 2 * self.R_ * np.eye(self.n_dims_)
            self.cost_dux_ = lambda x, u, t, aux: np.zeros((self.n_dims_, 2*self.n_dims_))

            # hand the analytic derivatives to the iLQR solver
            self.ilqr_.plant_dyn_dx = self.plant_dyn_dx_
            self.ilqr_.plant_dyn_du = self.plant_dyn_du_
            self.ilqr_.cost_dx = self.cost_dx_
            self.ilqr_.cost_du = self.cost_du_
            self.ilqr_.cost_dxx = self.cost_dxx_
            self.ilqr_.cost_duu = self.cost_duu_
            self.ilqr_.cost_dux = self.cost_dux_
        return

    def synthesize_trajectory(self, x0, u_array=None, n_itrs=50, tol=1e-6, verbose=True):
        """Run iLQR from initial position x0 and return the position trajectory.

        Args:
            x0: initial position (length n_dims); initial velocity is zero.
            u_array: optional initial guess for the control sequence.
            n_itrs, tol, verbose: forwarded to the iLQR iteration.

        Returns:
            (T, n_dims) array of optimized positions, or None if no solver
            has been built yet.
        """
        if self.ilqr_ is None:
            print('No iLQR solver has been prepared.')
            return None

        # initialization doesn't matter as global optimality can be guaranteed?
        if u_array is None:
            u_init = [np.zeros(self.n_dims_) for i in range(self.T_-1)]
        else:
            u_init = u_array
        x_init = np.concatenate([x0, np.zeros(self.n_dims_)])
        res = self.ilqr_.ilqr_iterate(x_init, u_init, n_itrs=n_itrs, tol=tol, verbose=verbose)
        return res['x_array_opt'][:, 0:self.n_dims_]
"""
Test case, 2D trajectory to track a sinuoidal..
"""
import matplotlib.pyplot as plt
def PyLQR_TrajCtrl_TrackingTest():
    """Track a sinusoid with the iLQR tracking controller from several
    randomly perturbed start points, plotting reference and solutions."""
    n_pnts = 200
    x_coord = np.linspace(0.0, 2*np.pi, n_pnts)
    y_coord = np.sin(x_coord)
    # concatenate to have an (n_pnts, 2) reference trajectory
    ref_traj = np.array([x_coord, y_coord]).T
    weight_mats = [np.eye(ref_traj.shape[1])*100]

    # draw reference trajectory
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # BUGFIX: ax.hold(True) was deprecated in matplotlib 2.0 and removed in
    # 3.0; "hold" is the default behavior, so the call is simply dropped.
    ax.plot(ref_traj[:, 0], ref_traj[:, 1], '.-k', linewidth=3.5)
    ax.plot([ref_traj[0, 0]], [ref_traj[0, 1]], '*k', markersize=16)

    lqr_traj_ctrl = PyLQR_TrajCtrl(use_autograd=True)
    lqr_traj_ctrl.build_ilqr_tracking_solver(ref_traj, weight_mats)

    n_queries = 5
    for _ in range(n_queries):
        # start from a perturbed point
        x0 = ref_traj[0, :] + np.random.rand(2) * 2 - 1
        syn_traj = lqr_traj_ctrl.synthesize_trajectory(x0)
        # plot it
        ax.plot(syn_traj[:, 0], syn_traj[:, 1], linewidth=3.5)

    plt.show()
    return
def PyLQR_TrajCtrl_GeneralTest():
    """Drive the general-cost iLQR solver on a weighted sum of attractive
    RBF wells and plot the synthesized trajectories over the cost contours."""
    # RBF centers at the four corners of a square
    rbf_basis = np.array([
        [-1.0, -1.0],
        [-1.0, 1.0],
        [1.0, -1.0],
        [1.0, 1.0]
    ])

    gamma = 1
    T = 100
    R = 1e-5

    # One attractive (negative) RBF well per center plus a control penalty.
    # Each center is bound as a default argument so every closure keeps its
    # own basis point (equivalent to writing the four lambdas out by hand).
    rbf_funcs = [
        (lambda x, u, t, aux, c=center:
            -np.exp(-gamma*np.linalg.norm(x[0:2]-c)**2) + R*np.linalg.norm(u)**2)
        for center in rbf_basis
    ]

    weights = np.array([.75, .5, .25, 1.])
    weights = weights / (np.sum(weights) + 1e-6)

    def cost_func(x, u, t, aux):
        return np.sum(weights * np.array([basis_func(x, u, t, aux) for basis_func in rbf_funcs]))

    lqr_traj_ctrl = PyLQR_TrajCtrl(use_autograd=True)
    lqr_traj_ctrl.build_ilqr_general_solver(cost_func, n_dims=rbf_basis.shape[1], T=T)

    # evaluate the cost landscape on a grid for the contour plot
    n_eval_pnts = 50
    coords = np.linspace(-2.5, 2.5, n_eval_pnts)
    xv, yv = np.meshgrid(coords, coords)
    z = [[cost_func(np.array([xv[i, j], yv[i, j]]), np.zeros(2), None, None)
          for j in range(yv.shape[1])] for i in range(len(xv))]

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.contour(xv, yv, z)

    n_queries = 5
    u_array = np.random.rand(2, T-1).T * 2 - 1
    for _ in range(n_queries):
        # start from a perturbed point
        x0 = np.random.rand(2) * 4 - 2
        syn_traj = lqr_traj_ctrl.synthesize_trajectory(x0, u_array)
        # plot it
        ax.plot([x0[0]], [x0[1]], 'k*', markersize=12.0)
        ax.plot(syn_traj[:, 0], syn_traj[:, 1], linewidth=3.5)

    plt.show()
    return
if __name__ == '__main__':
# PyLQR_TrajCtrl_TrackingTest()
PyLQR_TrajCtrl_GeneralTest() | gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.