repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
uclouvain/osis | education_group/tests/views/mini_training/test_content_read.py | 1 | 5481 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import List
from django.http import HttpResponseForbidden, HttpResponse, HttpResponseNotFound
from django.test import TestCase
from django.urls import reverse
from base.models.enums.education_group_types import MiniTrainingType
from base.tests.factories.academic_year import AcademicYearFactory
from base.tests.factories.person import PersonWithPermissionsFactory
from base.tests.factories.user import UserFactory
from base.utils.urls import reverse_with_get
from education_group.ddd.domain.group import Group
from education_group.views.mini_training.common_read import Tab
from program_management.tests.factories.education_group_version import EducationGroupVersionFactory
from program_management.tests.factories.element import ElementGroupYearFactory
class TestMiniTrainingReadContent(TestCase):
    """End-to-end tests of the mini-training 'content' read view."""

    @classmethod
    def setUpTestData(cls):
        # Shared fixtures: one current academic year, a person holding the
        # view permission, and a DEEPENING mini-training rooted on LBIOL100P.
        cls.academic_year = AcademicYearFactory(current=True)
        cls.person = PersonWithPermissionsFactory('view_educationgroup')
        cls.mini_training_version = EducationGroupVersionFactory(
            offer__acronym="APPBIOL",
            offer__academic_year=cls.academic_year,
            offer__education_group_type__name=MiniTrainingType.DEEPENING.name,
            root_group__partial_acronym="LBIOL100P",
            root_group__acronym="APPBIOL",
            root_group__academic_year=cls.academic_year,
            root_group__education_group_type__name=MiniTrainingType.DEEPENING.name,
        )
        ElementGroupYearFactory(group_year=cls.mini_training_version.root_group)
        cls.url = reverse('mini_training_content', kwargs={'year': cls.academic_year.year, 'code': 'LBIOL100P'})

    def setUp(self) -> None:
        self.client.force_login(self.person.user)

    def test_case_user_not_logged(self):
        self.client.logout()
        resp = self.client.get(self.url)
        self.assertRedirects(resp, '/login/?next={}'.format(self.url))

    def test_case_user_have_not_permission(self):
        # A fresh user without 'view_educationgroup' must be rejected.
        self.client.force_login(UserFactory())
        resp = self.client.get(self.url)
        self.assertEqual(resp.status_code, HttpResponseForbidden.status_code)
        self.assertTemplateUsed(resp, "access_denied.html")

    def test_case_mini_training_not_exists(self):
        unknown_url = reverse('mini_training_content', kwargs={'year': 2018, 'code': 'DUMMY100B'})
        self.assertEqual(self.client.get(unknown_url).status_code, HttpResponseNotFound.status_code)

    def test_assert_template_used(self):
        resp = self.client.get(self.url)
        self.assertEqual(resp.status_code, HttpResponse.status_code)
        self.assertTemplateUsed(resp, "education_group_app/mini_training/content_read.html")

    def test_assert_context_data(self):
        resp = self.client.get(self.url)
        self.assertEqual(resp.context['person'], self.person)
        self.assertEqual(resp.context['group_year'], self.mini_training_version.root_group)
        self.assertEqual(resp.context['update_permission_name'], "base.change_link_data")
        root_element_id = self.mini_training_version.root_group.element.pk
        expected_tree_json_url = reverse_with_get(
            'tree_json',
            kwargs={'root_id': root_element_id},
            get={"path": str(root_element_id)}
        )
        self.assertEqual(resp.context['tree_json_url'], expected_tree_json_url)
        self.assertIsInstance(resp.context['group'], Group)
        self.assertIsInstance(resp.context['children'], List)

    def test_assert_active_tabs_is_content_and_others_are_not_active(self):
        tab_urls = self.client.get(self.url).context['tab_urls']
        self.assertTrue(tab_urls[Tab.CONTENT]['active'])
        for inactive in (Tab.IDENTIFICATION, Tab.UTILIZATION, Tab.GENERAL_INFO,
                         Tab.SKILLS_ACHIEVEMENTS, Tab.ACCESS_REQUIREMENTS):
            self.assertFalse(tab_urls[inactive]['active'])
| agpl-3.0 |
DirtyUnicorns/android_external_chromium_org | ppapi/generators/generator.py | 165 | 1776 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import traceback
# Note: some of these files are imported to register cmdline options.
from idl_generator import Generator
from idl_option import ParseOptions
from idl_outfile import IDLOutFile
from idl_parser import ParseFiles
from idl_c_header import HGen
from idl_thunk import TGen
from idl_gen_pnacl import PnaclGen
def Main(args):
  """Drive the PPAPI IDL generators over the given command-line arguments.

  Returns a process exit code: 0 on success, 1 on parse errors or when the
  no-argument default run is attempted from the wrong working directory.
  """
  # If no arguments are provided, assume we are trying to rebuild the
  # C headers with warnings off.
  try:
    if not args:
      # Default argument set: regenerate C headers, PNaCl shims and thunks
      # over the full range of IDL revisions.
      args = [
          '--wnone', '--cgen', '--range=start,end',
          '--pnacl', '--pnaclshim',
          '../native_client/src/untrusted/pnacl_irt_shim/pnacl_shim.c',
          '--tgen',
      ]
      current_dir = os.path.abspath(os.getcwd())
      script_dir = os.path.abspath(os.path.dirname(__file__))
      if current_dir != script_dir:
        # The default paths above are relative to this script's directory,
        # so refuse the default run from anywhere else.
        print '\nIncorrect CWD, default run skipped.'
        print 'When running with no arguments set CWD to the scripts directory:'
        print '\t' + script_dir + '\n'
        print 'This ensures correct default paths and behavior.\n'
        return 1
    filenames = ParseOptions(args)
    ast = ParseFiles(filenames)
    if ast.errors:
      print 'Found %d errors. Aborting build.\n' % ast.errors
      return 1
    return Generator.Run(ast)
  except SystemExit, ec:
    # A generator may call sys.exit() itself; propagate its exit code.
    print 'Exiting with %d' % ec.code
    sys.exit(ec.code)
  except:
    # Any other failure: dump the traceback plus the exact invocation so
    # the build log is debuggable, then fall through (returns None).
    typeinfo, value, tb = sys.exc_info()
    traceback.print_exception(typeinfo, value, tb)
    print 'Called with: ' + ' '.join(sys.argv)

if __name__ == '__main__':
  sys.exit(Main(sys.argv[1:]))
| bsd-3-clause |
carlo0815/enigma2-plugins | webinterface/src/WebComponents/Sources/Timer.py | 1 | 17251 | Version = '$Header$';
from enigma import eServiceReference, eEPGCache
from Components.config import config
from Components.Sources.Source import Source
from Components.TimerSanityCheck import TimerSanityCheck
from Components.UsageConfig import preferredInstantRecordPath, preferredTimerPath
from ServiceReference import ServiceReference
from RecordTimer import RecordTimerEntry, RecordTimer, AFTEREVENT, parseEvent
from xml.sax.saxutils import unescape
from time import time, strftime, localtime, mktime
class Timer(Source):
	"""Web interface Source wrapping enigma2's RecordTimer.

	An instance is constructed with one of the operation codes below;
	handleCommand() then routes the request parameters to the matching
	method and stores a (success, message) tuple in ``self.res``.
	"""
	# Operation codes selected at construction time.
	LIST = 0
	ADDBYID = 1
	ADD = 2
	DEL = 3
	TVBROWSER = 4
	CHANGE = 5
	WRITE = 6
	RECNOW = 7
	CLEANUP = 8

	def __init__(self, session, func=LIST):
		self.func = func
		Source.__init__(self)
		self.session = session
		self.recordtimer = session.nav.RecordTimer
		self.epgcache = eEPGCache.getInstance()
		# Default result reported until handleCommand() has run.
		self.res = ( False, "unknown command" )

	def handleCommand(self, cmd):
		"""Dispatch ``cmd`` (a dict of request parameters) to the operation
		selected by ``self.func``; the (success, message) result is stored
		in ``self.res``. Mutating operations persist the timer list.

		NOTE(review): ``is`` identity comparison on these small ints relies
		on CPython's small-integer caching; it behaves like ``==`` here.
		"""
		if self.func is self.ADDBYID:
			self.res = self.addTimerByEventID(cmd)
			self.writeTimerList()
		elif self.func is self.ADD:
			self.res = self.editTimer(cmd)
			self.writeTimerList()
		elif self.func is self.TVBROWSER:
			self.res = self.tvBrowser(cmd)
			self.writeTimerList()
		elif self.func is self.DEL:
			self.res = self.delTimer(cmd)
			self.writeTimerList()
		elif self.func is self.CHANGE:
			self.res = self.editTimer(cmd)
			self.writeTimerList()
		elif self.func is self.WRITE:
			self.res = self.writeTimerList(force=True)
		elif self.func is self.RECNOW:
			self.res = self.recordNow(cmd)
		elif self.func is self.CLEANUP:
			self.res = self.cleanupTimer()
		else:
			self.res = ( False, _("Unknown function: '%s'") % (self.func) )

	def cleanupTimer(self):
		"""Remove finished timers from the record timer list."""
		print "[WebComponents.Timer] cleanupTimer"
		self.session.nav.RecordTimer.cleanup()
		return ( True, _("List of Timers has been cleaned") )

	def delTimer(self, param):
		"""Delete the timer matching the given service reference and exact
		begin/end times.

		Required param keys: sRef, begin, end.
		Returns a (success, message) tuple.
		"""
		print "[WebComponents.Timer] delTimer"
		# All three identifying parameters are mandatory.
		if 'sRef' in param:
			service_ref = ServiceReference(param['sRef'])
		else:
			return ( False, _("Missing Parameter: sRef") )
		if 'begin' in param:
			begin = int(float(param['begin']))
		else:
			return ( False, _("Missing Parameter: begin") )
		if 'end' in param:
			end = int(float(param['end']))
		else:
			return ( False, _("Missing Parameter: end") )
		try:
			# Search both active and already-processed timers for an exact match.
			for timer in self.recordtimer.timer_list + self.recordtimer.processed_timers:
				if str(timer.service_ref) == str(service_ref) and int(timer.begin) == begin and int(timer.end) == end:
					self.recordtimer.removeEntry(timer)
					return True, _("The timer '%s' has been deleted successfully") % (timer.name)
		except Exception:
			# NOTE(review): broad except hides the actual failure cause from
			# the caller; only a generic message is returned.
			return ( False, _("The timer has NOT been deleted") )
		return False, "No matching Timer found"
	def tvBrowser(self, param):
		""" The URL's for the tvBrowser-Capture-Driver are:
		http://dreambox/web/tvbrowser? +
		To add something:
		&command=add&&year={year}&month={month}&day={day}&shour={start_hour}&smin={start_minute}&ehour={end_hour}&emin={end_minute}&sRef={urlencode(channel_name_external, "utf8")}&name={urlencode(title, "utf8")}&description={urlencode(descr, "utf8")}&dirname={dirname}&tags={urlencode("tag1 tag2...", "utf8")}&afterevent=0&eit=&disabled=0&justplay=0&repeated=0
		to zap for some time:
		&command=add&&year={year}&month={month}&day={day}&shour={start_hour}&smin={start_minute}&ehour={end_hour}&emin={end_minute}&sRef={urlencode(channel_name_external, "utf8")}&name={urlencode(title, "utf8")}&description={urlencode(descr, "utf8")}&dirname={dirname}&tags={urlencode("tag1 tag2...", "utf8")}&afterevent=0&eit=&disabled=0&justplay=1&repeated=0
		to delete something:
		&command=del&&year={year}&month={month}&day={day}&shour={start_hour}&smin={start_minute}&ehour={end_hour}&emin={end_minute}&sRef={urlencode(channel_name_external, "utf8")}
		"""
		print "[WebComponents.Timer] tvbrowser"
		# Individual date/time fields sent by tvBrowser.
		listDate = ('year', 'month', 'day', 'shour', 'smin', 'ehour', 'emin')
		for element in listDate:
			if param[element] is None:
				# Fall back to the 's'-prefixed variant of the field
				# (e.g. 'syear'); for 'shour'/'smin' this probes
				# 'sshour'/'ssmin' -- NOTE(review): verify against the driver.
				if param['s' + element] is None:
					return ( False, "%s missing" % element )
				else:
					param[element] = int(param['s' + element])
			else:
				param[element] = int(param[element])
		# Collapse the split fields into begin/end unix timestamps.
		param['begin'] = int(mktime((param['year'], param['month'], param['day'], param['shour'], param['smin'], 0, 0, 0, -1)))
		param['end'] = int(mktime((param['year'], param['month'], param['day'], param['ehour'], param['emin'], 0, 0, 0, -1)))
		if param['end'] < param['begin']:
			# End before begin: the recording crosses midnight, push end one day.
			param['end'] += 86400
		for element in listDate:
			del param[element]
		if param['sRef'] is None:
			return ( False, "Missing Parameter: sRef" )
		else:
			# tvBrowser may send "name|serviceref"; keep only the reference part.
			takeApart = param['sRef'].split('|')
			if len(takeApart) > 1:
				param['sRef'] = takeApart[1]
		repeated = int(param.get('repeated') or 0)
		if repeated == 0:
			# Sum the per-weekday flags into the repeat bitmask.
			for element in ("mo", "tu", "we", "th", "fr", "sa", "su", "ms", "mf"):
				if element in param:
					number = param[element] or 0
					del param[element]
					repeated = repeated + int(number)
		if repeated > 127:
			# 7 weekday bits -> maximum value 127.
			repeated = 127
		param['repeated'] = repeated
		# Dispatch on the requested sub-command.
		if param['command'] == "add":
			del param['command']
			return self.editTimer(param)
		elif param['command'] == "del":
			del param['command']
			return self.delTimer(param)
		elif param['command'] == "change":
			del param['command']
			return self.editTimer(param)
		else:
			return ( False, "Unknown command: '%s'" % param['command'] )

	def recordNow(self, param):
		"""Start an instant recording of the currently playing service.

		``param`` is a plain command string: "undefinitely"/"infinite"
		records for a fixed 10 hours, anything else limits the recording
		to the end of the current EPG event.
		Returns a (success, message) tuple.
		"""
		limitEvent = True
		if param == "undefinitely" or param == "infinite":
			ret = (True, "Infinite Instant recording started")
			limitEvent = False
		else:
			ret = ( True, "Instant record for current Event started" )
		serviceref = ServiceReference(self.session.nav.getCurrentlyPlayingServiceReference().toString())
		event = None
		try:
			service = self.session.nav.getCurrentService()
			event = service.info().getEvent(0)
		except Exception:
			# Best effort: no EPG event just means we fall back to defaults.
			print "[Webcomponents.Timer] recordNow Exception!"
		# Fallback values used when no EPG event is available.
		begin = time()
		end = begin + 3600 * 10
		name = "instant record"
		description = ""
		eventid = 0
		if event is not None:
			# Take name/description/event id (and optionally the end time)
			# from the current EPG event.
			curEvent = parseEvent(event)
			name = curEvent[2]
			description = curEvent[3]
			eventid = curEvent[4]
			if limitEvent:
				end = curEvent[1]
		else:
			if limitEvent:
				ret = ( False, "No event found! Not recording!" )
		if ret[0]:
			location = preferredInstantRecordPath()
			timer = RecordTimerEntry(serviceref, begin, end, name, description, eventid, False, False, 0, dirname=location)
			# Instant recordings are not persisted across restarts.
			timer.dontSave = True
			recRet = self.recordtimer.record(timer)
			if recRet is not None:
				# a conflict is rather unlikely, but this can also indicate a non-recordable service
				ret = (False, "Timer conflict detected! Not recording!" )
		return ret
#===============================================================================
# This Function can add a new or edit an exisiting Timer.
# When the Parameter "deleteOldOnSave" is not set, a new Timer will be added.
# Otherwise, and if the parameters channelOld, beginOld and endOld are set,
# an existing timer with corresponding values will be changed.
#===============================================================================
def editTimer(self, param):
print "[WebComponents.Timer] editTimer"
#OK first we need to parse all of your Parameters
#For some of them (like afterEvent or justplay) we can use default values
#for others (the serviceReference or the Begin/End time of the timer
#we have to quit if they are not set/have illegal values
if 'sRef' not in param:
return ( False, _("Missing Parameter: sRef") )
service_ref = ServiceReference(param['sRef'])
repeated = int(param.get('repeated') or 0)
if 'begin' not in param:
return ( False, _("Missing Parameter: begin") )
begin = int(float(param['begin']))
if 'end' not in param:
return ( False, _("Missing Parameter: end") )
end = int(float(param['end']))
tm = int( time() )
if tm <= begin:
pass
elif tm > begin and tm < end and repeated == 0:
pass
elif repeated == 0:
return ( False, _("Illegal Parameter value for Parameter begin : '%s'") % begin )
if 'applyMargin' in param:
if param['applyMargin'] == "1":
begin -= config.recording.margin_before.value * 60
end += config.recording.margin_after.value * 60
if 'name' not in param:
return ( False, _("Missing Parameter: name") )
name = param['name']
if 'description' not in param:
return ( False, _("Missing Parameter: description") )
description = param['description'].replace("\n", " ")
eit = param.get("eit", None)
if eit is None or eit.strip() == "":
eit = 0
else:
try: eit = int(eit)
except ValueError: return ( False, _("Illegal Parameter value for Parameter eit : '%s'") % eit )
print "[WebComponents.Sources.Timer]: eit=%d" %eit
if eit != 0:
#check if the given event exists, if it doesn't the timer may be already running
epgcache = eEPGCache.getInstance()
event = epgcache.lookupEventId(eServiceReference(param['sRef']), eit)
if event:
eit = event.getEventId()
#TODO add some subservice handling
else:
print "[WebComponents.Sources.Timer] event not found, will be ignored"
disabled = False #Default to: Enabled
if 'disabled' in param:
if param['disabled'] == "1":
disabled = True
else:
#TODO - maybe we can give the user some useful hint here
pass
justplay = False #Default to: Record
if 'justplay' in param:
if param['justplay'] == "1":
justplay = True
afterEvent = 3 #Default to Afterevent: Auto
if 'afterevent' in param:
if (param['afterevent'] == "0") or (param['afterevent'] == "1") or (param['afterevent'] == "2"):
afterEvent = int(param['afterevent'])
dirname = preferredTimerPath()
if 'dirname' in param and param['dirname']:
dirname = param['dirname']
tags = []
if 'tags' in param and param['tags']:
tags = unescape(param['tags']).split(' ')
delold = 0
if 'deleteOldOnSave' in param:
delold = int(param['deleteOldOnSave'])
#Try to edit an existing Timer
if delold:
if 'channelOld' in param and param['channelOld']:
channelOld = ServiceReference(param['channelOld'])
else:
return ( False, _("Missing Parameter: channelOld") )
# We do need all of the following Parameters, too, for being able of finding the Timer.
# Therefore so we can neither use default values in this part nor can we
# continue if a parameter is missing
if 'beginOld' not in param:
return ( False, _("Missing Parameter: beginOld") )
beginOld = int(param['beginOld'])
if 'endOld' not in param:
return ( False, _("Missing Parameter: endOld") )
endOld = int(param['endOld'])
#let's try to find the timer
try:
for timer in self.recordtimer.timer_list + self.recordtimer.processed_timers:
if str(timer.service_ref) == str(channelOld):
if int(timer.begin) == beginOld:
if int(timer.end) == endOld: #we've found the timer we've been searching for
#set the new data
timer.service_ref = service_ref
timer.begin = begin
timer.end = end
timer.name = name
timer.description = description
timer.eit = eit
timer.disabled = disabled
timer.justplay = justplay
timer.afterEvent = afterEvent
timer.dirname = dirname
timer.tags = tags
timer.repeated = repeated
timer.processRepeated()
#sanity check
timersanitycheck = TimerSanityCheck(self.session.nav.RecordTimer.timer_list, timer)
conflicts = None
if not timersanitycheck.check():
conflicts = timersanitycheck.getSimulTimerList()
if conflicts is not None:
for x in conflicts:
if x.setAutoincreaseEnd(entry):
self.session.nav.RecordTimer.timeChanged(x)
if not timersanitycheck.check():
conflicts = timersanitycheck.getSimulTimerList()
if conflicts is None:
self.recordtimer.timeChanged(timer) #go and save it
print "[WebComponents.Timer] editTimer: Timer changed!"
return ( True, "Timer '%s' changed" %(timer.name) )
else:
print "[WebComponents.Timer] editTimer conflicting Timers: %s" %(conflicts)
msg = ""
for t in conflicts:
msg = "%s / %s" %(msg, t.name)
return (False, _("Conflicting Timer(s) detected! %s") %(msg))
except Exception as e:
#obviously some value was not good, return an error
import traceback
print traceback.format_exc()
return ( False, _("Changing the timer for '%s' failed!") % name )
return ( False, _("Could not find timer '%s' with given start and end time!") % name )
#Try adding a new Timer
try:
#Create a new instance of recordtimerentry
timer = RecordTimerEntry(service_ref, begin, end, name, description, eit, disabled, justplay, afterEvent, dirname=dirname, tags=tags)
timer.repeated = repeated
#add the new timer
conflicts = self.recordtimer.record(timer)
if conflicts is None:
return ( True, _("Timer '%s' added") %(timer.name) )
else:
print "[WebComponents.Timer] editTimer conflicting Timers: %s" %(conflicts)
msg = ""
for timer in conflicts:
msg = "%s / %s" %(msg, timer.name)
return (False, _("Conflicting Timer(s) detected! %s") %(msg))
except Exception, e:
#something went wrong, most possibly one of the given paramater-values was wrong
print "[WebComponents.Timer] editTimer exception: %s" %(e)
return ( False, _("Could not add timer '%s'!") % name )
return ( False, "Unexpected Error" )
	def addTimerByEventID(self, param):
		"""Create a record timer from an EPG event id.

		Required param keys: sRef, eventid. Optional: justplay, dirname,
		tags. The event's begin/end/name/description fill the timer.
		Returns a (success, message) tuple.
		"""
		print "[WebComponents.Timer] addTimerByEventID", param
		if param['sRef'] is None:
			return ( False, _("Missing Parameter: sRef") )
		if param['eventid'] is None:
			return ( False, _("Missing Parameter: eventid") )
		justplay = False
		if param['justplay'] is not None:
			if param['justplay'] == "1":
				justplay = True
		location = preferredTimerPath()
		if 'dirname' in param and param['dirname']:
			location = param['dirname']
		tags = []
		if 'tags' in param and param['tags']:
			tags = unescape(param['tags']).split(' ')
		# Resolve the event in the EPG cache; its metadata fills the timer.
		epgcache = eEPGCache.getInstance()
		event = epgcache.lookupEventId(eServiceReference(param['sRef']), int(param['eventid']))
		if event is None:
			return ( False, _("EventId not found") )
		(begin, end, name, description, eit) = parseEvent(event)
		timer = RecordTimerEntry(ServiceReference(param['sRef']), begin , end, name, description, eit, False, justplay, AFTEREVENT.AUTO, dirname=location, tags=tags)
		conflicts = self.recordtimer.record(timer)
		if conflicts is None:
			return ( True, _("Timer '%s' added") %(timer.name) )
		else:
			print "[WebComponents.Timer] editTimer conflicting Timers: %s" %(conflicts)
			msg = ""
			for timer in conflicts:
				msg = "%s / %s" %(msg, timer.name)
			return (False, _("Conflicting Timer(s) detected! %s") %(msg))

	def writeTimerList(self, force=False):
		"""Persist the timer list to flash when auto-write is enabled or
		``force`` is True. Returns a (success, message) tuple."""
		# is there an easier and better way? :\
		if config.plugins.Webinterface.autowritetimer.value or force:
			print "Timer.py writing timer to flash"
			self.session.nav.RecordTimer.saveTimer()
			return ( True, _("TimerList has been saved") )
		else:
			return ( False, _("TimerList has not been saved") )

	def getResult(self):
		"""Return the (success, message) tuple of the last operation."""
		return self.res

	result = property(getResult)
	## part for listfiller requests
	def getList(self):
		"""Build the list of all timers (active + processed) as flat tuples.

		The field order of each tuple is documented by the ``lut`` mapping
		below (name -> index).
		"""
		timerlist = []
		for item in self.recordtimer.timer_list + self.recordtimer.processed_timers:
			# Filename / next_activation only exist in some timer states.
			try:
				filename = item.Filename
			except AttributeError:
				filename = ""
			try:
				next_activation = item.next_activation
			except AttributeError:
				next_activation = ""
			# Extended description comes from the EPG cache when an event
			# id is attached to the timer.
			if item.eit is not None:
				event = self.epgcache.lookupEvent(['EX', ("%s" % item.service_ref , 2, item.eit)])
				if event and event[0][0] is not None:
					extdesc = event[0][0]
				else:
					extdesc = "N/A"
			else:
				extdesc = "N/A"
			#toggleDisabled
			if item.disabled:
				disabled = "1"
				toggleDisabled = "0"
				toggleDisabledImg = "on"
			else:
				disabled = "0"
				toggleDisabled = "1"
				toggleDisabledImg = "off"
			timerlist.append((
				item.service_ref,
				item.service_ref.getServiceName(),
				item.eit,
				item.name,
				item.description,
				disabled,
				item.begin,
				item.end,
				item.end - item.begin,
				item.start_prepare,
				1 if item.justplay else 0,
				item.afterEvent,
				item.dirname,
				" ".join(item.tags),
				item.log_entries,
				item.backoff,
				item.first_try_prepare,
				item.state,
				item.repeated,
				1 if item.dontSave else 0,
				item.cancelled,
				filename,
				next_activation,
				extdesc,
				toggleDisabled,
				toggleDisabledImg,
			))
		return timerlist

	list = property(getList)

	# Maps field names to their index in the tuples produced by getList().
	lut = {
		"ServiceReference":0,
		"ServiceName": 1,
		"EIT":2,
		"Name":3,
		"Description":4,
		"Disabled":5,
		"TimeBegin":6,
		"TimeEnd":7,
		"Duration":8,
		"startPrepare":9,
		"justPlay":10,
		"afterEvent":11,
		"Location":12,
		"Tags":13,
		"LogEntries":14,
		"Backoff":15,
		"firstTryPrepare":16,
		"State":17,
		"Repeated":18,
		"dontSave":19,
		"Cancled":20,
		"Filename":21,
		"nextActivation":22,
		"DescriptionExtended":23,
		"toggleDisabled":24,
		"toggleDisabledIMG":25,
	}
| gpl-2.0 |
linuxscout/arramooz | scripts/verbs/tagsdict.py | 1 | 11610 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# verbdict_functions.py
#
# Copyright 2016 zerrouki <zerrouki@majd4>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import os
import csvdict
import verbdict_functions as vdf
import pyarabic.araby as araby
import spellverbconst as svconst
import spellverb as vspell
import libqutrub.mosaref_main as msrif
import libqutrub.ar_verb as v_ar
import libqutrub.verb_valid as valid
import libqutrub.verb_const as const
import mysam.tagmaker as tagmaker
import alyahmor.verb_affixer as verb_affixer
class TagsDict(csvdict.CsvDict):
    """ a virtual converter of data from table to specific Hunspell dictionary format
    the data is big, then every function print string """

    def __init__(self, version = "N/A", ):
        """
        initiate the dict

        Loads the tag configuration and prepares the tag encoder and the
        verb affixer used to attach pronoun suffixes.
        """
        csvdict.CsvDict.__init__(self, version)
        file_conf = os.path.join( os.path.dirname(__file__), "config/tag.config")
        self.tagmaker = tagmaker.tagMaker(file_conf)
        self.affixer = verb_affixer.verb_affixer()

    def add_header(self,):
        """
        add the header for new dict

        Returns the comment header built from ``self.headerlines``.
        """
        line = "#" + "\n##".join(self.headerlines) +"\n"
        return line

    def add_record(self, verb_row):
        """
        Add a new to the dict

        Conjugates the verb in all requested tenses/pronouns and prints
        one tab-separated line per conjugated form (word, lemma, tags)
        to stdout; returns an error line for invalid infinitives.
        """
        self.id +=1
        v = self.treat_tuple(verb_row)
        line = ""
        # display fields to ensure corectness
        VERIFY_INPUT=False;
        #~ VERIFY_INPUT = True;
        if VERIFY_INPUT:
            self.test_entry(v)
        # conjugate the verb with speling tags
        if not valid.is_valid_infinitive_verb(v['vocalized']):
            line += u"#\t\tis invalid verb \n"
        else:
            # NOTE(review): 'future_type' local is computed but not used
            # below; do_sarf receives the raw v['future_type'] instead.
            future_type = v_ar.get_future_type_entree(v['future_type']);
            conjugTable = msrif.do_sarf( v['vocalized'], v['future_type'], v['all'], v['past'],
                v['future'], v['passive'], v['imperative'],
                v['future_moode'], v['confirmed'], v['transitive'],
                "DICT");
            TableEntries = {}
            if conjugTable:
                TableEntries = {}
                tags_info = self.get_verb_info(v)
                for tense in conjugTable.keys():
                    # the passive tenses dont take object suffix, only with double transitie verbs
                    if (v['transitive'] and tense in const.TableIndicativeTense) or v['double_trans']:#:
                        accept_attached_pronoun = True
                    else:
                        accept_attached_pronoun = False
                    # browes all pronouns
                    for pronoun in conjugTable[tense].keys():
                        if pronoun != const.PronounAntuma_f:
                            tags = self.get_tags(tags_info, tense, pronoun)
                            flags = svconst.TabPrefixes[tense]['full'];
                            # the passive tenses dont take object suffix, only with double transitie verbs
                            if (v['transitive'] and tense in const.TableIndicativeTense) or v['double_trans']:#:
                                # add flags for suffixes
                                if v['think_trans'] and v['reflexive_trans']:
                                    flags += svconst.TabSuffixesPronominale[pronoun]['full'];
                                else:
                                    flags += svconst.TabSuffixes[pronoun]['full'];
                            # add flag yeh for the الأفعال الخمسة
                            if tense == const.TenseFuture and pronoun in (const.PronounAnti, const.PronounAntuma, const.PronounAntuma_f,
                                const.PronounAntum, const.PronounHuma, const.PronounHuma_f, const.PronounHum ):
                                flags+=u"Ha";
                            # add double object suffixe, if the verb is double transitive, and the tense is indicative
                            if v['double_trans'] and tense in const.TableIndicativeTense:
                                # add flags for suffixes (double object)
                                flags += svconst.TabDisplayTagDouble[pronoun]['full'];
                            #add an entree to the table entrie
                            # this allows to reduce many cases into one entree
                            if conjugTable[tense][pronoun]:
                                conj = conjugTable[tense][pronoun]
                                word_nm = araby.strip_tashkeel(conj);
                                #~ verb_with_shadda = araby.strip_harakat(v['vocalized']);
                                print (u'\t'.join([word_nm, v['vocalized'] , tags])).encode('utf8');
                                # if transitive:
                                if accept_attached_pronoun:
                                    # emit a second entry with an attached object pronoun
                                    verb_attached_pronoun_list = self.affixer.vocalize(conj,"",u"ك")
                                    attached = verb_attached_pronoun_list[0][0]
                                    attached = araby.strip_tashkeel(attached)
                                    tags = self.get_tags(tags_info + [u"ضمير متصل"], tense, pronoun)
                                    print (u'\t'.join([attached, v['vocalized'] , tags])).encode('utf8');
        return line

    def get_verb_info(self, verb_tuple):
        """
        Get verb information

        Classifies the verb (assimilated/hollow/defective/doubled and its
        transitivity) and returns the matching list of Arabic tag words.
        """
        # get verb subclass
        verb_nm = araby.strip_tashkeel(verb_tuple['vocalized'])
        verb_class = ""
        verb_tags = [u"فعل"]
        if verb_nm.startswith(araby.WAW):
            verb_class= "W1W" #"Mithal_W"
            verb_tags.extend([u"معتل", u"مثال", u"واوي"])
        elif verb_nm[-2:-1] ==araby.ALEF: # before last char
            if verb_tuple['future_type'] in (araby.DAMMA, u"ضمة"):
                verb_class= "W2W" #"Adjwaf_W"
                verb_tags.extend([u"معتل", u"أجوف", u"واوي"])
            elif verb_tuple['future_type'] in (araby.KASRA, u"كسرة"):
                verb_class= "W2Y" #"Adjwaf_Y"
                verb_tags.extend([u"معتل", u"أجوف", u"يائي"])
        elif verb_nm[-1:] in (araby.YEH, araby.ALEF_MAKSURA):
            verb_class= "W3Y" #"Naqis_Y"
            verb_tags.extend([u"معتل", u"ناقص", u"يائي"])
        elif verb_nm[-1:] == araby.ALEF:
            verb_class= "W3W" #"Naqis_W"
            verb_tags.extend([u"معتل", u"ناقص", u"واوي"])
        elif araby.SHADDA in (verb_tuple['vocalized']):
            verb_class= "Dbl" # doubled
            verb_tags.append(u"مضعف")
        else:
            verb_class = "-"
        # the passive tenses dont take object suffix, only with double transitie verbs
        # NOTE(review): the short 'tags' string below is built but never
        # returned (see the commented-out 'return tags'); only verb_tags is.
        tags = "V."+verb_class+"."
        if verb_tuple['transitive']:
            tags +="T"
            verb_tags.append(u"متعدي")
        else:
            tags +="I"
            verb_tags.append(u"لازم")
        if verb_tuple['double_trans']:
            tags +="D"
            verb_tags.append(u"متعدي لمفعولين")
            verb_tags.append(u"متعدي")
        elif verb_tuple['think_trans']:
            tags += "T"
            verb_tags.append(u"متعدي للعاقل")
        elif verb_tuple['reflexive_trans']:
            tags += "R"
            verb_tags.append(u"متعدي قلبي")
        # tags pronouns
        else:
            tags +='-'
        #~ return tags
        return verb_tags

    def get_tags(self, verb_info, tense, pronoun ):
        """
        Generate tags format

        Builds the tag list (verb info + tense + pronoun) and returns it
        encoded through the tagmaker.
        """
        tags = u";".join(verb_info) + ";"
        tags_list = []
        tags_list.extend(verb_info)
        tags_list.append(tense)
        tags_list.append(pronoun)
        tags += svconst.TabTagsTense[tense]
        tags += svconst.TabTagsPronominale[pronoun]
        # add encletic and procletic tags
        #Affixes ( Procletic + Ecletic)
        #Verb procletic :
        # W: conjonction: starts by WAW or FEH, take 3 values: W: for waw, F; for Feh, -: none.
        # S: future prefix, س+يتعلم
        tags += ';'
        tags += '-'
        #Verb encletic :
        #define the extended words added to the lexem: الضمائر المضافة
        # H: if have encletic
        tags += '-'
        #~ return tags
        # NOTE(review): the legacy 'tags' string above is kept for reference
        # but the encoded tag list is what is actually returned.
        self.tagmaker.reset()
        encoded_tags = self.tagmaker.encode(tags_list)
        #~ from pyarabic.arabrepr import arepr as repr
        #~ print(repr(tags_list))
        #~ print(encoded_tags)
        return encoded_tags

    def test_entry(self, verb_tuple):
        """
        Verify entrie

        Debug helper: dumps the parsed verb fields to stdout.
        """
        print "------------------------------";
        print (u"\t".join(['word', verb_tuple['word']])).encode('utf8');
        print (u"\t".join(['future_type', verb_tuple['future_type']])).encode('utf8');
        print (u"\t".join(['transitive',str(verb_tuple['transitive']), ])).encode('utf8');
        print (u"\t".join(['double_trans',str(verb_tuple['double_trans']), ])).encode('utf8');
        print (u"\t".join(['think_trans',str(verb_tuple['think_trans']), ])).encode('utf8');
        print (u"\t".join(['unthink_trans',str(verb_tuple['unthink_trans']), ])).encode('utf8');
        print (u"\t".join(['reflexive_trans',str(verb_tuple['reflexive_trans']), ])).encode('utf8');
        # NOTE(review): 'all' here is the builtin (always truthy), so this
        # branch is always taken; it probably meant verb_tuple['all'].
        if all:
            tenses=u"يعملان";
        else:
            tenses=u"";
        if verb_tuple['past']: tenses+=u"ي";
        else: tenses+="-";
        if verb_tuple['future']: tenses+=u"ع";
        else: tenses+="-";
        if verb_tuple['imperative']: tenses+=u"م";
        else: tenses+="-";
        if verb_tuple['passive']: tenses+=u"ل";
        else: tenses+=u"-";
        if verb_tuple['future_moode']: tenses+=u"ا";
        else: tenses+=u"-";
        if verb_tuple['confirmed']: tenses+=u"ن";
        else: tenses+=u"-";
        print (u"\t".join(['tense', tenses])).encode('utf8');
        print "------------------------------";

    def add_footer(self):
        """close the data set, used for ending xml, or sql"""
        return """"""
| gpl-2.0 |
paulsmith/geodjango | django/contrib/webdesign/templatetags/webdesign.py | 350 | 2196 | from django.contrib.webdesign.lorem_ipsum import words, paragraphs
from django import template
register = template.Library()
class LoremNode(template.Node):
    """Template node emitting pseudo-latin filler text when rendered."""

    def __init__(self, count, method, common):
        self.count = count
        self.method = method
        self.common = common

    def render(self, context):
        # Resolve the requested amount; fall back to 1 on anything bogus.
        try:
            amount = int(self.count.resolve(context))
        except (ValueError, TypeError):
            amount = 1
        # 'w' -> words; 'p' -> HTML paragraphs; anything else -> plain blocks.
        if self.method == 'w':
            return words(amount, common=self.common)
        pieces = paragraphs(amount, common=self.common)
        if self.method == 'p':
            pieces = ['<p>%s</p>' % piece for piece in pieces]
        return u'\n\n'.join(pieces)
#@register.tag
def lorem(parser, token):
    """
    Creates random Latin text useful for providing test data in templates.

    Usage format::

        {% lorem [count] [method] [random] %}

    ``count`` is a number (or variable) containing the number of paragraphs or
    words to generate (default is 1).

    ``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
    plain-text paragraph blocks (default is ``b``).

    ``random`` is the word ``random``, which if given, does not use the common
    paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").

    Examples:
        * ``{% lorem %}`` will output the common "lorem ipsum" paragraph
        * ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
          and two random paragraphs each wrapped in HTML ``<p>`` tags
        * ``{% lorem 2 w random %}`` will output two random latin words
    """
    # The optional arguments are consumed right-to-left by popping them
    # off the token list; whatever remains (beyond the tag name) is count.
    bits = list(token.split_contents())
    tagname = bits[0]
    # Random bit
    common = bits[-1] != 'random'
    if not common:
        bits.pop()
    # Method bit
    if bits[-1] in ('w', 'p', 'b'):
        method = bits.pop()
    else:
        method = 'b'
    # Count bit
    if len(bits) > 1:
        count = bits.pop()
    else:
        count = '1'
    count = parser.compile_filter(count)
    # Only the tag name itself may remain at this point.
    if len(bits) != 1:
        raise template.TemplateSyntaxError("Incorrect format for %r tag" % tagname)
    return LoremNode(count, method, common)
lorem = register.tag(lorem)
| bsd-3-clause |
111pontes/ydk-py | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_hsrp_oper.py | 1 | 198152 | """ Cisco_IOS_XR_ipv4_hsrp_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv4\-hsrp package operational data.
This module contains definitions
for the following management objects\:
hsrp\: HSRP operational data
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class HsrpBAfEnum(Enum):
    """
    Hsrp b af.

    Address families supported by the HSRP operational model.
    """

    ipv4 = 0   # IPv4 Address Family
    ipv6 = 1   # IPv6 Address Family
    count = 2  # The number of supported address families

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular dependency on the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['HsrpBAfEnum']
class HsrpBfdSessionStateEnum(Enum):
    """
    Hsrp bfd session state.

    State of the BFD session protecting an HSRP group.
    """

    bfd_state_none = 0      # None
    bfd_state_inactive = 1  # Inactive
    bfd_state_up = 2        # Up
    bfd_state_down = 3      # Down

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular dependency on the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['HsrpBfdSessionStateEnum']
class HsrpStateChangeReasonEnum(Enum):
    """
    Hsrp state change reason.

    Reason codes recorded against each HSRP group state transition.
    """

    state_change_bfd_down = 0                    # BFD session down
    state_change_vip_learnt = 1                  # Virtual IP learnt
    state_change_interface_ip = 2                # Interface IP update
    state_change_delay_timer = 3                 # Delay timer expired
    state_change_startup = 4                     # Ready on startup
    state_change_shutdown = 5                    # HSRP shut down
    state_change_interface_up = 6                # Interface Up update
    state_change_interface_down = 7              # Interface Down update
    state_change_active_timer = 8                # Active timer expired
    state_change_standby_timer = 9               # Standby timer expired
    state_change_resign = 10                     # Resign received
    state_change_coup = 11                       # Coup received
    state_change_higher_priority_speak = 12      # Higher priority speak received
    state_change_higher_priority_standby = 13    # Higher priority standby received
    state_change_lower_priority_standby = 14     # Lower priority standby received
    state_change_higher_priority_active = 15     # Higher priority active received
    state_change_lower_priority_active = 16      # Lower priority active received
    state_change_virtual_ip_configured = 17      # Virtual IP configured
    state_change_virtual_ip_lost = 18            # Virtual IP lost
    state_change_recovered_from_checkpoint = 19  # Recovered from checkpoint
    state_change_mac_update = 20                 # MAC address update
    state_change_admin = 21                      # Forwarder Admin state change
    state_change_parent = 22                     # MGO parent change
    state_change_chkpt_update = 23               # Checkpoint update from Primary HSRP instance
    state_change_issu_resync = 24                # Resync following ISSU primary event
    state_change_max = 25                        # Maximum reason in enumeration

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular dependency on the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['HsrpStateChangeReasonEnum']
class HsrpVmacStateEnum(Enum):
    """
    Hsrp vmac state.

    Where the group's virtual MAC address currently stands relative to
    the interface MAC table.
    """

    stored = 0     # VMAC stored locally
    reserved = 1   # VMAC reserved in mac table
    active = 2     # VMAC active in mac table
    reserving = 3  # VMAC not yet reserved in mac table

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular dependency on the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['HsrpVmacStateEnum']
class StandbyGrpStateEnum(Enum):
    """
    Standby grp state.

    HSRP group finite-state-machine states. Note the values start at 1,
    not 0.
    """

    state_initial = 1  # Initial
    state_learn = 2    # Learn
    state_listen = 3   # Listen
    state_speak = 4    # Speak
    state_standby = 5  # Standby
    state_active = 6   # Active

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular dependency on the meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['StandbyGrpStateEnum']
class Hsrp(object):
"""
HSRP operational data
.. attribute:: bfd_sessions
The table of HSRP BFD Sessions
**type**\: :py:class:`BfdSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.BfdSessions>`
.. attribute:: ipv4
IPv4 HSRP information
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4>`
.. attribute:: ipv6
IPv6 HSRP information
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6>`
.. attribute:: mgo_sessions
HSRP MGO session table
**type**\: :py:class:`MgoSessions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.MgoSessions>`
.. attribute:: summary
HSRP summary statistics
**type**\: :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Summary>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
    """Create the root HSRP operational container.

    Instantiates every child table and gives each a ``parent``
    back-reference (the child classes derive their XPath from
    ``parent._common_path``).
    """
    self.bfd_sessions = Hsrp.BfdSessions()
    self.bfd_sessions.parent = self
    self.ipv4 = Hsrp.Ipv4()
    self.ipv4.parent = self
    self.ipv6 = Hsrp.Ipv6()
    self.ipv6.parent = self
    self.mgo_sessions = Hsrp.MgoSessions()
    self.mgo_sessions.parent = self
    self.summary = Hsrp.Summary()
    self.summary.parent = self
class Ipv4(object):
"""
IPv4 HSRP information
.. attribute:: groups
The HSRP standby group table
**type**\: :py:class:`Groups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups>`
.. attribute:: interfaces
The HSRP interface information table
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Interfaces>`
.. attribute:: tracked_interfaces
The HSRP tracked interfaces table
**type**\: :py:class:`TrackedInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.TrackedInterfaces>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
    """Create the IPv4 HSRP container and attach its child tables.

    Each child table gets a ``parent`` back-reference used for XPath
    derivation.
    """
    self.parent = None
    self.groups = Hsrp.Ipv4.Groups()
    self.groups.parent = self
    self.interfaces = Hsrp.Ipv4.Interfaces()
    self.interfaces.parent = self
    self.tracked_interfaces = Hsrp.Ipv4.TrackedInterfaces()
    self.tracked_interfaces.parent = self
class Groups(object):
"""
The HSRP standby group table
.. attribute:: group
An HSRP standby group
**type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialise the standby-group list container.

    ``group`` is a YList of ``Hsrp.Ipv4.Groups.Group`` entries
    (keyed by interface name and group number per the Group docs).
    """
    self.parent = None
    self.group = YList()
    self.group.parent = self
    self.group.name = 'group'
class Group(object):
"""
An HSRP standby group
.. attribute:: interface_name <key>
The interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: group_number <key>
The HSRP group number
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: active_ip_address
Active router's IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: active_ipv6_address
Active router's IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: active_mac_address
Active router's interface MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: active_priority
Priority of the Active router
**type**\: int
**range:** 0..255
.. attribute:: active_timer_flag
Active timer running flag
**type**\: bool
.. attribute:: active_timer_msecs
Active timer running time msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: active_timer_secs
Active timer running time secs
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: address_family
Address family
**type**\: :py:class:`HsrpBAfEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.HsrpBAfEnum>`
.. attribute:: authentication_string
Authentication string
**type**\: str
**length:** 0..9
.. attribute:: bfd_enabled
HSRP BFD fast failover
**type**\: bool
.. attribute:: bfd_interface
BFD Interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: bfd_interval
BFD packet send interval
**type**\: int
**range:** 0..4294967295
.. attribute:: bfd_multiplier
BFD multiplier
**type**\: int
**range:** 0..4294967295
.. attribute:: bfd_peer_ip_address
BFD Peer IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: bfd_peer_ipv6_address
BFD Peer IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: bfd_session_state
BFD session state
**type**\: :py:class:`HsrpBfdSessionStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.HsrpBfdSessionStateEnum>`
.. attribute:: configured_mac_address
MAC address configured
**type**\: bool
.. attribute:: configured_priority
Configured priority
**type**\: int
**range:** 0..255
.. attribute:: configured_timers
Non\-default timers are configured
**type**\: bool
.. attribute:: coup_received_time
Time last coup was received
**type**\: :py:class:`CoupReceivedTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.CoupReceivedTime>`
.. attribute:: coup_sent_time
Time last coup was sent
**type**\: :py:class:`CoupSentTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.CoupSentTime>`
.. attribute:: current_state_timer_secs
Time in current state secs
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: delay_timer_flag
Delay timer running flag
**type**\: bool
.. attribute:: delay_timer_msecs
Delay timer running time msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: delay_timer_secs
Delay timer running time secs
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: followed_session_name
Followed Session Name
**type**\: str
**length:** 0..16
.. attribute:: global_address
Global virtual IPv6 addresses
**type**\: list of :py:class:`GlobalAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.GlobalAddress>`
.. attribute:: hello_time
Hellotime in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: hello_timer_flag
Hello timer running flag
**type**\: bool
.. attribute:: hello_timer_msecs
Hello timer running time msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: hello_timer_secs
Hello timer running time secs
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: hold_time
Holdtime in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: hsrp_group_number
HSRP Group number
**type**\: int
**range:** 0..4294967295
.. attribute:: hsrp_router_state
HSRP router state
**type**\: :py:class:`StandbyGrpStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.StandbyGrpStateEnum>`
.. attribute:: interface
IM Interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: interface_name_xr
Interface Name
**type**\: str
**length:** 0..64
.. attribute:: is_slave
Group is a slave group
**type**\: bool
.. attribute:: learned_hello_time
Learned hellotime in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: learned_hold_time
Learned holdtime in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: min_delay_time
Minimum delay time in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: preempt_delay
Preempt delay time in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: preempt_enabled
Preempt enabled
**type**\: bool
.. attribute:: preempt_timer_secs
Preempt time remaining in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: redirects_disabled
HSRP redirects disabled
**type**\: bool
.. attribute:: reload_delay_time
Reload delay time in msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: resign_received_time
Time last resign was received
**type**\: :py:class:`ResignReceivedTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.ResignReceivedTime>`
.. attribute:: resign_sent_time
Time last resign was sent
**type**\: :py:class:`ResignSentTime <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.ResignSentTime>`
.. attribute:: router_priority
Priority of the router
**type**\: int
**range:** 0..255
.. attribute:: secondary_address
Secondary virtual IP addresses
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: session_name
Session Name
**type**\: str
**length:** 0..16
.. attribute:: slaves
Number of slaves following state
**type**\: int
**range:** 0..4294967295
.. attribute:: standby_ip_address
Standby router's IP address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: standby_ipv6_address
Standby router's IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: standby_mac_address
Standby router's interface MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: standby_timer_flag
Standby timer running flag
**type**\: bool
.. attribute:: standby_timer_msecs
Standby timer running time msecs
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: standby_timer_secs
Standby timer running time secs
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: state_change_count
Number of state changes
**type**\: int
**range:** 0..4294967295
.. attribute:: state_change_history
State change history
**type**\: list of :py:class:`StateChangeHistory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.StateChangeHistory>`
.. attribute:: statistics
HSRP Group statistics
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Groups.Group.Statistics>`
.. attribute:: tracked_interface_count
Number of tracked interfaces
**type**\: int
**range:** 0..4294967295
.. attribute:: tracked_interface_up_count
Number of tracked interfaces up
**type**\: int
**range:** 0..4294967295
.. attribute:: use_bia_configured
Use burnt in MAC address configured
**type**\: bool
.. attribute:: use_configured_timers
Use configured timers
**type**\: bool
.. attribute:: use_configured_virtual_ip
Use configured virtual IP
**type**\: bool
.. attribute:: version
HSRP Protocol Version
**type**\: int
**range:** 0..255
.. attribute:: virtual_ip_address
Configured Virtual IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: virtual_linklocal_ipv6_address
Virtual linklocal IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: virtual_mac_address
Virtual mac address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: virtual_mac_address_state
Virtual mac address state
**type**\: :py:class:`HsrpVmacStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.HsrpVmacStateEnum>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialise every leaf, list and child container of an HSRP group.

    Leaves start as ``None`` (unset); child containers and YList/YLeafList
    members are constructed eagerly and wired with ``parent``
    back-references for XPath derivation. Attribute semantics are
    documented in the class docstring above.
    """
    self.parent = None
    # List keys.
    self.interface_name = None
    self.group_number = None
    # Active-router leaves and timers.
    self.active_ip_address = None
    self.active_ipv6_address = None
    self.active_mac_address = None
    self.active_priority = None
    self.active_timer_flag = None
    self.active_timer_msecs = None
    self.active_timer_secs = None
    self.address_family = None
    self.authentication_string = None
    # BFD fast-failover leaves.
    self.bfd_enabled = None
    self.bfd_interface = None
    self.bfd_interval = None
    self.bfd_multiplier = None
    self.bfd_peer_ip_address = None
    self.bfd_peer_ipv6_address = None
    self.bfd_session_state = None
    self.configured_mac_address = None
    self.configured_priority = None
    self.configured_timers = None
    # Child containers: coup/resign event timestamps.
    self.coup_received_time = Hsrp.Ipv4.Groups.Group.CoupReceivedTime()
    self.coup_received_time.parent = self
    self.coup_sent_time = Hsrp.Ipv4.Groups.Group.CoupSentTime()
    self.coup_sent_time.parent = self
    self.current_state_timer_secs = None
    self.delay_timer_flag = None
    self.delay_timer_msecs = None
    self.delay_timer_secs = None
    self.followed_session_name = None
    # List of global virtual IPv6 addresses.
    self.global_address = YList()
    self.global_address.parent = self
    self.global_address.name = 'global_address'
    self.hello_time = None
    self.hello_timer_flag = None
    self.hello_timer_msecs = None
    self.hello_timer_secs = None
    self.hold_time = None
    self.hsrp_group_number = None
    self.hsrp_router_state = None
    self.interface = None
    self.interface_name_xr = None
    self.is_slave = None
    self.learned_hello_time = None
    self.learned_hold_time = None
    self.min_delay_time = None
    self.preempt_delay = None
    self.preempt_enabled = None
    self.preempt_timer_secs = None
    self.redirects_disabled = None
    self.reload_delay_time = None
    self.resign_received_time = Hsrp.Ipv4.Groups.Group.ResignReceivedTime()
    self.resign_received_time.parent = self
    self.resign_sent_time = Hsrp.Ipv4.Groups.Group.ResignSentTime()
    self.resign_sent_time.parent = self
    self.router_priority = None
    # Leaf-list of secondary virtual IPv4 addresses.
    self.secondary_address = YLeafList()
    self.secondary_address.parent = self
    self.secondary_address.name = 'secondary_address'
    self.session_name = None
    self.slaves = None
    self.standby_ip_address = None
    self.standby_ipv6_address = None
    self.standby_mac_address = None
    self.standby_timer_flag = None
    self.standby_timer_msecs = None
    self.standby_timer_secs = None
    self.state_change_count = None
    # List of state-change history records.
    self.state_change_history = YList()
    self.state_change_history.parent = self
    self.state_change_history.name = 'state_change_history'
    self.statistics = Hsrp.Ipv4.Groups.Group.Statistics()
    self.statistics.parent = self
    self.tracked_interface_count = None
    self.tracked_interface_up_count = None
    self.use_bia_configured = None
    self.use_configured_timers = None
    self.use_configured_virtual_ip = None
    self.version = None
    self.virtual_ip_address = None
    self.virtual_linklocal_ipv6_address = None
    self.virtual_mac_address = None
    self.virtual_mac_address_state = None
class ResignSentTime(object):
    """
    Time last resign was sent.

    Timestamp split into ``seconds`` and ``nanoseconds`` components
    (each 0..4294967295, ``None`` until populated).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:resign-sent-time'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # The node carries data if either time component is populated.
        return any(leaf is not None for leaf in (self.nanoseconds, self.seconds))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.ResignSentTime']['meta_info']
class ResignReceivedTime(object):
    """
    Time last resign was received.

    Timestamp split into ``seconds`` and ``nanoseconds`` components
    (each 0..4294967295, ``None`` until populated).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:resign-received-time'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # The node carries data if either time component is populated.
        return any(leaf is not None for leaf in (self.nanoseconds, self.seconds))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.ResignReceivedTime']['meta_info']
class CoupSentTime(object):
    """
    Time last coup was sent.

    Timestamp split into ``seconds`` and ``nanoseconds`` components
    (each 0..4294967295, ``None`` until populated).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:coup-sent-time'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # The node carries data if either time component is populated.
        return any(leaf is not None for leaf in (self.nanoseconds, self.seconds))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.CoupSentTime']['meta_info']
class CoupReceivedTime(object):
    """
    Time last coup was received.

    Timestamp split into ``seconds`` and ``nanoseconds`` components
    (each 0..4294967295, ``None`` until populated).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:coup-received-time'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # The node carries data if either time component is populated.
        return any(leaf is not None for leaf in (self.nanoseconds, self.seconds))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.CoupReceivedTime']['meta_info']
class Statistics(object):
    """
    HSRP Group statistics.

    Per-group packet and state-transition counters. Every leaf is an
    unsigned 32-bit counter (0..4294967295) and is ``None`` until read
    from the device:

    packet counters — hello/coup/resign packets sent and received,
    plus ``auth_fail_received``, ``invalid_timer_received`` and
    ``mismatch_virtual_ip_address_received`` error counts;
    transition counters — number of entries into each FSM state
    (init/learn/listen/speak/standby/active).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    # Every counter leaf carried by this node, in attribute-creation order.
    _LEAF_NAMES = (
        'active_transitions',
        'auth_fail_received',
        'coup_packets_received',
        'coup_packets_sent',
        'hello_packets_received',
        'hello_packets_sent',
        'init_transitions',
        'invalid_timer_received',
        'learn_transitions',
        'listen_transitions',
        'mismatch_virtual_ip_address_received',
        'resign_packets_received',
        'resign_packets_sent',
        'speak_transitions',
        'standby_transitions',
    )

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        for leaf in self._LEAF_NAMES:
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:statistics'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # Any populated counter means the node carries data.
        return any(getattr(self, leaf) is not None for leaf in self._LEAF_NAMES)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.Statistics']['meta_info']
class GlobalAddress(object):
    """
    Global virtual IPv6 addresses.

    A single entry in the group's list of global virtual IPv6
    addresses; ``ipv6_address`` is an IPv6 address string
    (``None`` until populated).
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.ipv6_address = None

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:global-address'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # Data is present once the single address leaf is populated.
        return self.ipv6_address is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.GlobalAddress']['meta_info']
class StateChangeHistory(object):
    """
    State change history.

    One record per HSRP group state transition: the old and new FSM
    states (``StandbyGrpStateEnum``), the reason code
    (``HsrpStateChangeReasonEnum``) and the ``Time`` of the change.
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None  # owning Group; set by the container
        self.new_state = None
        self.old_state = None
        self.reason = None
        self.time = Hsrp.Ipv4.Groups.Group.StateChangeHistory.Time()
        self.time.parent = self

    class Time(object):
        """
        Time of state change.

        Timestamp split into ``seconds`` and ``nanoseconds`` components
        (each 0..4294967295, ``None`` until populated).
        """

        _prefix = 'ipv4-hsrp-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None  # owning history record
            self.nanoseconds = None
            self.seconds = None

        @property
        def _common_path(self):
            # The node's XPath is always derived from its parent's path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:time'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            # Operational data only.
            return False

        def _has_data(self):
            # The node carries data if either time component is populated.
            return any(leaf is not None for leaf in (self.nanoseconds, self.seconds))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
            return meta._meta_table['Hsrp.Ipv4.Groups.Group.StateChangeHistory.Time']['meta_info']

    @property
    def _common_path(self):
        # The node's XPath is always derived from its parent's path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:state-change-history'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        # Operational data only.
        return False

    def _has_data(self):
        # Direct leaves first, then the nested timestamp container.
        if any(v is not None for v in (self.new_state, self.old_state, self.reason)):
            return True
        return self.time is not None and self.time._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv4.Groups.Group.StateChangeHistory']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
if self.group_number is None:
raise YPYModelError('Key property group_number is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:groups/Cisco-IOS-XR-ipv4-hsrp-oper:group[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + '][Cisco-IOS-XR-ipv4-hsrp-oper:group-number = ' + str(self.group_number) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.group_number is not None:
return True
if self.active_ip_address is not None:
return True
if self.active_ipv6_address is not None:
return True
if self.active_mac_address is not None:
return True
if self.active_priority is not None:
return True
if self.active_timer_flag is not None:
return True
if self.active_timer_msecs is not None:
return True
if self.active_timer_secs is not None:
return True
if self.address_family is not None:
return True
if self.authentication_string is not None:
return True
if self.bfd_enabled is not None:
return True
if self.bfd_interface is not None:
return True
if self.bfd_interval is not None:
return True
if self.bfd_multiplier is not None:
return True
if self.bfd_peer_ip_address is not None:
return True
if self.bfd_peer_ipv6_address is not None:
return True
if self.bfd_session_state is not None:
return True
if self.configured_mac_address is not None:
return True
if self.configured_priority is not None:
return True
if self.configured_timers is not None:
return True
if self.coup_received_time is not None and self.coup_received_time._has_data():
return True
if self.coup_sent_time is not None and self.coup_sent_time._has_data():
return True
if self.current_state_timer_secs is not None:
return True
if self.delay_timer_flag is not None:
return True
if self.delay_timer_msecs is not None:
return True
if self.delay_timer_secs is not None:
return True
if self.followed_session_name is not None:
return True
if self.global_address is not None:
for child_ref in self.global_address:
if child_ref._has_data():
return True
if self.hello_time is not None:
return True
if self.hello_timer_flag is not None:
return True
if self.hello_timer_msecs is not None:
return True
if self.hello_timer_secs is not None:
return True
if self.hold_time is not None:
return True
if self.hsrp_group_number is not None:
return True
if self.hsrp_router_state is not None:
return True
if self.interface is not None:
return True
if self.interface_name_xr is not None:
return True
if self.is_slave is not None:
return True
if self.learned_hello_time is not None:
return True
if self.learned_hold_time is not None:
return True
if self.min_delay_time is not None:
return True
if self.preempt_delay is not None:
return True
if self.preempt_enabled is not None:
return True
if self.preempt_timer_secs is not None:
return True
if self.redirects_disabled is not None:
return True
if self.reload_delay_time is not None:
return True
if self.resign_received_time is not None and self.resign_received_time._has_data():
return True
if self.resign_sent_time is not None and self.resign_sent_time._has_data():
return True
if self.router_priority is not None:
return True
if self.secondary_address is not None:
for child in self.secondary_address:
if child is not None:
return True
if self.session_name is not None:
return True
if self.slaves is not None:
return True
if self.standby_ip_address is not None:
return True
if self.standby_ipv6_address is not None:
return True
if self.standby_mac_address is not None:
return True
if self.standby_timer_flag is not None:
return True
if self.standby_timer_msecs is not None:
return True
if self.standby_timer_secs is not None:
return True
if self.state_change_count is not None:
return True
if self.state_change_history is not None:
for child_ref in self.state_change_history:
if child_ref._has_data():
return True
if self.statistics is not None and self.statistics._has_data():
return True
if self.tracked_interface_count is not None:
return True
if self.tracked_interface_up_count is not None:
return True
if self.use_bia_configured is not None:
return True
if self.use_configured_timers is not None:
return True
if self.use_configured_virtual_ip is not None:
return True
if self.version is not None:
return True
if self.virtual_ip_address is not None:
return True
if self.virtual_linklocal_ipv6_address is not None:
return True
if self.virtual_mac_address is not None:
return True
if self.virtual_mac_address_state is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.Groups.Group']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:groups'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.group is not None:
for child_ref in self.group:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.Groups']['meta_info']
class TrackedInterfaces(object):
"""
The HSRP tracked interfaces table
.. attribute:: tracked_interface
An HSRP tracked interface entry
**type**\: list of :py:class:`TrackedInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.TrackedInterfaces.TrackedInterface>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tracked_interface = YList()
self.tracked_interface.parent = self
self.tracked_interface.name = 'tracked_interface'
class TrackedInterface(object):
"""
An HSRP tracked interface entry
.. attribute:: interface_name <key>
The interface name of the interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: group_number <key>
The HSRP group number
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: tracked_interface_name <key>
The interface name of the interface being tracked
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: hsrp_group_number
HSRP Group number
**type**\: int
**range:** 0..4294967295
.. attribute:: interface
IM Interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: interface_up_flag
Interface up flag
**type**\: bool
.. attribute:: is_object
Tracked Object Flag
**type**\: bool
.. attribute:: priority_decrement
Priority weighting
**type**\: int
**range:** 0..4294967295
.. attribute:: tracked_interface_name_xr
Tracked Interface Name
**type**\: str
**length:** 0..64
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.group_number = None
self.tracked_interface_name = None
self.hsrp_group_number = None
self.interface = None
self.interface_up_flag = None
self.is_object = None
self.priority_decrement = None
self.tracked_interface_name_xr = None
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
if self.group_number is None:
raise YPYModelError('Key property group_number is None')
if self.tracked_interface_name is None:
raise YPYModelError('Key property tracked_interface_name is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interfaces/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interface[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + '][Cisco-IOS-XR-ipv4-hsrp-oper:group-number = ' + str(self.group_number) + '][Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interface-name = ' + str(self.tracked_interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.group_number is not None:
return True
if self.tracked_interface_name is not None:
return True
if self.hsrp_group_number is not None:
return True
if self.interface is not None:
return True
if self.interface_up_flag is not None:
return True
if self.is_object is not None:
return True
if self.priority_decrement is not None:
return True
if self.tracked_interface_name_xr is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.TrackedInterfaces.TrackedInterface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tracked_interface is not None:
for child_ref in self.tracked_interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.TrackedInterfaces']['meta_info']
class Interfaces(object):
"""
The HSRP interface information table
.. attribute:: interface
A HSRP interface entry
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Interfaces.Interface>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
class Interface(object):
"""
A HSRP interface entry
.. attribute:: interface_name <key>
The interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: interface
IM Interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: statistics
HSRP Interface Statistics
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv4.Interfaces.Interface.Statistics>`
.. attribute:: use_bia_flag
Use burnt in mac address flag
**type**\: bool
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.interface = None
self.statistics = Hsrp.Ipv4.Interfaces.Interface.Statistics()
self.statistics.parent = self
self.use_bia_flag = None
class Statistics(object):
"""
HSRP Interface Statistics
.. attribute:: advert_packets_received
Number of advertisement packets received
**type**\: int
**range:** 0..4294967295
.. attribute:: advert_packets_sent
Number of advertisement packets sent
**type**\: int
**range:** 0..4294967295
.. attribute:: conflict_source_ip_address_received
Number of packets received from a conflicting Source IP address
**type**\: int
**range:** 0..4294967295
.. attribute:: inoperational_group_received
Number of packets received for an inoperational group
**type**\: int
**range:** 0..4294967295
.. attribute:: invalid_operation_code_received
Number of packets received with invalid operation code
**type**\: int
**range:** 0..4294967295
.. attribute:: invalid_version_received
Number of packets received with invalid version
**type**\: int
**range:** 0..4294967295
.. attribute:: long_packets_received
Number of packets received that were too Long
**type**\: int
**range:** 0..4294967295
.. attribute:: short_packets_received
Number of packets received that were too short
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_group_received
Number of packets received for an unknown group id
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.advert_packets_received = None
self.advert_packets_sent = None
self.conflict_source_ip_address_received = None
self.inoperational_group_received = None
self.invalid_operation_code_received = None
self.invalid_version_received = None
self.long_packets_received = None
self.short_packets_received = None
self.unknown_group_received = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-hsrp-oper:statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.advert_packets_received is not None:
return True
if self.advert_packets_sent is not None:
return True
if self.conflict_source_ip_address_received is not None:
return True
if self.inoperational_group_received is not None:
return True
if self.invalid_operation_code_received is not None:
return True
if self.invalid_version_received is not None:
return True
if self.long_packets_received is not None:
return True
if self.short_packets_received is not None:
return True
if self.unknown_group_received is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.Interfaces.Interface.Statistics']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:interfaces/Cisco-IOS-XR-ipv4-hsrp-oper:interface[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.interface is not None:
return True
if self.statistics is not None and self.statistics._has_data():
return True
if self.use_bia_flag is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4/Cisco-IOS-XR-ipv4-hsrp-oper:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4.Interfaces']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv4'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.groups is not None and self.groups._has_data():
return True
if self.interfaces is not None and self.interfaces._has_data():
return True
if self.tracked_interfaces is not None and self.tracked_interfaces._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv4']['meta_info']
class MgoSessions(object):
"""
HSRP MGO session table
.. attribute:: mgo_session
HSRP MGO session
**type**\: list of :py:class:`MgoSession <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.MgoSessions.MgoSession>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.mgo_session = YList()
self.mgo_session.parent = self
self.mgo_session.name = 'mgo_session'
class MgoSession(object):
"""
HSRP MGO session
.. attribute:: session_name <key>
HSRP MGO session name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: primary_af_name
Address family of primary session
**type**\: :py:class:`HsrpBAfEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.HsrpBAfEnum>`
.. attribute:: primary_session_interface
Interface of primary session
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: primary_session_name
Session Name
**type**\: str
**length:** 0..16
.. attribute:: primary_session_number
Group number of primary session
**type**\: int
**range:** 0..4294967295
.. attribute:: primary_session_state
State of primary session
**type**\: :py:class:`StandbyGrpStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.StandbyGrpStateEnum>`
.. attribute:: slave
List of slaves following this primary session
**type**\: list of :py:class:`Slave <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.MgoSessions.MgoSession.Slave>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.session_name = None
self.primary_af_name = None
self.primary_session_interface = None
self.primary_session_name = None
self.primary_session_number = None
self.primary_session_state = None
self.slave = YList()
self.slave.parent = self
self.slave.name = 'slave'
class Slave(object):
"""
List of slaves following this primary session
.. attribute:: slave_group_interface
Interface of slave group
**type**\: str
**length:** 0..64
.. attribute:: slave_group_number
Group number of slave group
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.slave_group_interface = None
self.slave_group_number = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-hsrp-oper:slave'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.slave_group_interface is not None:
return True
if self.slave_group_number is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.MgoSessions.MgoSession.Slave']['meta_info']
@property
def _common_path(self):
if self.session_name is None:
raise YPYModelError('Key property session_name is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:mgo-sessions/Cisco-IOS-XR-ipv4-hsrp-oper:mgo-session[Cisco-IOS-XR-ipv4-hsrp-oper:session-name = ' + str(self.session_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.session_name is not None:
return True
if self.primary_af_name is not None:
return True
if self.primary_session_interface is not None:
return True
if self.primary_session_name is not None:
return True
if self.primary_session_number is not None:
return True
if self.primary_session_state is not None:
return True
if self.slave is not None:
for child_ref in self.slave:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.MgoSessions.MgoSession']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:mgo-sessions'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.mgo_session is not None:
for child_ref in self.mgo_session:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.MgoSessions']['meta_info']
class Ipv6(object):
"""
IPv6 HSRP information
.. attribute:: groups
The HSRP standby group table
**type**\: :py:class:`Groups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.Groups>`
.. attribute:: interfaces
The HSRP interface information table
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.Interfaces>`
.. attribute:: tracked_interfaces
The HSRP tracked interfaces table
**type**\: :py:class:`TrackedInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.TrackedInterfaces>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.groups = Hsrp.Ipv6.Groups()
self.groups.parent = self
self.interfaces = Hsrp.Ipv6.Interfaces()
self.interfaces.parent = self
self.tracked_interfaces = Hsrp.Ipv6.TrackedInterfaces()
self.tracked_interfaces.parent = self
class TrackedInterfaces(object):
"""
The HSRP tracked interfaces table
.. attribute:: tracked_interface
An HSRP tracked interface entry
**type**\: list of :py:class:`TrackedInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.TrackedInterfaces.TrackedInterface>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.tracked_interface = YList()
self.tracked_interface.parent = self
self.tracked_interface.name = 'tracked_interface'
class TrackedInterface(object):
"""
An HSRP tracked interface entry
.. attribute:: interface_name <key>
The interface name of the interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: group_number <key>
The HSRP group number
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: tracked_interface_name <key>
The interface name of the interface being tracked
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: hsrp_group_number
HSRP Group number
**type**\: int
**range:** 0..4294967295
.. attribute:: interface
IM Interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: interface_up_flag
Interface up flag
**type**\: bool
.. attribute:: is_object
Tracked Object Flag
**type**\: bool
.. attribute:: priority_decrement
Priority weighting
**type**\: int
**range:** 0..4294967295
.. attribute:: tracked_interface_name_xr
Tracked Interface Name
**type**\: str
**length:** 0..64
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.interface_name = None
self.group_number = None
self.tracked_interface_name = None
self.hsrp_group_number = None
self.interface = None
self.interface_up_flag = None
self.is_object = None
self.priority_decrement = None
self.tracked_interface_name_xr = None
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
if self.group_number is None:
raise YPYModelError('Key property group_number is None')
if self.tracked_interface_name is None:
raise YPYModelError('Key property tracked_interface_name is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interfaces/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interface[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + '][Cisco-IOS-XR-ipv4-hsrp-oper:group-number = ' + str(self.group_number) + '][Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interface-name = ' + str(self.tracked_interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.group_number is not None:
return True
if self.tracked_interface_name is not None:
return True
if self.hsrp_group_number is not None:
return True
if self.interface is not None:
return True
if self.interface_up_flag is not None:
return True
if self.is_object is not None:
return True
if self.priority_decrement is not None:
return True
if self.tracked_interface_name_xr is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv6.TrackedInterfaces.TrackedInterface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6/Cisco-IOS-XR-ipv4-hsrp-oper:tracked-interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.tracked_interface is not None:
for child_ref in self.tracked_interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv6.TrackedInterfaces']['meta_info']
class Groups(object):
"""
The HSRP standby group table
.. attribute:: group
An HSRP standby group
**type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.Groups.Group>`
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.group = YList()
self.group.parent = self
self.group.name = 'group'
class Group(object):
    """
    An HSRP standby group (list entry, operational data).

    Keys:
        interface_name (str): the interface name (key).
        group_number (int): the HSRP group number (key).

    Scalar leaves (all initialised to None; filled from the device):
        active_ip_address / active_ipv6_address / standby_ip_address /
            standby_ipv6_address (str): active and standby router addresses.
        active_mac_address / standby_mac_address / virtual_mac_address (str):
            router interface and virtual MAC addresses.
        active_priority / configured_priority / router_priority (int, 0..255).
        version (int, 0..255): HSRP protocol version.
        hsrp_group_number (int): HSRP group number leaf.
        hsrp_router_state: HSRP router state (StandbyGrpStateEnum).
        address_family: address family (HsrpBAfEnum).
        authentication_string (str, length 0..9).
        session_name / followed_session_name (str, length 0..16).
        interface (str): IM interface; interface_name_xr (str, length 0..64).
        is_slave (bool): group is a slave group; slaves (int): followers.
        hello_time / hold_time / learned_hello_time / learned_hold_time /
            min_delay_time / reload_delay_time (int, milliseconds).
        preempt_enabled (bool); preempt_delay / preempt_timer_secs (int, secs).
        active_timer_flag / delay_timer_flag / hello_timer_flag /
            standby_timer_flag (bool) with matching *_secs / *_msecs counters.
        current_state_timer_secs / state_change_count (int).
        bfd_enabled (bool); bfd_interface / bfd_peer_ip_address /
            bfd_peer_ipv6_address (str); bfd_interval / bfd_multiplier (int);
            bfd_session_state (HsrpBfdSessionStateEnum).
        configured_mac_address / configured_timers / use_bia_configured /
            use_configured_timers / use_configured_virtual_ip /
            redirects_disabled (bool).
        tracked_interface_count / tracked_interface_up_count (int).
        virtual_ip_address (str): configured virtual IPv4 address.
        virtual_linklocal_ipv6_address (str): virtual linklocal IPv6 address.
        virtual_mac_address_state (HsrpVmacStateEnum).

    Child containers: coup_received_time, coup_sent_time,
        resign_received_time, resign_sent_time (send/receive timestamps);
        statistics (per-group counters).
    Child lists: global_address (GlobalAddress entries),
        state_change_history (StateChangeHistory entries);
        secondary_address (leaf-list of IPv4 address strings).

    NOTE(review): condensed from the generated per-leaf documentation;
    full type, range and pattern metadata is available via _meta_info().
    """
    # YANG module metadata for this generated entity.
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'
def __init__(self):
    """Initialise all leaves to None and wire up child containers/lists."""
    self.parent = None
    # List keys.
    self.interface_name = None
    self.group_number = None
    # Scalar operational leaves, None until read from the device.
    self.active_ip_address = None
    self.active_ipv6_address = None
    self.active_mac_address = None
    self.active_priority = None
    self.active_timer_flag = None
    self.active_timer_msecs = None
    self.active_timer_secs = None
    self.address_family = None
    self.authentication_string = None
    self.bfd_enabled = None
    self.bfd_interface = None
    self.bfd_interval = None
    self.bfd_multiplier = None
    self.bfd_peer_ip_address = None
    self.bfd_peer_ipv6_address = None
    self.bfd_session_state = None
    self.configured_mac_address = None
    self.configured_priority = None
    self.configured_timers = None
    # Child timestamp containers; parent pointers enable path derivation.
    self.coup_received_time = Hsrp.Ipv6.Groups.Group.CoupReceivedTime()
    self.coup_received_time.parent = self
    self.coup_sent_time = Hsrp.Ipv6.Groups.Group.CoupSentTime()
    self.coup_sent_time.parent = self
    self.current_state_timer_secs = None
    self.delay_timer_flag = None
    self.delay_timer_msecs = None
    self.delay_timer_secs = None
    self.followed_session_name = None
    # Child list of global virtual IPv6 addresses.
    self.global_address = YList()
    self.global_address.parent = self
    self.global_address.name = 'global_address'
    self.hello_time = None
    self.hello_timer_flag = None
    self.hello_timer_msecs = None
    self.hello_timer_secs = None
    self.hold_time = None
    self.hsrp_group_number = None
    self.hsrp_router_state = None
    self.interface = None
    self.interface_name_xr = None
    self.is_slave = None
    self.learned_hello_time = None
    self.learned_hold_time = None
    self.min_delay_time = None
    self.preempt_delay = None
    self.preempt_enabled = None
    self.preempt_timer_secs = None
    self.redirects_disabled = None
    self.reload_delay_time = None
    self.resign_received_time = Hsrp.Ipv6.Groups.Group.ResignReceivedTime()
    self.resign_received_time.parent = self
    self.resign_sent_time = Hsrp.Ipv6.Groups.Group.ResignSentTime()
    self.resign_sent_time.parent = self
    self.router_priority = None
    # Leaf-list of secondary virtual IP addresses.
    self.secondary_address = YLeafList()
    self.secondary_address.parent = self
    self.secondary_address.name = 'secondary_address'
    self.session_name = None
    self.slaves = None
    self.standby_ip_address = None
    self.standby_ipv6_address = None
    self.standby_mac_address = None
    self.standby_timer_flag = None
    self.standby_timer_msecs = None
    self.standby_timer_secs = None
    self.state_change_count = None
    # Child list recording past state transitions.
    self.state_change_history = YList()
    self.state_change_history.parent = self
    self.state_change_history.name = 'state_change_history'
    # Child container of per-group counters.
    self.statistics = Hsrp.Ipv6.Groups.Group.Statistics()
    self.statistics.parent = self
    self.tracked_interface_count = None
    self.tracked_interface_up_count = None
    self.use_bia_configured = None
    self.use_configured_timers = None
    self.use_configured_virtual_ip = None
    self.version = None
    self.virtual_ip_address = None
    self.virtual_linklocal_ipv6_address = None
    self.virtual_mac_address = None
    self.virtual_mac_address_state = None
class ResignSentTime(object):
    """
    Timestamp of the last resign packet sent.

    .. attribute:: seconds
    Seconds portion (int, 0..4294967295)
    .. attribute:: nanoseconds
    Nanoseconds portion (int, 0..4294967295)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:resign-sent-time'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when either timestamp component is set."""
        return self.nanoseconds is not None or self.seconds is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.ResignSentTime']['meta_info']
class ResignReceivedTime(object):
    """
    Timestamp of the last resign packet received.

    .. attribute:: seconds
    Seconds portion (int, 0..4294967295)
    .. attribute:: nanoseconds
    Nanoseconds portion (int, 0..4294967295)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:resign-received-time'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when either timestamp component is set."""
        return self.nanoseconds is not None or self.seconds is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.ResignReceivedTime']['meta_info']
class CoupSentTime(object):
    """
    Timestamp of the last coup packet sent.

    .. attribute:: seconds
    Seconds portion (int, 0..4294967295)
    .. attribute:: nanoseconds
    Nanoseconds portion (int, 0..4294967295)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:coup-sent-time'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when either timestamp component is set."""
        return self.nanoseconds is not None or self.seconds is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.CoupSentTime']['meta_info']
class CoupReceivedTime(object):
    """
    Timestamp of the last coup packet received.

    .. attribute:: seconds
    Seconds portion (int, 0..4294967295)
    .. attribute:: nanoseconds
    Nanoseconds portion (int, 0..4294967295)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.nanoseconds = None
        self.seconds = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:coup-received-time'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when either timestamp component is set."""
        return self.nanoseconds is not None or self.seconds is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.CoupReceivedTime']['meta_info']
class Statistics(object):
    """
    Per-group HSRP packet and state-transition counters.

    Every attribute is an int counter (range 0..4294967295), None until
    populated from operational data: active_transitions,
    auth_fail_received, coup_packets_received, coup_packets_sent,
    hello_packets_received, hello_packets_sent (bundles only),
    init_transitions, invalid_timer_received, learn_transitions,
    listen_transitions, mismatch_virtual_ip_address_received,
    resign_packets_received, resign_packets_sent, speak_transitions,
    standby_transitions.
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.active_transitions = None
        self.auth_fail_received = None
        self.coup_packets_received = None
        self.coup_packets_sent = None
        self.hello_packets_received = None
        self.hello_packets_sent = None
        self.init_transitions = None
        self.invalid_timer_received = None
        self.learn_transitions = None
        self.listen_transitions = None
        self.mismatch_virtual_ip_address_received = None
        self.resign_packets_received = None
        self.resign_packets_sent = None
        self.speak_transitions = None
        self.standby_transitions = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:statistics'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when any counter has been populated."""
        counters = (
            self.active_transitions,
            self.auth_fail_received,
            self.coup_packets_received,
            self.coup_packets_sent,
            self.hello_packets_received,
            self.hello_packets_sent,
            self.init_transitions,
            self.invalid_timer_received,
            self.learn_transitions,
            self.listen_transitions,
            self.mismatch_virtual_ip_address_received,
            self.resign_packets_received,
            self.resign_packets_sent,
            self.speak_transitions,
            self.standby_transitions,
        )
        return any(counter is not None for counter in counters)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.Statistics']['meta_info']
class GlobalAddress(object):
    """
    A global virtual IPv6 address entry.

    .. attribute:: ipv6_address
    IPv6 address (str)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.ipv6_address = None

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:global-address'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when the address leaf is populated."""
        return self.ipv6_address is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.GlobalAddress']['meta_info']
class StateChangeHistory(object):
    """
    One recorded HSRP state change.

    .. attribute:: old_state
    Old state (StandbyGrpStateEnum)
    .. attribute:: new_state
    New state (StandbyGrpStateEnum)
    .. attribute:: reason
    Reason for the change (HsrpStateChangeReasonEnum)
    .. attribute:: time
    When the change happened (Time container)
    """
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.new_state = None
        self.old_state = None
        self.reason = None
        # Child timestamp container, wired back to this entry.
        self.time = Hsrp.Ipv6.Groups.Group.StateChangeHistory.Time()
        self.time.parent = self

    class Time(object):
        """
        Timestamp (seconds/nanoseconds) of a recorded state change.
        """
        _prefix = 'ipv4-hsrp-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.nanoseconds = None
            self.seconds = None

        @property
        def _common_path(self):
            # A relative path needs a parent entity to anchor it.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:time'

        def is_config(self):
            """Always False: operational (read-only) data."""
            return False

        def _has_data(self):
            """True when either timestamp component is set."""
            return self.nanoseconds is not None or self.seconds is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
            return meta._meta_table['Hsrp.Ipv6.Groups.Group.StateChangeHistory.Time']['meta_info']

    @property
    def _common_path(self):
        # A relative path needs a parent entity to anchor it.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:state-change-history'

    def is_config(self):
        """Always False: operational (read-only) data."""
        return False

    def _has_data(self):
        """True when any leaf or the timestamp container is populated."""
        if any(v is not None for v in (self.new_state, self.old_state, self.reason)):
            return True
        return self.time is not None and self.time._has_data()

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Ipv6.Groups.Group.StateChangeHistory']['meta_info']
@property
def _common_path(self):
    """Absolute path of this list entry, keyed by interface name and group number."""
    if self.interface_name is None:
        raise YPYModelError('Key property interface_name is None')
    if self.group_number is None:
        raise YPYModelError('Key property group_number is None')
    return ('/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp'
            '/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6'
            '/Cisco-IOS-XR-ipv4-hsrp-oper:groups'
            '/Cisco-IOS-XR-ipv4-hsrp-oper:group'
            '[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + ']'
            '[Cisco-IOS-XR-ipv4-hsrp-oper:group-number = ' + str(self.group_number) + ']')
def is_config(self):
    """Always False: this entity models operational (read-only) data."""
    return False
def _has_data(self):
    """Report whether any leaf, child container or child list of this
    group entry is populated."""
    scalar_leaves = (
        self.interface_name,
        self.group_number,
        self.active_ip_address,
        self.active_ipv6_address,
        self.active_mac_address,
        self.active_priority,
        self.active_timer_flag,
        self.active_timer_msecs,
        self.active_timer_secs,
        self.address_family,
        self.authentication_string,
        self.bfd_enabled,
        self.bfd_interface,
        self.bfd_interval,
        self.bfd_multiplier,
        self.bfd_peer_ip_address,
        self.bfd_peer_ipv6_address,
        self.bfd_session_state,
        self.configured_mac_address,
        self.configured_priority,
        self.configured_timers,
        self.current_state_timer_secs,
        self.delay_timer_flag,
        self.delay_timer_msecs,
        self.delay_timer_secs,
        self.followed_session_name,
        self.hello_time,
        self.hello_timer_flag,
        self.hello_timer_msecs,
        self.hello_timer_secs,
        self.hold_time,
        self.hsrp_group_number,
        self.hsrp_router_state,
        self.interface,
        self.interface_name_xr,
        self.is_slave,
        self.learned_hello_time,
        self.learned_hold_time,
        self.min_delay_time,
        self.preempt_delay,
        self.preempt_enabled,
        self.preempt_timer_secs,
        self.redirects_disabled,
        self.reload_delay_time,
        self.router_priority,
        self.session_name,
        self.slaves,
        self.standby_ip_address,
        self.standby_ipv6_address,
        self.standby_mac_address,
        self.standby_timer_flag,
        self.standby_timer_msecs,
        self.standby_timer_secs,
        self.state_change_count,
        self.tracked_interface_count,
        self.tracked_interface_up_count,
        self.use_bia_configured,
        self.use_configured_timers,
        self.use_configured_virtual_ip,
        self.version,
        self.virtual_ip_address,
        self.virtual_linklocal_ipv6_address,
        self.virtual_mac_address,
        self.virtual_mac_address_state,
    )
    if any(leaf is not None for leaf in scalar_leaves):
        return True
    # Child containers contribute only when they themselves hold data.
    containers = (
        self.coup_received_time,
        self.coup_sent_time,
        self.resign_received_time,
        self.resign_sent_time,
        self.statistics,
    )
    if any(c is not None and c._has_data() for c in containers):
        return True
    # Child object lists: populated when any entry holds data.
    for child_list in (self.global_address, self.state_change_history):
        if child_list is not None and any(ch._has_data() for ch in child_list):
            return True
    # Leaf-list of secondary addresses: any non-None entry counts.
    if self.secondary_address is not None:
        if any(addr is not None for addr in self.secondary_address):
            return True
    return False
@staticmethod
def _meta_info():
    """Return the generated meta information for the Group list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
    table = meta._meta_table
    return table['Hsrp.Ipv6.Groups.Group']['meta_info']
@property
def _common_path(self):
    """Absolute YANG path of the groups container."""
    return ('/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp'
            '/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6'
            '/Cisco-IOS-XR-ipv4-hsrp-oper:groups')
def is_config(self):
    """Always False: this entity models operational (read-only) data."""
    return False
def _has_data(self):
    """True when at least one child Group entry carries data."""
    if self.group is None:
        return False
    return any(entry._has_data() for entry in self.group)
@staticmethod
def _meta_info():
    """Return the generated meta information for the Groups container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
    table = meta._meta_table
    return table['Hsrp.Ipv6.Groups']['meta_info']
class Interfaces(object):
    """
    The HSRP interface information table
    .. attribute:: interface
    A HSRP interface entry
    **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_hsrp_oper.Hsrp.Ipv6.Interfaces.Interface>`
    """

    # YANG module metadata for this generated entity.
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YList of per-interface entries; each child points back at this
        # container so relative YANG paths can be derived.
        self.interface = YList()
        self.interface.parent = self
        self.interface.name = 'interface'
class Interface(object):
    """
    A HSRP interface entry.

    .. attribute:: interface_name <key>
    The interface name (str, interface-name pattern)
    .. attribute:: interface
    IM Interface (str, interface-name pattern)
    .. attribute:: statistics
    HSRP Interface Statistics (Statistics container)
    .. attribute:: use_bia_flag
    Use burnt in mac address flag (bool)

    NOTE(review): condensed from the generated per-leaf documentation;
    full pattern metadata is available via _meta_info().
    """
    # YANG module metadata for this generated entity.
    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # List key.
        self.interface_name = None
        self.interface = None
        # Child container of per-interface counters; parent pointer wired
        # for path derivation.
        self.statistics = Hsrp.Ipv6.Interfaces.Interface.Statistics()
        self.statistics.parent = self
        self.use_bia_flag = None
class Statistics(object):
"""
HSRP Interface Statistics
.. attribute:: advert_packets_received
Number of advertisement packets received
**type**\: int
**range:** 0..4294967295
.. attribute:: advert_packets_sent
Number of advertisement packets sent
**type**\: int
**range:** 0..4294967295
.. attribute:: conflict_source_ip_address_received
Number of packets received from a conflicting Source IP address
**type**\: int
**range:** 0..4294967295
.. attribute:: inoperational_group_received
Number of packets received for an inoperational group
**type**\: int
**range:** 0..4294967295
.. attribute:: invalid_operation_code_received
Number of packets received with invalid operation code
**type**\: int
**range:** 0..4294967295
.. attribute:: invalid_version_received
Number of packets received with invalid version
**type**\: int
**range:** 0..4294967295
.. attribute:: long_packets_received
Number of packets received that were too Long
**type**\: int
**range:** 0..4294967295
.. attribute:: short_packets_received
Number of packets received that were too short
**type**\: int
**range:** 0..4294967295
.. attribute:: unknown_group_received
Number of packets received for an unknown group id
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ipv4-hsrp-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.advert_packets_received = None
self.advert_packets_sent = None
self.conflict_source_ip_address_received = None
self.inoperational_group_received = None
self.invalid_operation_code_received = None
self.invalid_version_received = None
self.long_packets_received = None
self.short_packets_received = None
self.unknown_group_received = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-hsrp-oper:statistics'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.advert_packets_received is not None:
return True
if self.advert_packets_sent is not None:
return True
if self.conflict_source_ip_address_received is not None:
return True
if self.inoperational_group_received is not None:
return True
if self.invalid_operation_code_received is not None:
return True
if self.invalid_version_received is not None:
return True
if self.long_packets_received is not None:
return True
if self.short_packets_received is not None:
return True
if self.unknown_group_received is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv6.Interfaces.Interface.Statistics']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6/Cisco-IOS-XR-ipv4-hsrp-oper:interfaces/Cisco-IOS-XR-ipv4-hsrp-oper:interface[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface_name is not None:
return True
if self.interface is not None:
return True
if self.statistics is not None and self.statistics._has_data():
return True
if self.use_bia_flag is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv6.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6/Cisco-IOS-XR-ipv4-hsrp-oper:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
return meta._meta_table['Hsrp.Ipv6.Interfaces']['meta_info']
@property
def _common_path(self):
    """Absolute YANG path of the IPv6 HSRP operational container."""
    return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:ipv6'

def is_config(self):
    """Return False: this subtree holds operational (read-only) data."""
    return False

def _has_data(self):
    """Return True when any child container reports data."""
    for child in (self.groups, self.interfaces, self.tracked_interfaces):
        if child is not None and child._has_data():
            return True
    return False

@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency on the generated meta tables.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
    return meta._meta_table['Hsrp.Ipv6']['meta_info']
class BfdSessions(object):
    """The table of HSRP BFD Sessions.

    .. attribute:: bfd_session
        An HSRP BFD Session
        **type**\: list of :py:class:`BfdSession`
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        # YANG list of BFD session entries.
        self.bfd_session = YList()
        self.bfd_session.parent = self
        self.bfd_session.name = 'bfd_session'

    class BfdSession(object):
        """An HSRP BFD Session.

        .. attribute:: interface_name <key>
            The interface name (IOS-XR interface-name pattern)
            **type**\: str
        .. attribute:: ip_address <key>
            Destination IP Address of BFD Session (IPv4 dotted-quad or
            IPv6 textual form)
            **type**\: str
        .. attribute:: bfd_interface_name
            BFD Interface Name
            **type**\: str, **length:** 0..64
        .. attribute:: bfd_interval
            BFD packet send interval
            **type**\: int, **range:** 0..4294967295
        .. attribute:: bfd_multiplier
            BFD multiplier
            **type**\: int, **range:** 0..4294967295
        .. attribute:: bfd_session_state
            BFD session state
            **type**\: HsrpBfdSessionStateEnum
        .. attribute:: destination_address
            BFD destination address (IPv4)
            **type**\: str
        .. attribute:: destination_ipv6_address
            BFD IPv6 destination address
            **type**\: str
        .. attribute:: group
            HSRP Groups tracking the BFD session
            **type**\: list of :py:class:`Group`
        .. attribute:: session_address_family
            Session Address family
            **type**\: HsrpBAfEnum
        """

        _prefix = 'ipv4-hsrp-oper'
        _revision = '2015-11-09'

        # All scalar leaves of this entry; single source of truth for
        # __init__ and _has_data.
        _LEAVES = (
            'interface_name',
            'ip_address',
            'bfd_interface_name',
            'bfd_interval',
            'bfd_multiplier',
            'bfd_session_state',
            'destination_address',
            'destination_ipv6_address',
            'session_address_family',
        )

        def __init__(self):
            self.parent = None
            for leaf in self._LEAVES:
                setattr(self, leaf, None)
            self.group = YList()
            self.group.parent = self
            self.group.name = 'group'

        class Group(object):
            """HSRP Groups tracking the BFD session.

            .. attribute:: hsrp_group_number
                HSRP Group number
                **type**\: int, **range:** 0..4294967295
            .. attribute:: interface_name
                Interface Name
                **type**\: str, **length:** 0..64
            """

            _prefix = 'ipv4-hsrp-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.hsrp_group_number = None
                self.interface_name = None

            @property
            def _common_path(self):
                # This keyless inner list derives its path from its parent.
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-ipv4-hsrp-oper:group'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                return (self.hsrp_group_number is not None or
                        self.interface_name is not None)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
                return meta._meta_table['Hsrp.BfdSessions.BfdSession.Group']['meta_info']

        @property
        def _common_path(self):
            # Both YANG list keys are mandatory to form the path.
            if self.interface_name is None:
                raise YPYModelError('Key property interface_name is None')
            if self.ip_address is None:
                raise YPYModelError('Key property ip_address is None')
            return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:bfd-sessions/Cisco-IOS-XR-ipv4-hsrp-oper:bfd-session[Cisco-IOS-XR-ipv4-hsrp-oper:interface-name = ' + str(self.interface_name) + '][Cisco-IOS-XR-ipv4-hsrp-oper:ip-address = ' + str(self.ip_address) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # Any populated scalar leaf, or any populated tracking-group
            # child, means this entry carries data.
            if any(getattr(self, leaf) is not None for leaf in self._LEAVES):
                return True
            if self.group is not None:
                return any(child._has_data() for child in self.group)
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
            return meta._meta_table['Hsrp.BfdSessions.BfdSession']['meta_info']

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:bfd-sessions'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        if self.bfd_session is not None:
            return any(entry._has_data() for entry in self.bfd_session)
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.BfdSessions']['meta_info']
class Summary(object):
    """HSRP summary statistics.

    Every attribute is a uint32 gauge (**range:** 0..4294967295) counting
    HSRP entities in a particular state:

    - ``bfd_session_inactive`` / ``bfd_sessions_down`` /
      ``bfd_sessions_up``: BFD sessions per state.
    - ``interfaces_ipv{4,6}_state_{down,up}``: HSRP interfaces with
      IPv4/IPv6 capabilities in DOWN/UP state.
    - ``ipv{4,6}_sessions_<state>``: sessions in ACTIVE/INIT/LEARN/
      LISTEN/SPEAK/STANDBY state.
    - ``ipv{4,6}_slaves_<state>``: slave groups per state.
    - ``ipv{4,6}_virtual_ip_addresses_<state>_{down,up}``: DOWN/UP
      virtual IP addresses on groups in each state.
    - ``tracked_interfaces_ipv{4,6}_state_{down,up}``: tracked
      interfaces per state.
    - ``tracked_objects_{down,up}``: tracked objects per state.
    """

    _prefix = 'ipv4-hsrp-oper'
    _revision = '2015-11-09'

    # Every leaf of this container, listed once so __init__ and _has_data
    # cannot drift apart.
    _LEAVES = (
        'bfd_session_inactive',
        'bfd_sessions_down',
        'bfd_sessions_up',
        'interfaces_ipv4_state_down',
        'interfaces_ipv4_state_up',
        'interfaces_ipv6_state_down',
        'interfaces_ipv6_state_up',
        'ipv4_sessions_active',
        'ipv4_sessions_init',
        'ipv4_sessions_learn',
        'ipv4_sessions_listen',
        'ipv4_sessions_speak',
        'ipv4_sessions_standby',
        'ipv4_slaves_active',
        'ipv4_slaves_init',
        'ipv4_slaves_learn',
        'ipv4_slaves_listen',
        'ipv4_slaves_speak',
        'ipv4_slaves_standby',
        'ipv4_virtual_ip_addresses_active_down',
        'ipv4_virtual_ip_addresses_active_up',
        'ipv4_virtual_ip_addresses_init_down',
        'ipv4_virtual_ip_addresses_init_up',
        'ipv4_virtual_ip_addresses_learn_down',
        'ipv4_virtual_ip_addresses_learn_up',
        'ipv4_virtual_ip_addresses_listen_down',
        'ipv4_virtual_ip_addresses_listen_up',
        'ipv4_virtual_ip_addresses_speak_down',
        'ipv4_virtual_ip_addresses_speak_up',
        'ipv4_virtual_ip_addresses_standby_down',
        'ipv4_virtual_ip_addresses_standby_up',
        'ipv6_sessions_active',
        'ipv6_sessions_init',
        'ipv6_sessions_learn',
        'ipv6_sessions_listen',
        'ipv6_sessions_speak',
        'ipv6_sessions_standby',
        'ipv6_slaves_active',
        'ipv6_slaves_init',
        'ipv6_slaves_learn',
        'ipv6_slaves_listen',
        'ipv6_slaves_speak',
        'ipv6_slaves_standby',
        'ipv6_virtual_ip_addresses_active_down',
        'ipv6_virtual_ip_addresses_active_up',
        'ipv6_virtual_ip_addresses_init_down',
        'ipv6_virtual_ip_addresses_init_up',
        'ipv6_virtual_ip_addresses_learn_down',
        'ipv6_virtual_ip_addresses_learn_up',
        'ipv6_virtual_ip_addresses_listen_down',
        'ipv6_virtual_ip_addresses_listen_up',
        'ipv6_virtual_ip_addresses_speak_down',
        'ipv6_virtual_ip_addresses_speak_up',
        'ipv6_virtual_ip_addresses_standby_down',
        'ipv6_virtual_ip_addresses_standby_up',
        'tracked_interfaces_ipv4_state_down',
        'tracked_interfaces_ipv4_state_up',
        'tracked_interfaces_ipv6_state_down',
        'tracked_interfaces_ipv6_state_up',
        'tracked_objects_down',
        'tracked_objects_up',
    )

    def __init__(self):
        self.parent = None
        for leaf in self._LEAVES:
            setattr(self, leaf, None)

    @property
    def _common_path(self):
        return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp/Cisco-IOS-XR-ipv4-hsrp-oper:summary'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Data is present as soon as any gauge leaf is populated.
        return any(getattr(self, leaf) is not None for leaf in self._LEAVES)

    @staticmethod
    def _meta_info():
        # Lazy import avoids a circular dependency on the meta tables.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
        return meta._meta_table['Hsrp.Summary']['meta_info']
@property
def _common_path(self):
    """Root container of the HSRP operational model."""
    return '/Cisco-IOS-XR-ipv4-hsrp-oper:hsrp'

def is_config(self):
    """Return False: this subtree holds operational (read-only) data."""
    return False

def _has_data(self):
    """Return True when any child container reports data."""
    children = (self.bfd_sessions, self.ipv4, self.ipv6,
                self.mgo_sessions, self.summary)
    for child in children:
        if child is not None and child._has_data():
            return True
    return False

@staticmethod
def _meta_info():
    # Lazy import avoids a circular dependency on the generated meta tables.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_hsrp_oper as meta
    return meta._meta_table['Hsrp']['meta_info']
| apache-2.0 |
FrancoCotter/ReactTimerAPP | node_modules/node-gyp/gyp/pylib/gyp/common.py | 1292 | 20063 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import with_statement
import collections
import errno
import filecmp
import os.path
import re
import tempfile
import sys
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
  """Decorator caching a function's results keyed by positional arguments.

  Arguments must be hashable; keyword arguments are not supported.
  """

  def __init__(self, func):
    self.func = func
    self.cache = {}

  def __call__(self, *args):
    if args in self.cache:
      return self.cache[args]
    result = self.func(*args)
    self.cache[args] = result
    return result
class GypError(Exception):
  """User-facing error.

  Raised for conditions that should be reported to the user rather than
  surface as a traceback; the main entry point catches and displays it.
  """
  pass
def ExceptionAppend(e, msg):
  """Append a message to the given exception's message (mutates e.args)."""
  args = e.args
  if not args:
    e.args = (msg,)
  else:
    # Fold msg into the first arg; keep any remaining args untouched.
    e.args = ('%s %s' % (args[0], msg),) + args[1:]
def FindQualifiedTargets(target, qualified_list):
  """Return the entries of qualified_list whose target name equals |target|."""
  matches = []
  for qualified in qualified_list:
    if ParseQualifiedTarget(qualified)[1] == target:
      matches.append(qualified)
  return matches
def ParseQualifiedTarget(target):
  """Split a qualified target into [build_file, target, toolset].

  Missing components come back as None. rsplit disambiguates the Windows
  drive-letter colon from the build_file:target separator.
  """
  build_file = None
  if ':' in target:
    build_file, target = target.rsplit(':', 1)
  toolset = None
  if '#' in target:
    target, toolset = target.rsplit('#', 1)
  return [build_file, target, toolset]
def ResolveTarget(build_file, target, toolset):
  """Resolve a qualified target into canonical [build_file, target, toolset].

  |build_file| is the file relative to which |target| is defined, |target|
  is the qualified target, and |toolset| is the default toolset for the
  target. The returned build file is fully defined — absolute or relative
  to the current directory.
  """
  parsed_build_file, target, parsed_toolset = ParseQualifiedTarget(target)
  if parsed_build_file:
    if build_file:
      # parsed_build_file is relative to the directory containing
      # build_file, so rebase it there. os.path.join returns absolute
      # parsed_build_file values as-is regardless of the current directory.
      build_file = os.path.normpath(
          os.path.join(os.path.dirname(build_file), parsed_build_file))
      # Further (to handle cases like ../cwd), make it relative to cwd.
      if not os.path.isabs(build_file):
        build_file = RelativePath(build_file, '.')
    else:
      build_file = parsed_build_file
  if parsed_toolset:
    toolset = parsed_toolset
  return [build_file, target, toolset]
def BuildFile(fully_qualified_target):
  """Return the build file component of a fully qualified target."""
  build_file, _, _ = ParseQualifiedTarget(fully_qualified_target)
  return build_file
def GetEnvironFallback(var_list, default):
  """Return the value of the first environment variable set in var_list,
  falling back to |default| when none of them is present."""
  for var in var_list:
    value = os.environ.get(var)
    if value is not None:
      return value
  return default
def QualifiedTarget(build_file, target, toolset):
  """Build the canonical qualified-target string.

  Format: /path/to/file.gyp:target_name#toolset — the toolset suffix is
  omitted when |toolset| is empty or None.
  """
  qualified = '%s:%s' % (build_file, target)
  if toolset:
    qualified = '%s#%s' % (qualified, toolset)
  return qualified
@memoize
def RelativePath(path, relative_to, follow_path_symlink=True):
  """Return a path identifying |path| relative to |relative_to|.

  Both inputs are taken as relative to the current directory. When
  |follow_path_symlink| is True (default) and |path| is a symlink, the
  returned path refers to the symlink's target; when False, to the symlink
  itself. |relative_to| is always fully resolved via realpath. Returns ''
  when the two resolve to the same location, and the absolute |path| on
  Windows when the drives differ (no cross-drive relative path exists).
  """
  # Convert to normalized (and therefore absolute paths).
  if follow_path_symlink:
    path = os.path.realpath(path)
  else:
    path = os.path.abspath(path)
  relative_to = os.path.realpath(relative_to)
  # On Windows, we can't create a relative path to a different drive, so just
  # use the absolute path.
  if sys.platform == 'win32':
    if (os.path.splitdrive(path)[0].lower() !=
        os.path.splitdrive(relative_to)[0].lower()):
      return path
  # Split the paths into components.
  path_split = path.split(os.path.sep)
  relative_to_split = relative_to.split(os.path.sep)
  # Determine how much of the prefix the two paths share. commonprefix
  # compares element-wise here because it is given lists of components,
  # not raw strings.
  prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
  # Put enough ".." components to back up out of relative_to to the common
  # prefix, and then append the part of path_split after the common prefix.
  relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
      path_split[prefix_len:]
  if len(relative_split) == 0:
    # The paths were the same.
    return ''
  # Turn it back into a string and we're done.
  return os.path.join(*relative_split)
@memoize
def InvertRelativePath(path, toplevel_dir=None):
  """Given a path like foo/bar that is relative to toplevel_dir, return
  the inverse relative path back to the toplevel_dir.

  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
  should always produce the empty string, unless the path contains symlinks.
  """
  if not path:
    return path
  toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
  # Walking from toplevel_dir/path back to toplevel_dir yields the inverse.
  return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
def FixIfRelativePath(path, relative_to):
  """Like RelativePath but returns |path| unchanged if it is absolute."""
  return path if os.path.isabs(path) else RelativePath(path, relative_to)
def UnrelativePath(path, relative_to):
  """Resolve |path| (relative to the dirname of |relative_to|) against the
  current directory.

  Assuming that |relative_to| is relative to the current directory, and |path|
  is a path relative to the dirname of |relative_to|, returns a path that
  identifies |path| relative to the current directory.
  """
  base_dir = os.path.dirname(relative_to)
  return os.path.normpath(os.path.join(base_dir, path))
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
# \t, \n, space parameter separators
# # comments
# $ expansions (quoted to always expand within one argument)
# % called out by IEEE 1003.1 XCU.2.2
# & job control
# ' quoting
# (, ) subshell execution
# *, ?, [ pathname expansion
# ; command delimiter
# <, >, | redirection
# = assignment
# {, } brace expansion (bash)
# ~ tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument (special characters listed
# in the block comment above).  It also matches the empty string, because
# "" (or '') is the only way to represent an empty string literal argument
# to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern.  _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
#   "  to prevent POSIX shells from interpreting this character for quoting
#   \  to prevent POSIX shells from interpreting this character for escaping
#   `  to prevent POSIX shells from interpreting this character for command
#      substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled.  bash does not enable history
# by default in non-interactive shells, so this is not thought to be a
# problem: "\!" would be taken literally (backslash included), which would be
# wrong, and the history character can even be changed via histchars.
# Since EncodePOSIXShellArgument is only expected to encode for
# non-interactive shells, ignoring ! is safe.
_escape = re.compile(r'(["\\`])')
def EncodePOSIXShellArgument(argument):
  """Encodes |argument| suitably for consumption by POSIX shells.

  argument may be quoted and escaped as necessary to ensure that POSIX shells
  treat the returned value as a literal representing the argument passed to
  this function.  Parameter (variable) expansions beginning with $ are allowed
  to remain intact without escaping the $, to allow the argument to contain
  references to variables to be expanded by the shell.
  """
  if not isinstance(argument, str):
    argument = str(argument)

  # Double-quote only when the _quote pattern demands it.
  quote = '"' if _quote.search(argument) else ''

  # Backslash-escape the _escape characters regardless of quoting.
  return '%s%s%s' % (quote, _escape.sub(r'\\\1', argument), quote)
def EncodePOSIXShellList(list):
  """Encodes |list| suitably for consumption by POSIX shells.

  Returns EncodePOSIXShellArgument for each item in list, and joins them
  together using the space character as an argument separator.
  """
  return ' '.join(EncodePOSIXShellArgument(argument) for argument in list)
def DeepDependencyTargets(target_dicts, roots):
  """Returns the recursive list of target dependencies.

  Walks both 'dependencies' and 'dependencies_original' of every reachable
  target; the |roots| themselves are excluded from the result.
  """
  visited = set()
  to_visit = set(roots)
  while to_visit:
    target = to_visit.pop()
    if target in visited:
      continue
    visited.add(target)
    spec = target_dicts[target]
    for key in ('dependencies', 'dependencies_original'):
      to_visit.update(spec.get(key, []))
  return list(visited - set(roots))
def BuildFileTargets(target_list, build_file):
  """From a target_list, returns the subset from the specified build_file."""
  return [target for target in target_list
          if BuildFile(target) == build_file]
def AllTargets(target_list, target_dicts, build_file):
  """Returns all targets (direct and dependencies) for the specified
  build_file.
  """
  direct = BuildFileTargets(target_list, build_file)
  return direct + DeepDependencyTargets(target_dicts, direct)
def WriteOnDiff(filename):
  """Write to a file only if the new contents differ.

  Arguments:
    filename: name of the file to potentially write to.
  Returns:
    A file like object which will write to temporary file and only overwrite
    the target if it differs (on close).
  """

  class Writer(object):
    """Wrapper around file which only covers the target if it differs."""

    def __init__(self):
      # Pick temporary file in the same directory so the final rename never
      # crosses a filesystem boundary.
      tmp_fd, self.tmp_path = tempfile.mkstemp(
          suffix='.tmp',
          prefix=os.path.split(filename)[1] + '.gyp.',
          dir=os.path.split(filename)[0])
      try:
        self.tmp_file = os.fdopen(tmp_fd, 'wb')
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

    def __getattr__(self, attrname):
      # Delegate everything else (write, flush, ...) to self.tmp_file.
      return getattr(self.tmp_file, attrname)

    def close(self):
      try:
        # Close tmp file.
        self.tmp_file.close()
        # Determine if different.
        same = False
        try:
          same = filecmp.cmp(self.tmp_path, filename, False)
        # Fixed: "except OSError, e:" is Python 2-only syntax (SyntaxError
        # on Python 3); "as e" works on Python 2.6+ and 3.x.
        except OSError as e:
          # A missing target just means "different"; anything else is real.
          if e.errno != errno.ENOENT:
            raise
        if same:
          # The new file is identical to the old one, just get rid of the new
          # one.
          os.unlink(self.tmp_path)
        else:
          # The new file is different from the old one, or there is no old
          # one.  Rename the new file to the permanent name.
          #
          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
          # file that can only be read by the owner, regardless of the umask.
          # There's no reason to not respect the umask here, which means that
          # an extra hoop is required to fetch it and reset the new file's
          # mode.
          #
          # No way to get the umask without setting a new one?  Set a safe
          # one and then set it back to the old value.
          # Fixed: 077/0666 are Python 2-only octal literals; the 0o form is
          # valid on Python 2.6+ and 3.x.
          umask = os.umask(0o77)
          os.umask(umask)
          os.chmod(self.tmp_path, 0o666 & ~umask)
          if sys.platform == 'win32' and os.path.exists(filename):
            # NOTE: on windows (but not cygwin) rename will not replace an
            # existing file, so it must be preceded with a remove.  Sadly
            # there is no way to make the switch atomic.
            os.remove(filename)
          os.rename(self.tmp_path, filename)
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

  return Writer()
def EnsureDirExists(path):
  """Make sure the directory for |path| exists."""
  directory = os.path.dirname(path)
  try:
    os.makedirs(directory)
  except OSError:
    # Most commonly the directory already exists; mirror the original
    # best-effort behavior and ignore any failure.
    pass
def GetFlavor(params):
  """Returns |params.flavor| if it's set, the system's default flavor else."""
  if 'flavor' in params:
    return params['flavor']

  # Platforms matched by exact name.
  exact = {
      'cygwin': 'win',
      'win32': 'win',
      'darwin': 'mac',
  }
  if sys.platform in exact:
    return exact[sys.platform]

  # Platforms whose sys.platform carries a version suffix.
  for prefix, flavor in (('sunos', 'solaris'),
                         ('freebsd', 'freebsd'),
                         ('openbsd', 'openbsd'),
                         ('netbsd', 'netbsd'),
                         ('aix', 'aix')):
    if sys.platform.startswith(prefix):
      return flavor

  return 'linux'
def CopyTool(flavor, out_path):
  """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
  to |out_path|, prepending a "generated" header and marking it executable."""
  # aix and solaris just need flock emulation. mac and win use more
  # complicated support scripts.
  prefix = {
      'aix': 'flock',
      'solaris': 'flock',
      'mac': 'mac',
      'win': 'win'
  }.get(flavor, None)
  if not prefix:
    return

  # Slurp input file.
  source_path = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
  with open(source_path) as source_file:
    source = source_file.readlines()

  # Add header and write it out.
  tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
  with open(tool_path, 'w') as tool_file:
    tool_file.write(
        ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))

  # Make file executable.  Fixed: 0755 is a Python 2-only octal literal and a
  # SyntaxError on Python 3; 0o755 is valid on Python 2.6+ and 3.x.
  os.chmod(tool_path, 0o755)
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
  """Return the items of |seq| with duplicates removed, preserving
  first-seen order.  |idfun| maps an item to the identity used for
  duplicate detection (defaults to the item itself)."""
  if idfun is None:
    idfun = lambda x: x
  seen = set()
  result = []
  for item in seq:
    marker = idfun(item)
    if marker not in seen:
      seen.add(marker)
      result.append(item)
  return result
# Based on http://code.activestate.com/recipes/576694/.
# collections.MutableSet moved to collections.abc in Python 3.3 and the old
# alias was removed entirely in Python 3.10, so resolve whichever location
# this interpreter provides.
try:
  from collections.abc import MutableSet as _MutableSet
except ImportError:  # Python 2
  from collections import MutableSet as _MutableSet


class OrderedSet(_MutableSet):
  """A set that remembers insertion order.

  Implemented as a dict mapping each key to a node of a circular doubly
  linked list whose sentinel is self.end.
  """

  def __init__(self, iterable=None):
    self.end = end = []
    end += [None, end, end]  # sentinel node for doubly linked list
    self.map = {}  # key --> [key, prev, next]
    if iterable is not None:
      self |= iterable

  def __len__(self):
    return len(self.map)

  def __contains__(self, key):
    return key in self.map

  def add(self, key):
    # Link a new node just before the sentinel (i.e. at the tail).
    if key not in self.map:
      end = self.end
      curr = end[1]
      curr[2] = end[1] = self.map[key] = [key, curr, end]

  def discard(self, key):
    # Unlink the node; silently a no-op when the key is absent.
    if key in self.map:
      key, prev_item, next_item = self.map.pop(key)
      prev_item[2] = next_item
      next_item[1] = prev_item

  def __iter__(self):
    end = self.end
    curr = end[2]
    while curr is not end:
      yield curr[0]
      curr = curr[2]

  def __reversed__(self):
    end = self.end
    curr = end[1]
    while curr is not end:
      yield curr[0]
      curr = curr[1]

  # The second argument is an addition that causes a pylint warning.
  def pop(self, last=True):  # pylint: disable=W0221
    """Remove and return the last (or, with last=False, first) element."""
    if not self:
      raise KeyError('set is empty')
    key = self.end[1][0] if last else self.end[2][0]
    self.discard(key)
    return key

  def __repr__(self):
    if not self:
      return '%s()' % (self.__class__.__name__,)
    return '%s(%r)' % (self.__class__.__name__, list(self))

  def __eq__(self, other):
    # Order matters when comparing two OrderedSets, but not when comparing
    # against an arbitrary iterable/set.
    if isinstance(other, OrderedSet):
      return len(self) == len(other) and list(self) == list(other)
    return set(self) == set(other)

  # Extensions to the recipe.
  def update(self, iterable):
    for i in iterable:
      if i not in self:
        self.add(i)
class CycleError(Exception):
  """An exception raised when an unexpected cycle is detected."""

  def __init__(self, nodes):
    # The set/collection of nodes involved in the cycle.
    self.nodes = nodes

  def __str__(self):
    return 'CycleError: cycle involving: %s' % (self.nodes,)
def TopologicallySorted(graph, get_edges):
  r"""Topologically sort based on a user provided edge definition.

  Args:
    graph: A list of node names.
    get_edges: A function mapping from node name to a hashable collection
               of node names which this node has outgoing edges to.
  Returns:
    A list containing all of the node in graph in topological order.
    It is assumed that calling get_edges once for each node and caching is
    cheaper than repeatedly calling get_edges.
  Raises:
    CycleError in the event of a cycle.
  Example:
    graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
    def GetEdges(node):
      return re.findall(r'\$\(([^)]+)\)', graph[node])
    print(TopologicallySorted(graph.keys(), GetEdges))
    ==>
    ['a', 'c', 'b']
  """
  # Cache edge lookups so each node's edges are computed at most once.
  get_edges = memoize(get_edges)
  visited = set()
  visiting = set()
  ordered_nodes = []
  def Visit(node):
    # Depth-first traversal; meeting a node that is still "visiting" means
    # we followed an edge back into the current path, i.e. a cycle.
    if node in visiting:
      raise CycleError(visiting)
    if node in visited:
      return
    visited.add(node)
    visiting.add(node)
    for neighbor in get_edges(node):
      Visit(neighbor)
    visiting.remove(node)
    # Prepending keeps every node ahead of the nodes it points to.
    ordered_nodes.insert(0, node)
  # sorted() makes the output deterministic for a given graph.
  for node in sorted(graph):
    Visit(node)
  return ordered_nodes
def CrossCompileRequested():
  """Return the first truthy cross-compile environment variable, mirroring
  the original or-chain (falls through to the last lookup's value)."""
  # TODO: figure out how to not build extra host objects in the
  # non-cross-compile case when this is enabled, and enable unconditionally.
  result = None
  for variable in ('GYP_CROSSCOMPILE',
                   'AR_host', 'CC_host', 'CXX_host',
                   'AR_target', 'CC_target', 'CXX_target'):
    result = os.environ.get(variable)
    if result:
      return result
  return result
| mit |
bossiernesto/ristretto-orm | test/utils/objectPool.py | 1 | 1666 | from ristrettoORM.utils.objectPool import ObjectPool,poolObj
from unittest import TestCase
# Fixture
class A:
    """Trivial fixture class used to populate the object pools under test."""

    def aaa(self):
        """Return a second, distinct sentinel value."""
        return 34

    def bleh(self):
        """Return a fixed sentinel value."""
        return 3
class AWithkwargs(object):
    """Fixture that records the keyword arguments it was created with;
    positional arguments are accepted and discarded."""

    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs
# Custom pool: size used by CustomPool below; the tests assert this value.
CUSTOM_POOL_SIZE = 3

class CustomPool(ObjectPool):
    """ObjectPool subclass that overrides the class-level pool size."""
    pool_size = CUSTOM_POOL_SIZE
class ObjectPoolTest(TestCase):
    """Behavioural tests for ObjectPool, CustomPool and the poolObj
    context manager."""

    def setUp(self):
        # One pool with the default size and one with the custom size.
        self.a_pool = ObjectPool(A)
        self.custom_pool = CustomPool(A)

    def test_get_object(self):
        """Borrowing from the pool yields an instance of the pooled class."""
        self.assertTrue(isinstance(self.a_pool.borrowObj(), A))

    def test_get_two_objects(self):
        """Two consecutive borrows return two distinct instances."""
        obj1 = self.a_pool.borrowObj()
        obj2 = self.a_pool.borrowObj()
        self.assertTrue(isinstance(obj1, A))
        self.assertTrue(isinstance(obj2, A))
        self.assertNotEqual(obj1, obj2)

    def test_get_object_kwargs(self):
        """Keyword arguments given to the pool reach the pooled objects."""
        expected_dict = {'a': 2}
        self.akwargs_pool = ObjectPool(AWithkwargs, kwargs=expected_dict)
        obj1 = self.akwargs_pool.borrowObj()
        self.assertTrue(isinstance(obj1, AWithkwargs))
        self.assertEqual(obj1.kwargs, expected_dict)

    def test_custom_pool_size(self):
        """A subclass can override pool_size declaratively."""
        self.assertEqual(CUSTOM_POOL_SIZE,self.custom_pool.pool_size)

    def test_custom_pool_context(self):
        """poolObj lends an object for the 'with' block and puts it back on
        the pool's queue when the block exits."""
        with poolObj(self.custom_pool) as borrowed_object:
            self.asserted_object = borrowed_object
            self.assertTrue(isinstance(borrowed_object,A))
            self.assertEqual(34,borrowed_object.aaa())
        # NOTE(review): queue._get() is a private Queue method and it also
        # consumes the item -- confirm this reach-in is intentional.
        self.assertEqual(1,self.custom_pool.queue.qsize())
        self.assertEqual(self.asserted_object,self.custom_pool.queue._get())
| gpl-3.0 |
CoolCloud/taiga-back | tests/integration/test_roles.py | 20 | 2852 | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014 Anler Hernández <hello@anler.me>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from django.core.urlresolvers import reverse
from taiga.users.models import Role
from taiga.projects.models import Membership
from taiga.projects.models import Project
from .. import factories as f
# Give every test in this module access to the Django test database.
pytestmark = pytest.mark.django_db
def test_destroy_role_and_reassign_members(client):
    """Deleting a role with ?moveTo=<other_role> must move its memberships
    to the other role instead of dropping them."""
    user1 = f.UserFactory.create()
    user2 = f.UserFactory.create()
    project = f.ProjectFactory.create(owner=user1)
    role1 = f.RoleFactory.create(project=project)
    role2 = f.RoleFactory.create(project=project)
    f.MembershipFactory.create(project=project, user=user1, role=role1, is_owner=True)
    f.MembershipFactory.create(project=project, user=user2, role=role2)

    # Delete role2, asking the API to reassign its members to role1.
    url = reverse("roles-detail", args=[role2.pk]) + "?moveTo={}".format(role1.pk)

    client.login(user1)
    response = client.delete(url)
    assert response.status_code == 204

    # Only role1 survives...
    qs = Role.objects.filter(project=project)
    assert qs.count() == 1

    # ...no membership still points at the deleted role...
    qs = Membership.objects.filter(project=project, role_id=role2.pk)
    assert qs.count() == 0

    # ...and both members now belong to role1.
    qs = Membership.objects.filter(project=project, role_id=role1.pk)
    assert qs.count() == 2
def test_destroy_role_and_reassign_members_with_deleted_project(client):
    """
    Regression test, that fixes some 500 errors on production:
    deleting a role whose project has already been removed must return a
    clean error response instead of crashing.
    """
    user1 = f.UserFactory.create()
    user2 = f.UserFactory.create()
    project = f.ProjectFactory.create(owner=user1)
    role1 = f.RoleFactory.create(project=project)
    role2 = f.RoleFactory.create(project=project)
    f.MembershipFactory.create(project=project, user=user1, role=role1)
    f.MembershipFactory.create(project=project, user=user2, role=role2)

    # Remove the project out from under the roles.
    Project.objects.filter(pk=project.id).delete()

    url = reverse("roles-detail", args=[role2.pk]) + "?moveTo={}".format(role1.pk)

    client.login(user1)
    response = client.delete(url)

    # FIXME: really should return 403? I think it should be 404
    assert response.status_code == 403, response.content
Sulter/MASTERlinker | plugins/DCC.py | 1 | 1837 | # plugin that supports DCC, still unsecure, and only works with clients using reverse DCC (mIRC, xchat, hexchat [...])
import settings
import re
import socket
import threading
import random
# Externally visible IP address advertised in the DCC SEND reply.
# "XXXXX" is a placeholder -- it must be configured before the plugin works.
bot_dec_IP = "XXXXX"
class DCC():
  """Plugin implementing reverse DCC file receiving.

  Only reverse DCC offers (as sent by mIRC, xchat, hexchat, ...) are
  handled: the sender asks the bot to open a listening socket and then
  connects back to it to push the file.
  """

  def DCC(self, main_ref, msg_info):
    """Entry point for each incoming message; when a private reverse-DCC
    SEND offer is detected, spawn a thread to receive the file."""
    if msg_info["channel"] == settings.NICK:
      r = re.search("DCC SEND (\S+) (\d+) 0 (\d+) (\d+)", msg_info["message"])
      if r:  # reverse DCC detected
        self.main_ref = main_ref
        self.msg_info = msg_info
        t = threading.Thread(target=self.r_dcc, args=(r, msg_info["nick"]))
        t.start()

  def r_dcc(self, r, nick):
    """Bind a listening socket on a random port, announce it to |nick|,
    accept one connection and write the transferred data to download/."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    while 1:  # keep trying random ports until it finds a open one
      port = random.randint(10000, 25000)
      try:
        s.bind(("", port))
        break
      except:
        # NOTE(review): bare except also swallows non-port errors
        # (permissions, keyboard interrupt, ...); consider OSError only.
        print("port was occupied, we try again")
    s.listen(5)
    # send the accepting message back
    self.send_response(r, port, nick)
    (client_socket, address) = s.accept()
    file_size = int(r.group(3), 10)  # announced size, decimal bytes
    f = open("download/" + r.group(1), 'wb')
    while 1:
      new_data = client_socket.recv(4096)
      if not len(new_data):
        break
      f.write(new_data)
      file_size = file_size - len(new_data)
      if file_size <= 0:
        break
    client_socket.close()
    f.close()

  def send_response(self, r, port, nick):
    """Send the CTCP reply accepting the transfer on our |port|."""
    filename = r.group(1)
    file_size = r.group(3)
    token = r.group(4)
    # NOTE(review): there is no separator between |filename| and
    # |bot_dec_IP| below -- looks like a missing " " piece; verify against
    # the DCC handshake format before relying on this.
    response = "\001DCC SEND " + filename + bot_dec_IP + str(port) + " " + file_size + " " + token + "\001"
    self.main_ref.send_msg(nick, response)
| mit |
kawasaki2013/python-for-android-x86 | python3-alpha/python3-src/Lib/idlelib/IdleHistory.py | 136 | 3145 | from idlelib.configHandler import idleConf
class History:
    """Implement input history browsing for IDLE's shell text widget.

    Commands are stored in self.history; history_do() walks the list in
    either direction, matching entries against the prefix the user has
    already typed after the "iomark" position.
    """

    def __init__(self, text, output_sep = "\n"):
        # text: the Tk Text widget; output_sep: the separator the shell
        # inserts between continuation lines (e.g. sys.ps2-separated).
        self.text = text
        self.history = []
        self.history_prefix = None
        self.history_pointer = None
        self.output_sep = output_sep
        # Whether browsing wraps around past either end of the history.
        self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool")
        text.bind("<<history-previous>>", self.history_prev)
        text.bind("<<history-next>>", self.history_next)

    def history_next(self, event):
        """Event handler: step toward newer history entries."""
        self.history_do(0)
        return "break"

    def history_prev(self, event):
        """Event handler: step toward older history entries."""
        self.history_do(1)
        return "break"

    def _get_source(self, start, end):
        # Get source code from start index to end index.  Lines in the
        # text control may be separated by sys.ps2 .
        lines = self.text.get(start, end).split(self.output_sep)
        return "\n".join(lines)

    def _put_source(self, where, source):
        # Inverse of _get_source: re-join with the widget's separator.
        output = self.output_sep.join(source.split("\n"))
        self.text.insert(where, output)

    def history_do(self, reverse):
        """Replace the current input with the previous (reverse=1) or next
        (reverse=0) history entry that starts with the typed prefix."""
        nhist = len(self.history)
        pointer = self.history_pointer
        prefix = self.history_prefix
        if pointer is not None and prefix is not None:
            # If the user edited the text since the last step, restart the
            # search from the current input.
            if self.text.compare("insert", "!=", "end-1c") or \
               self._get_source("iomark", "end-1c") != self.history[pointer]:
                pointer = prefix = None
        if pointer is None or prefix is None:
            prefix = self._get_source("iomark", "end-1c")
            if reverse:
                pointer = nhist
            else:
                if self.cyclic:
                    pointer = -1
                else:
                    # Nothing newer to move to when not cyclic.
                    self.text.bell()
                    return
        nprefix = len(prefix)
        while 1:
            if reverse:
                pointer = pointer - 1
            else:
                pointer = pointer + 1
            if pointer < 0 or pointer >= nhist:
                # Ran off an end of the history.
                self.text.bell()
                if not self.cyclic and pointer < 0:
                    return
                else:
                    # Restore the original prefix and reset browsing state.
                    if self._get_source("iomark", "end-1c") != prefix:
                        self.text.delete("iomark", "end-1c")
                        self._put_source("iomark", prefix)
                    pointer = prefix = None
                break
            item = self.history[pointer]
            # Only accept entries that extend the typed prefix.
            if item[:nprefix] == prefix and len(item) > nprefix:
                self.text.delete("iomark", "end-1c")
                self._put_source("iomark", item)
                break
        self.text.mark_set("insert", "end-1c")
        self.text.see("insert")
        self.text.tag_remove("sel", "1.0", "end")
        self.history_pointer = pointer
        self.history_prefix = prefix

    def history_store(self, source):
        """Append |source| to the history (very short inputs are skipped)
        and reset the browsing state."""
        source = source.strip()
        if len(source) > 2:
            # avoid duplicates
            try:
                self.history.remove(source)
            except ValueError:
                pass
            self.history.append(source)
        self.history_pointer = None
        self.history_prefix = None
| apache-2.0 |
hip-odoo/odoo | addons/stock/tests/test_product.py | 25 | 2857 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Author: Leonardo Pistone
# Copyright 2015 Camptocamp SA
from odoo.addons.stock.tests.common2 import TestStockCommon
class TestVirtualAvailable(TestStockCommon):
    """Check product_3.virtual_available with and without an owner filter.

    Fixture: 40 units of product_3 in stock (30 without owner, 10 owned by
    the stock user's partner); one outgoing picking of 3 units with no
    restriction and one of 5 units restricted to that partner.
    """

    def setUp(self):
        super(TestVirtualAvailable, self).setUp()
        # 30 units with no specific owner.
        self.env['stock.quant'].create({
            'product_id': self.product_3.id,
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'qty': 30.0})
        # 10 units owned by the stock user's partner.
        self.env['stock.quant'].create({
            'product_id': self.product_3.id,
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'qty': 10.0,
            'owner_id': self.user_stock_user.partner_id.id})
        # Outgoing picking shipping 3 units, no owner restriction.
        self.picking_out = self.env['stock.picking'].create({
            'picking_type_id': self.ref('stock.picking_type_out'),
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'location_dest_id': self.env.ref('stock.stock_location_customers').id})
        self.env['stock.move'].create({
            'name': 'a move',
            'product_id': self.product_3.id,
            'product_uom_qty': 3.0,
            'product_uom': self.product_3.uom_id.id,
            'picking_id': self.picking_out.id,
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'location_dest_id': self.env.ref('stock.stock_location_customers').id})
        # Second picking shipping 5 units restricted to the owner partner.
        self.picking_out_2 = self.env['stock.picking'].create({
            'picking_type_id': self.ref('stock.picking_type_out'),
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'location_dest_id': self.env.ref('stock.stock_location_customers').id})
        self.env['stock.move'].create({
            'restrict_partner_id': self.user_stock_user.partner_id.id,
            'name': 'another move',
            'product_id': self.product_3.id,
            'product_uom_qty': 5.0,
            'product_uom': self.product_3.uom_id.id,
            'picking_id': self.picking_out_2.id,
            'location_id': self.env.ref('stock.stock_location_stock').id,
            'location_dest_id': self.env.ref('stock.stock_location_customers').id})

    def test_without_owner(self):
        """Without owner context, all 40 units count; both outgoing moves
        (3 + 5) reduce the forecast once assigned."""
        self.assertAlmostEqual(40.0, self.product_3.virtual_available)
        self.picking_out.action_assign()
        self.picking_out_2.action_assign()
        self.assertAlmostEqual(32.0, self.product_3.virtual_available)

    def test_with_owner(self):
        """With owner_id in context only the 10 owned units count, and only
        the owner-restricted move (5 units) reduces the forecast."""
        prod_context = self.product_3.with_context(owner_id=self.user_stock_user.partner_id.id)
        self.assertAlmostEqual(10.0, prod_context.virtual_available)
        self.picking_out.action_assign()
        self.picking_out_2.action_assign()
        self.assertAlmostEqual(5.0, prod_context.virtual_available)
| agpl-3.0 |
cloudera/ibis | ibis/backends/mysql/tests/conftest.py | 1 | 2351 | import os
from pathlib import Path
from pkg_resources import parse_version
import ibis
from ibis.backends.tests.base import BackendTest, RoundHalfToEven
class TestConf(BackendTest, RoundHalfToEven):
    """Test-suite configuration for the MySQL/MariaDB ibis backend."""

    # mysql has the same rounding behavior as postgres
    check_dtype = False
    supports_window_operations = False
    returned_timestamp_unit = 's'
    supports_arrays = False
    supports_arrays_outside_of_select = supports_arrays

    def __init__(self, data_directory: Path) -> None:
        """Connect and enable window-function tests when the server version
        supports them."""
        super().__init__(data_directory)
        # mariadb supports window operations after version 10.2
        # but the sqlalchemy version string looks like:
        # 5.5.5.10.2.12.MariaDB.10.2.12+maria~jessie
        # or 10.4.12.MariaDB.1:10.4.12+maria~bionic
        # example of possible results:
        # https://github.com/sqlalchemy/sqlalchemy/blob/rel_1_3/
        # test/dialect/mysql/test_dialect.py#L244-L268
        con = self.connection
        if 'MariaDB' in str(con.version):
            # we might move this parsing step to the mysql client
            version_detail = con.con.dialect._parse_server_version(
                str(con.version)
            )
            # Depending on the string layout, the real MariaDB version is
            # either before or after the 'MariaDB' token.
            version = (
                version_detail[:3]
                if version_detail[3] == 'MariaDB'
                else version_detail[3:6]
            )
            self.__class__.supports_window_operations = version >= (10, 2)
        elif con.version >= parse_version('8.0'):
            # mysql supports window operations after version 8
            self.__class__.supports_window_operations = True

    @staticmethod
    def connect(data_directory: Path) -> ibis.client.Client:
        """Build a connection from IBIS_TEST_MYSQL_* environment variables,
        falling back to local defaults."""
        user = os.environ.get('IBIS_TEST_MYSQL_USER', 'ibis')
        password = os.environ.get('IBIS_TEST_MYSQL_PASSWORD', 'ibis')
        host = os.environ.get('IBIS_TEST_MYSQL_HOST', 'localhost')
        port = os.environ.get('IBIS_TEST_MYSQL_PORT', 3306)
        database = os.environ.get('IBIS_TEST_MYSQL_DATABASE', 'ibis_testing')
        return ibis.mysql.connect(
            host=host,
            port=port,
            user=user,
            password=password,
            database=database,
        )

    @property
    def functional_alltypes(self):
        # BOOLEAN <-> TINYINT(1): normalize the tinyint column back to bool.
        t = super().functional_alltypes
        return t.mutate(bool_col=t.bool_col == 1)
| apache-2.0 |
cubells/l10n-spain | payment_redsys/models/redsys.py | 1 | 17878 | # © 2016-2017 Sergio Teruel <sergio.teruel@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import hashlib
import hmac
import base64
import logging
import json
import urllib
from odoo import models, fields, api, _
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.addons import decimal_precision as dp
from odoo.tools import config
from odoo.tools.float_utils import float_compare
from odoo import exceptions
from odoo import http
_logger = logging.getLogger(__name__)
try:
from Crypto.Cipher import DES3
except ImportError:
_logger.info("Missing dependency (pycryptodome). See README.")
class AcquirerRedsys(models.Model):
    """Redsys (Sermepa) payment acquirer for Odoo's payment framework.

    Builds the base64/JSON 'Ds_MerchantParameters' blob, signs it with
    3DES + HMAC-SHA256 as required by the Redsys SIS gateway, and exposes
    the form values/URL used by the website payment form.
    """
    _inherit = 'payment.acquirer'

    def _get_redsys_urls(self, environment):
        """Return the Redsys SIS endpoint for the given environment
        ('prod' uses the live gateway, anything else the test one)."""
        if environment == 'prod':
            return {
                'redsys_form_url':
                'https://sis.redsys.es/sis/realizarPago/',
            }
        else:
            return {
                'redsys_form_url':
                'https://sis-t.redsys.es:25443/sis/realizarPago/',
            }

    provider = fields.Selection(selection_add=[('redsys', 'Redsys')])
    redsys_merchant_name = fields.Char('Merchant Name',
                                       required_if_provider='redsys')
    redsys_merchant_titular = fields.Char('Merchant Titular',
                                          required_if_provider='redsys')
    redsys_merchant_code = fields.Char('Merchant code',
                                       required_if_provider='redsys')
    redsys_merchant_description = fields.Char('Product Description',
                                              required_if_provider='redsys')
    redsys_secret_key = fields.Char('Secret Key',
                                    required_if_provider='redsys')
    redsys_terminal = fields.Char('Terminal', default='1',
                                  required_if_provider='redsys')
    # ISO 4217 numeric code; 978 is EUR.
    redsys_currency = fields.Char('Currency', default='978',
                                  required_if_provider='redsys')
    # NOTE(review): label misspells "Transaction"; it is a user-visible
    # string, so fixing it is a behavior change left for a separate patch.
    redsys_transaction_type = fields.Char('Transtaction Type', default='0',
                                          required_if_provider='redsys')
    redsys_merchant_data = fields.Char('Merchant Data')
    redsys_merchant_lang = fields.Selection([('001', 'Castellano'),
                                             ('002', 'Inglés'),
                                             ('003', 'Catalán'),
                                             ('004', 'Francés'),
                                             ('005', 'Alemán'),
                                             ('006', 'Holandés'),
                                             ('007', 'Italiano'),
                                             ('008', 'Sueco'),
                                             ('009', 'Portugués'),
                                             ('010', 'Valenciano'),
                                             ('011', 'Polaco'),
                                             ('012', 'Gallego'),
                                             ('013', 'Euskera'),
                                             ], 'Merchant Consumer Language',
                                            default='001')
    redsys_pay_method = fields.Selection([('T', 'Pago con Tarjeta'),
                                          ('R', 'Pago por Transferencia'),
                                          ('D', 'Domiciliacion'),
                                          ], 'Payment Method',
                                         default='T')
    redsys_signature_version = fields.Selection(
        [('HMAC_SHA256_V1', 'HMAC SHA256 V1')], default='HMAC_SHA256_V1')
    send_quotation = fields.Boolean('Send quotation', default=True)
    redsys_percent_partial = fields.Float(
        string='Reduction percent',
        digits=dp.get_precision('Account'),
        help='Write percent reduction payment, for this method payment.'
             'With this option you can allow partial payments in your '
             'shop online, the residual amount in pending for do a manual '
             'payment later.'
    )

    @api.constrains('redsys_percent_partial')
    def check_redsys_percent_partial(self):
        """Constrain the partial-payment percentage to [0, 100]."""
        if (self.redsys_percent_partial < 0 or
                self.redsys_percent_partial > 100):
            raise exceptions.Warning(
                _('Partial payment percent must be between 0 and 100'))

    @api.model
    def _get_website_callback_url(self):
        """For force a callback url from Redsys distinct to base url website,
        only apply to a Redsys response.
        """
        get_param = self.env['ir.config_parameter'].sudo().get_param
        return get_param(
            'payment_redsys.callback_url')

    @api.model
    def _get_website_url(self):
        """
        For a single website setting the domain website name is not accesible
        for the user, by default is localhost so the system get domain from
        system parameters instead of domain of website record.
        """
        if config['test_enable']:
            return self.env['ir.config_parameter'].sudo().get_param(
                'web.base.url')
        domain = http.request.website.domain
        if domain and domain != 'localhost':
            base_url = '%s://%s' % (
                http.request.httprequest.environ['wsgi.url_scheme'],
                http.request.website.domain
            )
        else:
            base_url = self.env['ir.config_parameter'].sudo().get_param(
                'web.base.url')
        return base_url or ''

    def _prepare_merchant_parameters(self, tx_values):
        """Build the Ds_Merchant_* dict for the transaction and return it
        base64-encoded as required by the gateway.

        NOTE: mutates tx_values['amount'] in place when a partial-payment
        percentage is configured.
        """
        # Check multi-website
        base_url = self._get_website_url()
        callback_url = self._get_website_callback_url()
        if self.redsys_percent_partial > 0:
            amount = tx_values['amount']
            tx_values['amount'] = amount - (
                amount * self.redsys_percent_partial / 100)
        values = {
            'Ds_Sermepa_Url': (
                self._get_redsys_urls(self.environment)[
                    'redsys_form_url']),
            # Redsys expects the amount in integer cents.
            'Ds_Merchant_Amount': str(int(round(tx_values['amount'] * 100))),
            'Ds_Merchant_Currency': self.redsys_currency or '978',
            # Redsys order identifiers are limited to 12 characters.
            'Ds_Merchant_Order': (
                tx_values['reference'] and tx_values['reference'][-12:] or
                False),
            'Ds_Merchant_MerchantCode': (
                self.redsys_merchant_code and
                self.redsys_merchant_code[:9]),
            'Ds_Merchant_Terminal': self.redsys_terminal or '1',
            'Ds_Merchant_TransactionType': (
                self.redsys_transaction_type or '0'),
            # NOTE(review): the 'and' of the same sliced expression twice
            # looks like a typo for "titular and titular[:60]"; as written
            # it raises TypeError when the field is unset -- confirm.
            'Ds_Merchant_Titular': (
                self.redsys_merchant_titular[:60] and
                self.redsys_merchant_titular[:60]),
            'Ds_Merchant_MerchantName': (
                self.redsys_merchant_name and
                self.redsys_merchant_name[:25]),
            'Ds_Merchant_MerchantUrl': (
                '%s/payment/redsys/return' % (callback_url or base_url))[:250],
            'Ds_Merchant_MerchantData': self.redsys_merchant_data or '',
            'Ds_Merchant_ProductDescription': (
                self._product_description(tx_values['reference']) or
                self.redsys_merchant_description and
                self.redsys_merchant_description[:125]),
            'Ds_Merchant_ConsumerLanguage': (
                self.redsys_merchant_lang or '001'),
            'Ds_Merchant_UrlOk':
                '%s/payment/redsys/result/redsys_result_ok' % base_url,
            'Ds_Merchant_UrlKo':
                '%s/payment/redsys/result/redsys_result_ko' % base_url,
            'Ds_Merchant_Paymethods': self.redsys_pay_method or 'T',
        }
        return self._url_encode64(json.dumps(values))

    def _url_encode64(self, data):
        # str -> base64-encoded bytes.
        data = base64.b64encode(data.encode())
        return data

    def _url_decode64(self, data):
        # base64 JSON -> dict (inverse of _url_encode64 + json.dumps).
        return json.loads(base64.b64decode(data).decode())

    def sign_parameters(self, secret_key, params64):
        """Compute the Redsys HMAC_SHA256_V1 signature of |params64|.

        The per-order key is the order number 3DES-CBC-encrypted with the
        merchant secret; the signature is HMAC-SHA256 of the base64 payload
        with that key, itself base64-encoded.
        """
        params_dic = self._url_decode64(params64)
        # Outgoing payloads carry Ds_Merchant_Order; gateway responses
        # carry (url-quoted) Ds_Order instead.
        if 'Ds_Merchant_Order' in params_dic:
            order = str(params_dic['Ds_Merchant_Order'])
        else:
            order = str(
                urllib.parse.unquote(params_dic.get('Ds_Order', 'Not found')))
        cipher = DES3.new(
            key=base64.b64decode(secret_key),
            mode=DES3.MODE_CBC,
            IV=b'\0\0\0\0\0\0\0\0')
        # Zero-pad the order up to a multiple of the 8-byte 3DES block.
        diff_block = len(order) % 8
        zeros = diff_block and (b'\0' * (8 - diff_block)) or b''
        key = cipher.encrypt(str.encode(order + zeros.decode()))
        if isinstance(params64, str):
            params64 = params64.encode()
        dig = hmac.new(
            key=key,
            msg=params64,
            digestmod=hashlib.sha256).digest()
        return base64.b64encode(dig).decode()

    @api.multi
    def redsys_form_generate_values(self, values):
        """Return the values rendered into the redirect form: signature
        version, encoded merchant parameters and their signature."""
        self.ensure_one()
        redsys_values = dict(values)
        merchant_parameters = self._prepare_merchant_parameters(values)
        redsys_values.update({
            'Ds_SignatureVersion': str(self.redsys_signature_version),
            'Ds_MerchantParameters': merchant_parameters,
            'Ds_Signature': self.sign_parameters(
                self.redsys_secret_key, merchant_parameters),
        })
        return redsys_values

    @api.multi
    def redsys_get_form_action_url(self):
        """URL the payment form posts to (environment-dependent)."""
        return self._get_redsys_urls(self.environment)['redsys_form_url']

    def _product_description(self, order_ref):
        """Return up to 125 chars of '|'-joined line names of the sale
        order matching |order_ref|, or '' when no order is found."""
        sale_order = self.env['sale.order'].search([('name', '=', order_ref)])
        res = ''
        if sale_order:
            description = '|'.join(x.name for x in sale_order.order_line)
            res = description[:125]
        return res
class TxRedsys(models.Model):
    """payment.transaction extension implementing the Redsys feedback flow."""
    _inherit = 'payment.transaction'
    # Bank authorisation code (Ds_AuthorisationCode) of the payment.
    redsys_txnid = fields.Char('Transaction ID')
    def merchant_params_json2dict(self, data):
        """Decode the base64-encoded JSON 'Ds_MerchantParameters' blob."""
        parameters = data.get('Ds_MerchantParameters', '')
        return json.loads(base64.b64decode(parameters).decode())
    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------
    @api.model
    def _redsys_form_get_tx_from_data(self, data):
        """ Given a data dict coming from redsys, verify it and
        find the related transaction record. """
        parameters = data.get('Ds_MerchantParameters', '')
        parameters_dic = json.loads(base64.b64decode(parameters).decode())
        reference = urllib.parse.unquote(parameters_dic.get('Ds_Order', ''))
        pay_id = parameters_dic.get('Ds_AuthorisationCode')
        # Redsys sends the signature URL-safe; restore standard base64
        # alphabet before comparing with our own computation.
        shasign = data.get(
            'Ds_Signature', '').replace('_', '/').replace('-', '+')
        test_env = http.request.session.get('test_enable', False)
        if not reference or not pay_id or not shasign:
            # NOTE(review): "shashign" typo below is a runtime string and is
            # intentionally preserved.
            error_msg = 'Redsys: received data with missing reference' \
                ' (%s) or pay_id (%s) or shashign (%s)' % (reference,
                                                           pay_id, shasign)
            if not test_env:
                _logger.info(error_msg)
                raise ValidationError(error_msg)
            # For tests
            http.OpenERPSession.tx_error = True
        tx = self.search([('reference', '=', reference)])
        if not tx or len(tx) > 1:
            error_msg = 'Redsys: received data for reference %s' % (reference)
            if not tx:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            if not test_env:
                _logger.info(error_msg)
                raise ValidationError(error_msg)
            # For tests
            http.OpenERPSession.tx_error = True
        if tx and not test_env:
            # verify shasign
            shasign_check = tx.acquirer_id.sign_parameters(
                tx.acquirer_id.redsys_secret_key, parameters)
            if shasign_check != shasign:
                error_msg = (
                    'Redsys: invalid shasign, received %s, computed %s, '
                    'for data %s' % (shasign, shasign_check, data)
                )
                _logger.info(error_msg)
                raise ValidationError(error_msg)
        return tx
    @api.multi
    def _redsys_form_get_invalid_parameters(self, data):
        """Return (name, received, expected) tuples for mismatched params."""
        test_env = http.request.session.get('test_enable', False)
        invalid_parameters = []
        parameters_dic = self.merchant_params_json2dict(data)
        # NOTE(review): the parentheses group as
        # (acquirer_reference and Ds_Order) != acquirer_reference, so the
        # check silently passes when acquirer_reference is falsy — confirm
        # this grouping is intentional.
        if (self.acquirer_reference and
                parameters_dic.get('Ds_Order')) != self.acquirer_reference:
            invalid_parameters.append(
                ('Transaction Id', parameters_dic.get('Ds_Order'),
                 self.acquirer_reference))
        # check what is buyed
        if self.acquirer_id.redsys_percent_partial > 0.0:
            # Partial payment: only a percentage of the amount was charged.
            new_amount = self.amount - (
                self.amount * self.acquirer_id.redsys_percent_partial / 100)
            self.amount = new_amount
        # Ds_Amount is expressed in cents; compare at 2-decimal precision.
        if (float_compare(float(parameters_dic.get('Ds_Amount', '0.0')) / 100,
                          self.amount, 2) != 0):
            invalid_parameters.append(
                ('Amount', parameters_dic.get('Ds_Amount'),
                 '%.2f' % self.amount))
        if invalid_parameters and test_env:
            # If transaction is in test mode invalidate invalid_parameters
            # to avoid logger error from parent method
            return []
        return invalid_parameters
    @api.model
    def _get_redsys_state(self, status_code):
        """Map a numeric Ds_Response status code onto a transaction state."""
        if 0 <= status_code <= 100:
            return "done"
        elif status_code <= 203:
            return "pending"
        elif 912 <= status_code <= 9912:
            return "cancel"
        else:
            # Covers 204-911 and anything above 9912.
            return "error"
    def _redsys_form_validate(self, data):
        """Write feedback state onto the tx; return True unless in error."""
        params = self.merchant_params_json2dict(data)
        # '29999' is a sentinel well outside every documented code range,
        # so a missing Ds_Response maps to "error".
        status_code = int(params.get('Ds_Response', '29999'))
        state = self._get_redsys_state(status_code)
        vals = {
            'state': state,
            'redsys_txnid': params.get('Ds_AuthorisationCode'),
        }
        state_message = ""
        if state == 'done':
            vals['state_message'] = _('Ok: %s') % params.get('Ds_Response')
        elif state == 'pending':  # 'Payment error: code: %s.'
            state_message = _('Error: %s (%s)')
        elif state == 'cancel':  # 'Payment error: bank unavailable.'
            state_message = _('Bank Error: %s (%s)')
        else:
            state_message = _('Redsys: feedback error %s (%s)')
        if state_message:
            vals['state_message'] = state_message % (
                params.get('Ds_Response'), params.get('Ds_ErrorCode'),
            )
            if state == 'error':
                _logger.warning(vals['state_message'])
        self.write(vals)
        return state != 'error'
    @api.model
    def form_feedback(self, data, acquirer_name):
        """Process gateway feedback, then confirm or notify the sale order.

        Extends the generic handler with the partial-payment flow: when the
        acquirer charges only a percentage, the linked sale order is
        confirmed (or a quote email is sent) once the amounts match.
        """
        res = super(TxRedsys, self).form_feedback(data, acquirer_name)
        try:
            # NOTE(review): if the acquirer-specific lookup method does not
            # exist, `tx` stays unbound and the lines below raise NameError,
            # which is swallowed by the broad except — confirm intended.
            tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name
            if hasattr(self, tx_find_method_name):
                tx = getattr(self, tx_find_method_name)(data)
            _logger.info(
                '<%s> transaction processed: tx ref:%s, tx amount: %s',
                acquirer_name, tx.reference if tx else 'n/a',
                tx.amount if tx else 'n/a')
            if tx.acquirer_id.redsys_percent_partial > 0:
                # ensure_one() raises if several orders are linked.
                if tx and tx.sale_order_ids and tx.sale_order_ids.ensure_one():
                    percent_reduction = tx.acquirer_id.redsys_percent_partial
                    new_so_amount = (
                        tx.sale_order_ids.amount_total - (
                            tx.sale_order_ids.amount_total *
                            percent_reduction / 100))
                    amount_matches = (
                        tx.sale_order_ids.state in ['draft', 'sent'] and
                        float_compare(tx.amount, new_so_amount, 2) == 0)
                    if amount_matches:
                        if tx.state == 'done':
                            _logger.info(
                                '<%s> transaction completed, confirming order '
                                '%s (ID %s)', acquirer_name,
                                tx.sale_order_ids.name, tx.sale_order_ids.id)
                            # bypass_test lets the test suite skip the
                            # real confirmation side effects.
                            if not self.env.context.get('bypass_test', False):
                                tx.sale_order_ids.with_context(
                                    send_email=True).action_confirm()
                        elif (tx.state != 'cancel' and
                              tx.sale_order_ids.state == 'draft'):
                            _logger.info('<%s> transaction pending, sending '
                                         'quote email for order %s (ID %s)',
                                         acquirer_name, tx.sale_order_ids.name,
                                         tx.sale_order_ids.id)
                            if not self.env.context.get('bypass_test', False):
                                tx.sale_order_ids.force_quotation_send()
                    else:
                        _logger.warning('<%s> transaction MISMATCH for order '
                                        '%s (ID %s)', acquirer_name,
                                        tx.sale_order_ids.name,
                                        tx.sale_order_ids.id)
        except Exception:
            # Best effort: feedback must succeed even if order confirmation
            # or the notification email fails; the failure is only logged.
            _logger.exception(
                'Fail to confirm the order or send the confirmation email%s',
                tx and ' for the transaction %s' % tx.reference or '')
        return res
| agpl-3.0 |
hzlf/openbroadcast | website/shop/shop/tests/cart.py | 3 | 8271 | # -*- coding: utf-8 -*-
from __future__ import with_statement
from decimal import Decimal
from django.contrib.auth.models import User
from django.test.testcases import TestCase
from shop.cart.modifiers_pool import cart_modifiers_pool
from shop.models.cartmodel import Cart, CartItem
from shop.models.productmodel import Product
from shop.tests.utils.context_managers import SettingsOverride
# This try except is there to let people run the tests from any project
# Not only from the provided "test" project.
# Flag whether the optional BaseProduct model from the provided "test"
# project is importable; tests depending on it are skipped otherwise.
SKIP_BASEPRODUCT_TEST = False
try:
    from project.models import BaseProduct
except ImportError:
    # Narrowed from a bare except: only a failed import should trigger the
    # skip, not unrelated errors raised while loading the module.
    SKIP_BASEPRODUCT_TEST = True
class CartTestCase(TestCase):
    """Exercise Cart price/quantity computations with and without modifiers.

    Each test overrides SHOP_CART_MODIFIERS so the modifier pipeline under
    test is explicit; setUp builds one user, one 100-unit product and an
    empty cart.
    """
    PRODUCT_PRICE = Decimal('100')
    TEN_PERCENT = Decimal(10) / Decimal(100)
    def setUp(self):
        # Disable the modifiers cache so each SettingsOverride takes effect.
        cart_modifiers_pool.USE_CACHE = False
        self.user = User.objects.create(username="test",
                                        email="test@example.com")
        self.product = Product()
        self.product.name = "TestPrduct"
        self.product.slug = "TestPrduct"
        self.product.short_description = "TestPrduct"
        self.product.long_description = "TestPrduct"
        self.product.active = True
        self.product.unit_price = self.PRODUCT_PRICE
        self.product.save()
        self.cart = Cart()
        self.cart.user = self.user
        self.cart.save()
    def test_empty_cart_costs_0_quantity_0(self):
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.cart.update()
            self.assertEqual(self.cart.subtotal_price, Decimal('0.0'))
            self.assertEqual(self.cart.total_price, Decimal('0.0'))
            self.assertEqual(self.cart.total_quantity, 0)
    def test_one_object_no_modifiers(self):
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.cart.add_product(self.product)
            self.cart.save()
            self.cart.update()
            self.cart.save()
            self.assertEqual(self.cart.subtotal_price, self.PRODUCT_PRICE)
            self.assertEqual(self.cart.total_price, self.PRODUCT_PRICE)
            self.assertEqual(self.cart.total_quantity, 1)
    def test_two_objects_no_modifier(self):
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            # We add two objects now :)
            self.cart.add_product(self.product, 2)
            self.cart.update()
            self.cart.save()
            self.assertEqual(self.cart.subtotal_price, self.PRODUCT_PRICE * 2)
            self.assertEqual(self.cart.total_price, self.PRODUCT_PRICE * 2)
            self.assertEqual(self.cart.total_quantity, 2)
    def test_one_object_simple_modifier(self):
        # Tax modifier adds 10% on top of the subtotal.
        MODIFIERS = [
            'shop.cart.modifiers.tax_modifiers.TenPercentGlobalTaxModifier']
        with SettingsOverride(SHOP_CART_MODIFIERS=MODIFIERS):
            self.cart.add_product(self.product)
            self.cart.update()
            self.cart.save()
            self.assertEqual(self.cart.subtotal_price, self.PRODUCT_PRICE)
            self.assertEqual(self.cart.total_price,
                             (self.TEN_PERCENT * self.PRODUCT_PRICE) + self.PRODUCT_PRICE)
    def test_one_object_two_modifiers_no_rebate(self):
        # Rebate modifier requires bulk quantities, so with a single item
        # only the tax modifier affects the total.
        MODIFIERS = [
            'shop.cart.modifiers.tax_modifiers.TenPercentGlobalTaxModifier',
            'shop.cart.modifiers.rebate_modifiers.BulkRebateModifier']
        with SettingsOverride(SHOP_CART_MODIFIERS=MODIFIERS):
            self.cart.add_product(self.product)
            self.cart.update()
            self.cart.save()
            self.assertEqual(self.cart.subtotal_price, self.PRODUCT_PRICE)
            self.assertEqual(self.cart.total_price,
                             (self.TEN_PERCENT * self.PRODUCT_PRICE) + self.PRODUCT_PRICE)
    def test_one_object_two_modifiers_with_rebate(self):
        MODIFIERS = [
            'shop.cart.modifiers.tax_modifiers.TenPercentGlobalTaxModifier',
            'shop.cart.modifiers.rebate_modifiers.BulkRebateModifier']
        with SettingsOverride(SHOP_CART_MODIFIERS=MODIFIERS):
            # We add 6 objects now :)
            self.cart.add_product(self.product, 6)
            self.cart.update()
            self.cart.save()
            #subtotal is 600 - 10% = 540
            sub_should_be = (6 * self.PRODUCT_PRICE) - (
                self.TEN_PERCENT * (6 * self.PRODUCT_PRICE))
            total_should_be = sub_should_be + (
                self.TEN_PERCENT * sub_should_be)
            self.assertEqual(self.cart.subtotal_price, sub_should_be)
            self.assertEqual(self.cart.total_price, total_should_be)
    def test_add_same_object_twice(self):
        # Default add_product merges duplicates into one item with qty 2.
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.assertEqual(self.cart.total_quantity, 0)
            self.cart.add_product(self.product)
            self.cart.add_product(self.product)
            self.cart.update()
            self.cart.save()
            self.assertEqual(len(self.cart.items.all()), 1)
            self.assertEqual(self.cart.items.all()[0].quantity, 2)
            self.assertEqual(self.cart.total_quantity, 2)
    def test_add_same_object_twice_no_merge(self):
        # merge=False keeps duplicates as separate cart items.
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.assertEqual(self.cart.total_quantity, 0)
            self.cart.add_product(self.product, merge=False)
            self.cart.add_product(self.product, merge=False)
            self.cart.update()
            self.cart.save()
            self.assertEqual(len(self.cart.items.all()), 2)
            self.assertEqual(self.cart.items.all()[0].quantity, 1)
            self.assertEqual(self.cart.items.all()[1].quantity, 1)
    def test_add_product_updates_last_updated(self):
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            initial = self.cart.last_updated
            self.cart.add_product(self.product)
            self.assertNotEqual(initial, self.cart.last_updated)
    def test_cart_item_should_use_specific_type_to_get_price(self):
        # Needs the optional BaseProduct model from the test project.
        if SKIP_BASEPRODUCT_TEST:
            return
        base_product = BaseProduct.objects.create(
            unit_price=self.PRODUCT_PRICE)
        variation = base_product.productvariation_set.create(
            name="Variation 1")
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.cart.add_product(variation)
            self.cart.update()
            self.cart.save()
            self.assertEqual(self.cart.subtotal_price, self.PRODUCT_PRICE)
    def test_update_quantity_deletes(self):
        # Setting an item's quantity to 0 removes it from the cart.
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            self.assertEqual(self.cart.total_quantity, 0)
            self.cart.add_product(self.product)
            self.cart.add_product(self.product)
            self.cart.update()
            self.cart.save()
            self.assertEqual(len(self.cart.items.all()), 1)
            self.cart.update_quantity(self.cart.items.all()[0].id, 0)
            self.assertEqual(len(self.cart.items.all()), 0)
    def test_custom_queryset_is_used_when_passed_to_method(self):
        with SettingsOverride(SHOP_CART_MODIFIERS=[]):
            # first we add any product
            self.cart.add_product(self.product)
            # now we try to select a CartItem that does not exist yet. This
            # could be an item with a yet unused combination of variations.
            qs = CartItem.objects.filter(cart=self.cart, product=self.product,
                                         quantity=42)
            # although we add the same product and have merge=True, there
            # should be a new CartItem being created now.
            self.cart.add_product(self.product, queryset=qs)
            self.assertEqual(len(self.cart.items.all()), 2)
    def test_get_updated_cart_items(self):
        # Cached items returned by get_updated_cart_items must match a
        # fresh query plus explicit per-item update.
        self.cart.add_product(self.product)
        self.cart.update()
        cached_cart_items = self.cart.get_updated_cart_items()
        cart_items = CartItem.objects.filter(cart=self.cart)
        for item in cart_items:
            item.update({})
        self.assertEqual(len(cached_cart_items), len(cart_items))
        self.assertEqual(cached_cart_items[0].line_total,
                         cart_items[0].line_total)
    def test_get_updated_cart_items_without_updating_cart(self):
        # Asking for updated items before cart.update() is a usage error.
        with self.assertRaises(AssertionError):
            self.cart.get_updated_cart_items()
| gpl-3.0 |
hzopak/SmartTravellerToMobi | smarttraveller_to_mobi.py | 1 | 7920 | '''
Smart Traveller information to mobi script.
This script relies on having calibre's ebook-convert inbuilt program.
http://calibre-ebook.com/
Once the webdesigners at smarttraveller.gov.au decide to update their website I'll have to rewrite this to make it work again.
- Hoz, October 2013.
'''
import argparse
import codecs
import lxml.html
import lxml.html.clean
import os
import pickle
import time
import urllib
# Some basic settings
BASE_HTTP = 'http://www.smarttraveller.gov.au'
# Page listing every country advisory (scraped by find_country_list).
COUNTRY_LIST_URL = BASE_HTTP + '/zw-cgi/view/Advice/'
# Local pickle cache of the scraped country list.
COUNTRY_LIST_FILE = 'country_list.pickle'
# Directory holding one saved advice HTML file per country.
SAVE_DIR = 'country_html'
TOC_HTML = os.path.join(SAVE_DIR, '_toc.html')
MAIN_HTML = os.path.join(SAVE_DIR, '_main.html')
TOC_HEADER_FILE = "toc_header.html"
# Output name embeds the build month, e.g. smarttravellerOct2013.mobi.
EBOOK_OUTPUT_FILE = "smarttraveller%s.mobi" % time.strftime("%b%Y")
EBOOK_COVER = "cover.jpg"
# calibre's converter binary (OS X install location).
EBOOK_CONVERT = '/Applications/calibre.app/Contents/MacOS/ebook-convert'
def goto_website_return_html(url):
    '''
    Fetch *url* and return the raw response body as a string.
    '''
    url_handler = urllib.urlopen(url)
    try:
        return url_handler.read()
    finally:
        # Always release the network handle (the original leaked it).
        url_handler.close()
def find_country_list(filename):
    '''
    Return the cached country list from the pickle file *filename*, or
    scrape a fresh list from smarttraveller.gov.au if the cache is missing
    or unusable (and re-save the cache on success).

    Each entry maps a country name to a dict with 'url', 'issue_date',
    'safe_name' and 'file_name' keys.
    '''
    country_list = {}
    try:
        # Context manager guarantees the cache file is closed even when
        # pickle.load fails part-way through.
        with open(filename, 'rb') as pickle_file:
            country_list = pickle.load(pickle_file)
    except Exception:
        # Narrowed from a bare except: any cache failure (missing file,
        # truncated/corrupt pickle) means we must fetch a new list, but
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        html = goto_website_return_html(COUNTRY_LIST_URL)
        root = lxml.html.fromstring(html)
        elements = root.find_class('topicRow')
        for element in elements:
            country = element.find_class('hidden')[0].text
            href = BASE_HTTP + element.find_class('topicTitle')[0].get('href')
            issue_date = element.find_class('issueDate')[0].text
            if issue_date:
                # Re-render dd/mm/yyyy as e.g. "05 Oct 2013".
                issue_date = time.strftime('%d %b %Y', time.strptime(issue_date, '%d/%m/%Y'))
            country_list[country] = {}
            country_list[country]['url'] = href
            country_list[country]['issue_date'] = issue_date
            country_list[country]['safe_name'] = href.split('/')[-1]
            country_list[country]['file_name'] = os.path.join(SAVE_DIR, country_list[country]['safe_name'] + '.html')
        if country_list:
            # Got data now try to save the pickle file.
            with open(filename, 'wb') as pickle_file:
                pickle.dump(country_list, pickle_file)
    return country_list
def get_country_html(url):
    '''
    Go to the country-specific *url*, grab the relevant advice html and
    strip away tags not required for ebook reading.
    '''
    html = goto_website_return_html(url)
    tree = lxml.html.fromstring(html)
    # The advice information is located in the <article id="theArticle"> tag.
    article = tree.xpath("//article[@id='theArticle']")[0]
    try:
        # This has maps and videos, doesn't really play nice with ebooks.
        removeme = article.xpath("//section[@class='mediaFiles']")[0]
        removeme.getparent().remove(removeme)
    except IndexError:
        # Narrowed from a bare except: only the [0] on an empty xpath
        # result (no media section on this page) should be ignored.
        pass
    articlehtml = lxml.html.tostring(article)
    # I don't want extra tags!
    cleaner = lxml.html.clean.Cleaner(safe_attrs_only=True, remove_tags=['a', 'article', 'section', 'span', 'div'])
    cleansed = cleaner.clean_html(articlehtml)
    output_html = cleansed.decode('utf-8')
    return output_html
def build_table_of_contents(country_list):
    '''
    Build the table-of-contents html (the top of the output file), one
    linked list entry per country in alphabetical order.
    '''
    pieces = ["<!DOCTYPE html><html><head><style type='text/css'>.toc { page-break-after: always; text-indent: 0em; }</style></head><body><h1>Table of Contents</h1><ul id='toc'>"]
    for country in sorted(country_list):
        # Anchor on safe_name so ebook-convert can resolve the links.
        entry = country_list[country]
        pieces.append("<li><a href=\"#%s\">%s</a> (Issued: %s)</li>" % (entry['safe_name'], country, entry['issue_date']))
    pieces.append("</ul>\n")
    return "".join(pieces)
def build_big_file(country_list, output_file):
    '''
    Build the big html file (it can be like 3 meg or something).

    Writes the table of contents (build_table_of_contents) followed by each
    country's saved advice html, streaming to *output_file* rather than
    accumulating everything in memory. File handles are closed via context
    managers — the original never closed the output file at all.
    '''
    with codecs.open(output_file, mode='w', encoding='utf-8') as outfile:
        outfile.write(build_table_of_contents(country_list))
        for country in sorted(country_list):
            with codecs.open(country_list[country]['file_name'], mode='r', encoding='utf-8') as cfile:
                cfile_contents = cfile.read()
            # Create a heading with table of contents link.
            # class='chapter' is something that ebook-convert looks for.
            outfile.write("<h1 class='chapter' id='%s'>%s</h1>\n" % (country_list[country]['safe_name'], country))
            # For some reason a div tag doesn't get removed when it's getting 'cleansed'. This replace is a bit of a hack.
            outfile.write(cfile_contents.replace('<div>', '').replace('</div>', ''))
        outfile.write("</body></html>")
# Command-line entry point (Python 2 script: note the print statements).
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='http://smarttraveller.gov.au -> MOBI converter')
    parser.add_argument('-u', help='Update country html files', action='store_true')
    parser.add_argument('-b', help='Build large html file', action='store_true')
    parser.add_argument('-o', help='Output .mobi file', action='store_true')
    args = parser.parse_args()
    # If you didn't give the script anything to do, print help and quit.
    if not args.u and not args.b and not args.o:
        parser.print_help()
        exit()
    # Keep this in here just to make sure a directory exists.
    if not os.path.isdir(SAVE_DIR):
        os.makedirs(SAVE_DIR)
        print "Created Directory: %s" % os.path.abspath(SAVE_DIR)
    # Populate country_list, either from pickle or loaded from website.
    country_list = find_country_list(COUNTRY_LIST_FILE)
    # error handling, this might be a good indicator if they decide to redesign their website.
    if not country_list:
        print "Problems with finding Country List!"
        exit()
    print "Got country list with %d countries." % len(country_list)
    # Update country html files (one request per country).
    if args.u:
        for country in sorted(country_list):
            html = get_country_html(country_list[country]['url'])
            outfile = codecs.open(country_list[country]['file_name'], mode='w', encoding='utf-8')
            outfile.write(html)
            outfile.close()
            print country_list[country]['file_name'], "written."
        print "------------------------------\nFinished updating html files"
    # Create big html file
    if args.b:
        build_big_file(country_list, MAIN_HTML)
        print "Built output html: %s" % os.path.abspath(MAIN_HTML)
    # Create output mobi file via calibre's ebook-convert. This takes time.
    if args.o:
        sys_command = "%s %s %s -v -v --max-toc-links=0 --no-chapters-in-toc --output-profile=kindle --change-justification=justify --chapter-mark=both --authors='Australian Government' --book-producer='Hoz' --language='English' --pretty-print --toc-filter=r'*' --title='Smart Traveller (%s)' --pubdate='%s' --comments='This is information taken from smarttraveller.gov.au'" % (EBOOK_CONVERT, os.path.abspath(MAIN_HTML), os.path.abspath(EBOOK_OUTPUT_FILE), time.strftime("%b, %Y"), time.strftime("%d %b %Y"))
        print "Executing: %s" % sys_command
        os.system(sys_command)
| mit |
shao2610/us780 | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    # Force the extension build into the directories chosen by the perf
    # Makefile (PYTHON_EXTBUILD_LIB/TMP) instead of the distutils defaults.
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    # Install straight from the Makefile-provided build directory.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
# Compiler flags for the extension; honour the environment's extra CFLAGS.
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
# Build output directories are dictated by the perf Makefile.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
# Read the C source list, skipping blank lines and '#' comment lines.
# open() replaces the py2-only file() builtin (removed in Python 3) and the
# context manager closes the previously-leaked handle deterministically.
with open('util/python-ext-sources') as sources_file:
    ext_sources = [f.strip() for f in sources_file
                   if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                )
setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
willingc/oh-mainline | vendor/packages/twisted/twisted/conch/client/agent.py | 69 | 1730 | # -*- test-case-name: twisted.conch.test.test_default -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Accesses the key agent for user authentication.
Maintainer: Paul Swartz
"""
import os
from twisted.conch.ssh import agent, channel, keys
from twisted.internet import protocol, reactor
from twisted.python import log
class SSHAgentClient(agent.SSHAgentClient):
    """Agent protocol client that caches the agent's public key blobs."""
    def __init__(self):
        agent.SSHAgentClient.__init__(self)
        # Raw public key blobs fetched from the agent, consumed FIFO by
        # getPublicKey().
        self.blobs = []
    def getPublicKeys(self):
        """Request all identities from the agent and cache their blobs."""
        return self.requestIdentities().addCallback(self._cbPublicKeys)
    def _cbPublicKeys(self, blobcomm):
        # blobcomm is a list of (blob, comment) pairs; keep only the blobs.
        log.msg('got %i public keys' % len(blobcomm))
        self.blobs = [x[0] for x in blobcomm]
    def getPublicKey(self):
        """
        Return a L{Key} from the first blob in C{self.blobs}, if any, or
        return C{None}.
        """
        if self.blobs:
            return keys.Key.fromString(self.blobs.pop(0))
        return None
class SSHAgentForwardingChannel(channel.SSHChannel):
    """SSH channel that forwards agent traffic to the local agent socket."""
    def channelOpen(self, specificData):
        # Connect to the local agent's UNIX socket ($SSH_AUTH_SOCK);
        # channel data arriving meanwhile is buffered in self.buf.
        cc = protocol.ClientCreator(reactor, SSHAgentForwardingLocal)
        d = cc.connectUNIX(os.environ['SSH_AUTH_SOCK'])
        d.addCallback(self._cbGotLocal)
        d.addErrback(lambda x:self.loseConnection())
        self.buf = ''
    def _cbGotLocal(self, local):
        # Splice the two transports together by swapping dataReceived
        # implementations: channel bytes go straight to the local socket
        # and vice versa.
        # NOTE(review): data accumulated in self.buf before the connection
        # completed is never flushed to the local agent — confirm whether
        # that is intentional upstream.
        self.local = local
        self.dataReceived = self.local.transport.write
        self.local.dataReceived = self.write
    def dataReceived(self, data):
        # Pre-connection default; replaced per-instance in _cbGotLocal.
        self.buf += data
    def closed(self):
        # Tear down the local connection exactly once.
        if self.local:
            self.local.loseConnection()
            self.local = None
class SSHAgentForwardingLocal(protocol.Protocol):
    # Plain protocol shell; its dataReceived is monkey-patched by
    # SSHAgentForwardingChannel._cbGotLocal once the socket is connected.
    pass
| agpl-3.0 |
bmiklautz/thrift | test/py/util.py | 43 | 1177 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import glob
import os
import sys
# Absolute directory containing this script (test/py).
_SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
# Repository root, two levels up from test/py.
_ROOT_DIR = os.path.dirname(os.path.dirname(_SCRIPT_DIR))
def local_libpath():
    """Return the freshly-built thrift python lib dir matching this
    interpreter's major.minor version, or None if no build is found."""
    pattern = os.path.join(_ROOT_DIR, 'lib', 'py', 'build', 'lib.*')
    wanted_suffix = '-%d.%d' % (sys.version_info[0], sys.version_info[1])
    for candidate in glob.glob(pattern):
        if candidate.endswith(wanted_suffix):
            return candidate
| apache-2.0 |
qPCR4vir/orange | Orange/OrangeWidgets/Data/OWDataSampler.py | 6 | 16671 | import random
import Orange
from Orange.data import sample
import OWGUI
from OWWidget import *
# Orange canvas widget metadata.
NAME = "Data Sampler"
DESCRIPTION = "Samples data from a data set."
ICON = "icons/DataSampler.svg"
# Toolbox ordering and category.
PRIORITY = 1125
CATEGORY = "Data"
MAINTAINER = "Aleksander Sadikov"
MAINTAINER_EMAIL = "aleksander.sadikov(@at@)fri.uni-lj.si"
# Input/output channel declarations used by the canvas.
INPUTS = [("Data", Orange.data.Table, "setData", Default)]
OUTPUTS = [("Data Sample", Orange.data.Table, ),
           ("Remaining Data", Orange.data.Table, )]
class OWDataSampler(OWWidget):
settingsList = [
"Stratified", "Repeat", "UseSpecificSeed", "RandomSeed",
"GroupSeed", "outFold", "Folds", "SelectType", "useCases", "nCases",
"selPercentage", "CVFolds", "nGroups",
"pGroups", "GroupText", "autocommit"]
contextHandlers = {
"": DomainContextHandler("", ["nCases", "selPercentage"])
}
    def __init__(self, parent=None, signalManager=None):
        """Build the widget GUI and initialize all sampling state."""
        OWWidget.__init__(self, parent, signalManager, 'SampleData',
                          wantMainArea=0)
        self.inputs = [("Data", ExampleTable, self.setData)]
        self.outputs = [("Data Sample", ExampleTable),
                        ("Remaining Data", ExampleTable)]
        # initialization of variables
        self.data = None                # dataset (incoming stream)
        self.indices = None             # indices that control sampling
        self.Stratified = 1             # use stratified sampling if possible?
        self.Repeat = 0                 # can elements repeat in a sample?
        self.UseSpecificSeed = 0        # use a specific random seed?
        self.RandomSeed = 1             # specific seed used
        self.GroupSeed = 1              # current seed for multiple group selection
        self.outFold = 1                # folder/group to output
        self.Folds = 1                  # total number of folds/groups
        self.SelectType = 0             # sampling type (LOO, CV, ...)
        self.useCases = 0               # use a specific number of cases?
        self.nCases = 25                # number of cases to use
        self.selPercentage = 30         # sample size in %
        self.CVFolds = 10               # number of CV folds
        self.nGroups = 3                # number of groups
        self.pGroups = [0.1, 0.25, 0.5] # sizes of groups
        self.GroupText = '0.1,0.25,0.5' # assigned to Groups Control (for internal use)
        self.autocommit = False
        # Invalidated settings flag.
        self.outputInvalidateFlag = False
        self.loadSettings()
        # GUI
        # Info Box
        box1 = OWGUI.widgetBox(self.controlArea, "Information", addSpace=True)
        # Input data set info
        self.infoa = OWGUI.widgetLabel(box1, 'No data on input.')
        # Sampling type/parameters info
        self.infob = OWGUI.widgetLabel(box1, ' ')
        # Output data set info
        self.infoc = OWGUI.widgetLabel(box1, ' ')
        # Options Box
        box2 = OWGUI.widgetBox(self.controlArea, 'Options', addSpace=True)
        OWGUI.checkBox(box2, self, 'Stratified', 'Stratified (if possible)',
                       callback=self.settingsChanged)
        OWGUI.checkWithSpin(
            box2, self, 'Set random seed:', 0, 32767,
            'UseSpecificSeed',
            'RandomSeed',
            checkCallback=self.settingsChanged,
            spinCallback=self.settingsChanged
        )
        # Sampling Type Box: one radio button per sampling strategy,
        # each with its own parameter controls indented beneath it.
        self.s = [None, None, None, None]
        self.sBox = OWGUI.widgetBox(self.controlArea, "Sampling type",
                                    addSpace=True)
        self.sBox.buttons = []
        # Random Sampling
        self.s[0] = OWGUI.appendRadioButton(self.sBox, self, "SelectType",
                                            'Random sampling')
        # indent
        indent = OWGUI.checkButtonOffsetHint(self.s[0])
        # repeat checkbox
        self.h1Box = OWGUI.indentedBox(self.sBox, sep=indent,
                                       orientation="horizontal")
        OWGUI.checkBox(self.h1Box, self, 'Repeat', 'With replacement',
                       callback=self.settingsChanged)
        # specified number of elements checkbox
        self.h2Box = OWGUI.indentedBox(self.sBox, sep=indent,
                                       orientation="horizontal")
        check, _ = OWGUI.checkWithSpin(
            self.h2Box, self, 'Sample size (instances):', 1, 1000000000,
            'useCases', 'nCases',
            checkCallback=self.settingsChanged,
            spinCallback=self.settingsChanged
        )
        # percentage slider
        self.h3Box = OWGUI.indentedBox(self.sBox, sep=indent)
        OWGUI.widgetLabel(self.h3Box, "Sample size:")
        self.slidebox = OWGUI.widgetBox(self.h3Box, orientation="horizontal")
        OWGUI.hSlider(self.slidebox, self, 'selPercentage',
                      minValue=1, maxValue=100, step=1, ticks=10,
                      labelFormat=" %d%%",
                      callback=self.settingsChanged)
        # Sample size (instances) check disables the Percentage slider.
        # TODO: Should be an exclusive option (radio buttons)
        check.disables.extend([(-1, self.h3Box)])
        check.makeConsistent()
        # Cross Validation sampling options
        self.s[1] = OWGUI.appendRadioButton(self.sBox, self, "SelectType",
                                            "Cross validation")
        box = OWGUI.indentedBox(self.sBox, sep=indent,
                                orientation="horizontal")
        OWGUI.spin(box, self, 'CVFolds', 2, 100, step=1,
                   label='Number of folds:  ',
                   callback=self.settingsChanged)
        # Leave-One-Out
        self.s[2] = OWGUI.appendRadioButton(self.sBox, self, "SelectType",
                                            "Leave-one-out")
        # Multiple Groups
        self.s[3] = OWGUI.appendRadioButton(self.sBox, self, "SelectType",
                                            'Multiple subsets')
        gbox = OWGUI.indentedBox(self.sBox, sep=indent,
                                 orientation="horizontal")
        OWGUI.lineEdit(gbox, self, 'GroupText',
                       label='Subset sizes (e.g. "0.1, 0.2, 0.5"):',
                       callback=self.multipleChanged)
        # Output Group Box: which fold/group is sent downstream.
        box = OWGUI.widgetBox(self.controlArea, 'Output Data for Fold / Group',
                              addSpace=True)
        self.foldcombo = OWGUI.comboBox(
            box, self, "outFold", items=range(1, 101),
            label='Fold / group:', orientation="horizontal",
            sendSelectedValue=1, valueType=int,
            callback=self.invalidate
        )
        # Fold selection is meaningless for plain random sampling (type 0).
        self.foldcombo.setEnabled(self.SelectType != 0)
        # Sample Data box
        OWGUI.rubber(self.controlArea)
        box = OWGUI.widgetBox(self.controlArea, "Sample Data")
        cb = OWGUI.checkBox(box, self, "autocommit", "Sample on any change")
        self.sampleButton = OWGUI.button(box, self, 'Sample &Data',
                                         callback=self.sdata, default=True)
        OWGUI.setStopper(self, self.sampleButton, cb, "outputInvalidateFlag",
                         callback=self.sdata)
        # set initial radio button on (default sample type)
        self.s[self.SelectType].setChecked(True)
        # Connect radio buttons (SelectType); i is bound as a default
        # argument so each lambda keeps its own index.
        for i, button in enumerate(self.s):
            button.toggled[bool].connect(
                lambda state, i=i: self.samplingTypeChanged(state, i)
            )
        self.process()
        self.resize(200, 275)
    # CONNECTION TRIGGER AND GUI ROUTINES
    # enables RadioButton switching
    def samplingTypeChanged(self, value, i):
        """Sampling type changed."""
        # *value* is the toggled state from the radio button signal and is
        # unused; only the index *i* of the selected type matters.
        self.SelectType = i
        self.settingsChanged()
def multipleChanged(self):
"""Multiple subsets (Groups) changed."""
self.error(1)
try:
self.pGroups = [float(x) for x in self.GroupText.split(',')]
self.nGroups = len(self.pGroups)
except:
self.error(1, "Invalid specification for sizes of subsets.")
else:
self.settingsChanged()
def updateFoldCombo(self):
"""Update the 'Folds' combo box contents."""
fold = self.outFold
self.Folds = 1
if self.SelectType == 1:
self.Folds = self.CVFolds
elif self.SelectType == 2:
if self.data:
self.Folds = len(self.data)
else:
self.Folds = 1
elif self.SelectType == 3:
self.Folds = self.nGroups
self.foldcombo.clear()
for x in range(self.Folds):
self.foldcombo.addItem(str(x + 1))
self.outFold = min(fold, self.Folds)
    def setData(self, dataset):
        """Set the input data set."""
        # Store the current domain's settings before switching contexts.
        self.closeContext()
        if dataset is not None:
            self.infoa.setText('%d instances in input data set.' %
                               len(dataset))
            self.data = dataset
            # Restore per-domain settings, then resample immediately.
            self.openContext("", dataset)
            self.process()
            self.sdata()
        else:
            # Input disconnected: reset labels and clear both outputs.
            self.infoa.setText('No data on input.')
            self.infob.setText('')
            self.infoc.setText('')
            self.send("Data Sample", None)
            self.send("Remaining Data", None)
            self.data = None
    # feeds the output stream
    def sdata(self):
        """Apply the prepared sampling indices and push sample/remainder
        to the output channels (no-op when there is no input data)."""
        if not self.data:
            return
        # select data
        if self.SelectType == 0:
            if self.useCases == 1 and self.Repeat == 1:
                # Sampling with replacement: indices may repeat, so the
                # sample is assembled row by row and no remainder exists.
                indices = self.indices(self.data)
                sample = [self.data[i] for i in indices]
                sample = Orange.data.Table(self.data.domain, sample)
                remainder = None
            else:
                indices = self.indices(self.data)
                sample = self.data.select(indices, 0)
                remainder = self.data.select(indices, 1)
            self.infoc.setText('Output: %d instances.' % len(sample))
        elif self.SelectType == 3:
            # Multiple subsets: resample using the chosen group's size.
            indices = self.indices(self.data, p0=self.pGroups[self.outFold - 1])
            sample = self.data.select(indices, 0)
            remainder = self.data.select(indices, 1)
            self.infoc.setText(
                'Output: subset %(fold)d of %(folds)d, %(len)d instance(s).' %
                {"fold": self.outFold, "folds": self.Folds, "len": len(sample)}
            )
        else:
            # CV/LOO: the selected fold is the sample, the rest remainder.
            indices = self.indices(self.data)
            sample = self.data.select(indices, self.outFold - 1)
            remainder = self.data.select(indices, self.outFold - 1, negate=1)
            self.infoc.setText(
                'Output: fold %(fold)d of %(folds)d, %(len)d instance(s).' %
                {"fold": self.outFold, "folds": self.Folds, "len": len(sample)}
            )
        # Propagate the input table's name to both outputs.
        if sample is not None:
            sample.name = self.data.name
        if remainder is not None:
            remainder.name = self.data.name
        # send data
        self.nSample = len(sample)
        self.nRemainder = len(remainder) if remainder is not None else 0
        self.send("Data Sample", sample)
        self.send("Remaining Data", remainder)
        # Outputs now reflect the current settings.
        self.outputInvalidateFlag = False
    def process(self):
        """Rebuild the index generator (self.indices) used by sdata()
        according to the currently selected sampling options, and
        update the info labels.
        """
        self.error(0)
        self.warning(0)
        self.infob.setText('')
        if self.SelectType == 0:
            # Random Selection
            if self.useCases == 1:
                ncases = self.nCases
                if self.Repeat == 0:
                    ncases = self.nCases
                    if self.data is not None and ncases > len(self.data):
                        self.warning(0, "Sample size (w/o repetitions) larger than dataset.")
                        ncases = len(self.data)
                    # p0 == 1 is nudged by 1e-7, presumably so the
                    # sampler treats it as a count rather than a
                    # proportion -- TODO confirm against Orange docs
                    p0 = ncases + 1e-7 if ncases == 1 else ncases
                    self.indices = sample.SubsetIndices2(p0=p0)
                    self.infob.setText('Random sampling, using exactly %d instances.' % ncases)
                else:
                    p0 = ncases + 1e-7 if ncases == 1 else ncases
                    self.indices = sample.SubsetIndicesMultiple(p0=p0)
                    self.infob.setText('Random sampling with repetitions, %d instances.' % ncases)
            else:
                if self.selPercentage == 100:
                    p0 = len(self.data) if self.data is not None else 1.0
                else:
                    p0 = float(self.selPercentage) / 100.0
                self.indices = sample.SubsetIndices2(p0=p0)
                self.infob.setText('Random sampling, %d%% of input instances.' % self.selPercentage)
            if self.Stratified == 1:
                self.indices.stratified = self.indices.StratifiedIfPossible
            else:
                self.indices.stratified = self.indices.NotStratified
            if self.UseSpecificSeed == 1:
                self.indices.randseed = self.RandomSeed
            else:
                self.indices.randomGenerator = Orange.misc.Random(random.randint(0,65536))
        # Cross Validation / LOO
        elif self.SelectType == 1 or self.SelectType == 2:
            # apply selected options
            if self.SelectType == 2:
                # LOO is CV with as many folds as instances
                folds = len(self.data) if self.data is not None else 1
                self.infob.setText('Leave-one-out.')
            else:
                folds = self.CVFolds
                self.infob.setText('%d-fold cross validation.' % self.CVFolds)
            self.indices = sample.SubsetIndicesCV(folds=folds)
            if self.Stratified == 1:
                self.indices.stratified = self.indices.StratifiedIfPossible
            else:
                self.indices.stratified = self.indices.NotStratified
            if self.UseSpecificSeed == 1:
                self.indices.randseed = self.RandomSeed
            else:
                self.indices.randseed = random.randint(0, 65536)
        # MultiGroup
        elif self.SelectType == 3:
            self.infob.setText('Multiple subsets.')
            #prepare indices generator
            self.indices = sample.SubsetIndices2()
            if self.Stratified == 1:
                self.indices.stratified = self.indices.StratifiedIfPossible
            else:
                self.indices.stratified = self.indices.NotStratified
            if self.UseSpecificSeed == 1:
                self.indices.randseed = self.RandomSeed
            else:
                self.indices.randomGenerator = Orange.misc.Random(random.randint(0,65536))
def settingsChanged(self):
# enable fold selection and fill combobox if applicable
if self.SelectType == 0:
self.foldcombo.setEnabled(False)
else:
self.foldcombo.setEnabled(True)
self.updateFoldCombo()
self.process()
self.invalidate()
def invalidate(self):
"""Invalidate current output."""
self.infoc.setText('...')
if self.autocommit:
self.sdata()
else:
self.outputInvalidateFlag = True
    def sendReport(self):
        """Write the widget's settings and data counts into the report.

        NOTE(review): if SelectType ever held a value outside 0..3,
        `stype` would be unbound and reportSettings would raise
        NameError -- confirm SelectType is constrained by the GUI.
        """
        if self.SelectType == 0:
            if self.useCases:
                stype = "Random sample of %i instances" % self.nCases
            else:
                stype = "Random sample with %i%% instances" % self.selPercentage
        elif self.SelectType == 1:
            stype = "%i-fold cross validation" % self.CVFolds
        elif self.SelectType == 2:
            stype = "Leave one out"
        elif self.SelectType == 3:
            stype = "Multiple subsets"
        self.reportSettings("Settings", [("Sampling type", stype),
                                         ("Stratification", OWGUI.YesNo[self.Stratified]),
                                         ("Random seed", str(self.RandomSeed) if self.UseSpecificSeed else "auto")])
        if self.data is not None:
            self.reportSettings("Data", [("Input", "%i examples" % len(self.data)),
                                         ("Sample", "%i examples" % self.nSample),
                                         ("Rest", "%i examples" % self.nRemainder)])
        else:
            self.reportSettings("Data", [("Input", "None")])
if __name__ == "__main__":
    # standalone smoke test: load the iris data set, feed it to the
    # widget, show the GUI and persist settings on exit
    appl = QApplication(sys.argv)
    ow = OWDataSampler()
    data = Orange.data.Table('iris.tab')
    ow.setData(data)
    ow.show()
    appl.exec_()
    ow.saveSettings()
| gpl-3.0 |
lthall/Leonard_ardupilot | Tools/scripts/build_binaries.py | 9 | 33402 | #!/usr/bin/env python
"""
script to build the latest binaries for each vehicle type, ready to upload
Peter Barker, August 2017
based on build_binaries.sh by Andrew Tridgell, March 2013
AP_FLAKE8_CLEAN
"""
from __future__ import print_function
import datetime
import optparse
import os
import re
import shutil
import time
import string
import subprocess
import sys
import gzip
# local imports
import generate_manifest
import gen_stable
import build_binaries_history
# single flag consulted wherever Python 2 and Python 3 behaviour differs
running_python3 = sys.version_info[0] >= 3
class build_binaries(object):
def __init__(self, tags):
self.tags = tags
self.dirty = False
binaries_history_filepath = os.path.join(self.buildlogs_dirpath(),
"build_binaries_history.sqlite")
self.history = build_binaries_history.BuildBinariesHistory(binaries_history_filepath)
def progress(self, string):
'''pretty-print progress'''
print("BB: %s" % string)
def run_git(self, args):
'''run git with args git_args; returns git's output'''
cmd_list = ["git"]
cmd_list.extend(args)
return self.run_program("BB-GIT", cmd_list)
def board_branch_bit(self, board):
'''return a fragment which might modify the branch name.
this was previously used to have a master-AVR branch etc
if the board type was apm1 or apm2'''
return None
def board_options(self, board):
'''return board-specific options'''
if board == "bebop":
return ["--static"]
return []
def run_waf(self, args):
if os.path.exists("waf"):
waf = "./waf"
else:
waf = os.path.join(".", "modules", "waf", "waf-light")
cmd_list = [waf]
cmd_list.extend(args)
self.run_program("BB-WAF", cmd_list)
def run_program(self, prefix, cmd_list, show_output=True):
if show_output:
self.progress("Running (%s)" % " ".join(cmd_list))
p = subprocess.Popen(cmd_list, bufsize=1, stdin=None,
stdout=subprocess.PIPE, close_fds=True,
stderr=subprocess.STDOUT)
output = ""
while True:
x = p.stdout.readline()
if len(x) == 0:
returncode = os.waitpid(p.pid, 0)
if returncode:
break
# select not available on Windows... probably...
time.sleep(0.1)
continue
if running_python3:
x = bytearray(x)
x = filter(lambda x : chr(x) in string.printable, x)
x = "".join([chr(c) for c in x])
output += x
x = x.rstrip()
if show_output:
print("%s: %s" % (prefix, x))
(_, status) = returncode
if status != 0 and show_output:
self.progress("Process failed (%s)" %
str(returncode))
raise subprocess.CalledProcessError(
returncode, cmd_list)
return output
def run_make(self, args):
cmd_list = ["make"]
cmd_list.extend(args)
self.run_program("BB-MAKE", cmd_list)
def run_git_update_submodules(self):
'''if submodules are present initialise and update them'''
if os.path.exists(os.path.join(self.basedir, ".gitmodules")):
self.run_git(["submodule",
"update",
"--init",
"--recursive",
"-f"])
def checkout(self, vehicle, ctag, cboard=None, cframe=None, submodule_update=True):
'''attempt to check out a git tree. Various permutations are
attempted based on ctag - for examplle, if the board is avr and ctag
is bob we will attempt to checkout bob-AVR'''
if self.dirty:
self.progress("Skipping checkout for dirty build")
return True
self.progress("Trying checkout %s %s %s %s" %
(vehicle, ctag, cboard, cframe))
self.run_git(['stash'])
if ctag == "latest":
vtag = "master"
else:
tagvehicle = vehicle
if tagvehicle == "Rover":
# FIXME: Rover tags in git still named APMrover2 :-(
tagvehicle = "APMrover2"
vtag = "%s-%s" % (tagvehicle, ctag)
branches = []
if cframe is not None:
# try frame specific tag
branches.append("%s-%s" % (vtag, cframe))
if cboard is not None:
bbb = self.board_branch_bit(cboard)
if bbb is not None:
# try board type specific branch extension
branches.append("".join([vtag, bbb]))
branches.append(vtag)
for branch in branches:
try:
self.progress("Trying branch %s" % branch)
self.run_git(["checkout", "-f", branch])
if submodule_update:
self.run_git_update_submodules()
self.run_git(["log", "-1"])
return True
except subprocess.CalledProcessError:
self.progress("Checkout branch %s failed" % branch)
self.progress("Failed to find tag for %s %s %s %s" %
(vehicle, ctag, cboard, cframe))
return False
    def skip_board_waf(self, board):
        '''check if we should skip this build because we don't support the
        board in this release
        '''
        # Strategy: configure waf with a deliberately bogus board name;
        # waf's error message then lists every valid board, which we
        # parse to see whether *board* is among them.
        try:
            out = self.run_program('waf', ['./waf', 'configure', '--board=BOARDTEST'], False)
            lines = out.split('\n')
            needles = ["BOARDTEST' (choose from", "BOARDTEST': choices are"]
            for line in lines:
                for needle in needles:
                    idx = line.find(needle)
                    if idx != -1:
                        break
                # idx carries the inner loop's result: match position,
                # or -1 when no needle matched this line
                if idx != -1:
                    # drop the needle prefix and trailing ')', then
                    # strip quotes/spaces, leaving "board1,board2,..."
                    line = line[idx+len(needle):-1]
                    line = line.replace("'", "")
                    line = line.replace(" ", "")
                    boards = line.split(",")
                    return board not in boards
        except IOError as e:
            # errno 2 (ENOENT): no waf in this tree -- fall through and
            # treat the board as unsupported
            if e.errno != 2:
                raise
        self.progress("Skipping unsupported board %s" % (board,))
        return True
def skip_frame(self, board, frame):
'''returns true if this board/frame combination should not be built'''
if frame == "heli":
if board in ["bebop", "aerofc-v1", "skyviper-v2450", "CubeSolo", "CubeGreen-solo", 'skyviper-journey']:
self.progress("Skipping heli build for %s" % board)
return True
return False
def first_line_of_filepath(self, filepath):
'''returns the first (text) line from filepath'''
with open(filepath) as fh:
line = fh.readline()
return line
    def skip_build(self, buildtag, builddir):
        '''check if we should skip this build because we have already built
        this version
        '''
        # FORCE_BUILD in the environment overrides the version check
        if os.getenv("FORCE_BUILD", False):
            return False
        if not os.path.exists(os.path.join(self.basedir, '.gitmodules')):
            self.progress("Skipping build without submodules")
            return True
        bname = os.path.basename(builddir)
        # map .../<ym>/<ymdhm>/<board> onto the per-tag "most recent"
        # directory .../<buildtag>/<board> maintained by copyit()
        ldir = os.path.join(os.path.dirname(os.path.dirname(
            os.path.dirname(builddir))), buildtag, bname) # FIXME: WTF
        oldversion_filepath = os.path.join(ldir, "git-version.txt")
        if not os.path.exists(oldversion_filepath):
            self.progress("%s doesn't exist - building" % oldversion_filepath)
            return False
        # compare the recorded git log's first line with HEAD's
        oldversion = self.first_line_of_filepath(oldversion_filepath)
        newversion = self.run_git(["log", "-1"])
        newversion = newversion.splitlines()[0]
        oldversion = oldversion.rstrip()
        newversion = newversion.rstrip()
        self.progress("oldversion=%s newversion=%s" %
                      (oldversion, newversion,))
        if oldversion == newversion:
            self.progress("Skipping build - version match (%s)" %
                          (newversion,))
            return True
        self.progress("%s needs rebuild" % (ldir,))
        return False
def write_string_to_filepath(self, string, filepath):
'''writes the entirety of string to filepath'''
with open(filepath, "w") as x:
x.write(string)
def version_h_path(self, src):
'''return path to version.h'''
if src == 'AP_Periph':
return os.path.join('Tools', src, "version.h")
return os.path.join(src, "version.h")
def addfwversion_gitversion(self, destdir, src):
# create git-version.txt:
gitlog = self.run_git(["log", "-1"])
gitversion_filepath = os.path.join(destdir, "git-version.txt")
gitversion_content = gitlog
versionfile = self.version_h_path(src)
if os.path.exists(versionfile):
content = self.read_string_from_filepath(versionfile)
match = re.search('define.THISFIRMWARE "([^"]+)"', content)
if match is None:
self.progress("Failed to retrieve THISFIRMWARE from version.h")
self.progress("Content: (%s)" % content)
self.progress("Writing version info to %s" %
(gitversion_filepath,))
gitversion_content += "\nAPMVERSION: %s\n" % (match.group(1))
else:
self.progress("%s does not exist" % versionfile)
self.write_string_to_filepath(gitversion_content, gitversion_filepath)
def addfwversion_firmwareversiontxt(self, destdir, src):
# create firmware-version.txt
versionfile = self.version_h_path(src)
if not os.path.exists(versionfile):
self.progress("%s does not exist" % (versionfile,))
return
ss = r".*define +FIRMWARE_VERSION[ ]+(?P<major>\d+)[ ]*,[ ]*" \
r"(?P<minor>\d+)[ ]*,[ ]*(?P<point>\d+)[ ]*,[ ]*" \
r"(?P<type>[A-Z_]+)[ ]*"
content = self.read_string_from_filepath(versionfile)
match = re.search(ss, content)
if match is None:
self.progress("Failed to retrieve FIRMWARE_VERSION from version.h")
self.progress("Content: (%s)" % content)
return
ver = "%d.%d.%d-%s\n" % (int(match.group("major")),
int(match.group("minor")),
int(match.group("point")),
match.group("type"))
firmware_version_filepath = "firmware-version.txt"
self.progress("Writing version (%s) to %s" %
(ver, firmware_version_filepath,))
self.write_string_to_filepath(
ver, os.path.join(destdir, firmware_version_filepath))
def addfwversion(self, destdir, src):
'''write version information into destdir'''
self.addfwversion_gitversion(destdir, src)
self.addfwversion_firmwareversiontxt(destdir, src)
def read_string_from_filepath(self, filepath):
'''returns content of filepath as a string'''
with open(filepath, 'rb') as fh:
content = fh.read()
return content
def string_in_filepath(self, string, filepath):
'''returns true if string exists in the contents of filepath'''
return string in self.read_string_from_filepath(filepath)
def mkpath(self, path):
'''make directory path and all elements leading to it'''
'''distutils.dir_util.mkpath was playing up'''
try:
os.makedirs(path)
except OSError as e:
if e.errno != 17: # EEXIST
raise e
def copyit(self, afile, adir, tag, src):
'''copies afile into various places, adding metadata'''
bname = os.path.basename(adir)
tdir = os.path.join(os.path.dirname(os.path.dirname(
os.path.dirname(adir))), tag, bname)
if tag == "latest":
# we keep a permanent archive of all "latest" builds,
# their path including a build timestamp:
self.mkpath(adir)
self.progress("Copying %s to %s" % (afile, adir,))
shutil.copy(afile, adir)
self.addfwversion(adir, src)
# the most recent build of every tag is kept around:
self.progress("Copying %s to %s" % (afile, tdir))
self.mkpath(tdir)
self.addfwversion(tdir, src)
shutil.copy(afile, tdir)
def touch_filepath(self, filepath):
'''creates a file at filepath, or updates the timestamp on filepath'''
if os.path.exists(filepath):
os.utime(filepath, None)
else:
with open(filepath, "a"):
pass
def build_vehicle(self, tag, vehicle, boards, vehicle_binaries_subdir,
binaryname, px4_binaryname, frames=[None]):
'''build vehicle binaries'''
self.progress("Building %s %s binaries (cwd=%s)" %
(vehicle, tag, os.getcwd()))
board_count = len(boards)
count = 0
for board in sorted(boards, key=str.lower):
now = datetime.datetime.now()
count += 1
self.progress("[%u/%u] Building board: %s at %s" %
(count, board_count, board, str(now)))
for frame in frames:
if frame is not None:
self.progress("Considering frame %s for board %s" %
(frame, board))
if frame is None:
framesuffix = ""
else:
framesuffix = "-%s" % frame
if not self.checkout(vehicle, tag, board, frame, submodule_update=False):
msg = ("Failed checkout of %s %s %s %s" %
(vehicle, board, tag, frame,))
self.progress(msg)
self.error_strings.append(msg)
continue
self.progress("Building %s %s %s binaries %s" %
(vehicle, tag, board, frame))
ddir = os.path.join(self.binaries,
vehicle_binaries_subdir,
self.hdate_ym,
self.hdate_ymdhm,
"".join([board, framesuffix]))
if self.skip_build(tag, ddir):
continue
if self.skip_frame(board, frame):
continue
# we do the submodule update after the skip_board_waf check to avoid doing it on
# builds we will not be running
self.run_git_update_submodules()
if self.skip_board_waf(board):
continue
if os.path.exists(self.buildroot):
shutil.rmtree(self.buildroot)
self.remove_tmpdir()
githash = self.run_git(["rev-parse", "HEAD"]).rstrip()
t0 = time.time()
self.progress("Configuring for %s in %s" %
(board, self.buildroot))
try:
waf_opts = ["configure",
"--board", board,
"--out", self.buildroot,
"clean"]
waf_opts.extend(self.board_options(board))
self.run_waf(waf_opts)
except subprocess.CalledProcessError:
self.progress("waf configure failed")
continue
try:
target = os.path.join("bin",
"".join([binaryname, framesuffix]))
self.run_waf(["build", "--targets", target])
except subprocess.CalledProcessError:
msg = ("Failed build of %s %s%s %s" %
(vehicle, board, framesuffix, tag))
self.progress(msg)
self.error_strings.append(msg)
# record some history about this build
t1 = time.time()
time_taken_to_build = t1-t0
self.history.record_build(githash, tag, vehicle, board, frame, None, t0, time_taken_to_build)
continue
t1 = time.time()
time_taken_to_build = t1-t0
self.progress("Building %s %s %s %s took %u seconds" %
(vehicle, tag, board, frame, time_taken_to_build))
bare_path = os.path.join(self.buildroot,
board,
"bin",
"".join([binaryname, framesuffix]))
files_to_copy = []
extensions = [".px4", ".apj", ".abin", "_with_bl.hex", ".hex"]
if vehicle == 'AP_Periph':
# need bin file for uavcan-gui-tool and MissionPlanner
extensions.append('.bin')
for extension in extensions:
filepath = "".join([bare_path, extension])
if os.path.exists(filepath):
files_to_copy.append(filepath)
if not os.path.exists(bare_path):
raise Exception("No elf file?!")
# only copy the elf if we don't have other files to copy
if len(files_to_copy) == 0:
files_to_copy.append(bare_path)
for path in files_to_copy:
try:
self.copyit(path, ddir, tag, vehicle)
except Exception as e:
self.progress("Failed to copy %s to %s: %s" % (path, ddir, str(e)))
# why is touching this important? -pb20170816
self.touch_filepath(os.path.join(self.binaries,
vehicle_binaries_subdir, tag))
# record some history about this build
self.history.record_build(githash, tag, vehicle, board, frame, bare_path, t0, time_taken_to_build)
if not self.checkout(vehicle, tag, "PX4", None):
self.checkout(vehicle, "latest")
return
board_list = self.run_program('BB-WAF', ['./waf', 'list_boards'])
board_list = board_list.split(' ')
self.checkout(vehicle, "latest")
if 'px4-v2' not in board_list:
print("Skipping px4 builds")
return
# PX4-building
board = "px4"
for frame in frames:
self.progress("Building frame %s for board %s" % (frame, board))
if frame is None:
framesuffix = ""
else:
framesuffix = "-%s" % frame
if not self.checkout(vehicle, tag, "PX4", frame):
msg = ("Failed checkout of %s %s %s %s" %
(vehicle, "PX4", tag, frame))
self.progress(msg)
self.error_strings.append(msg)
self.checkout(vehicle, "latest")
continue
try:
deadwood = "../Build.%s" % vehicle
if os.path.exists(deadwood):
self.progress("#### Removing (%s)" % deadwood)
shutil.rmtree(os.path.join(deadwood))
except Exception as e:
self.progress("FIXME: narrow exception (%s)" % repr(e))
self.progress("Building %s %s PX4%s binaries" %
(vehicle, tag, framesuffix))
ddir = os.path.join(self.binaries,
vehicle_binaries_subdir,
self.hdate_ym,
self.hdate_ymdhm,
"".join(["PX4", framesuffix]))
if self.skip_build(tag, ddir):
continue
for v in ["v1", "v2", "v3", "v4", "v4pro"]:
px4_v = "%s-%s" % (board, v)
if self.skip_board_waf(px4_v):
continue
if os.path.exists(self.buildroot):
shutil.rmtree(self.buildroot)
self.progress("Configuring for %s in %s" %
(px4_v, self.buildroot))
try:
self.run_waf(["configure", "--board", px4_v,
"--out", self.buildroot, "clean"])
except subprocess.CalledProcessError:
self.progress("waf configure failed")
continue
try:
self.run_waf([
"build",
"--targets",
os.path.join("bin",
"".join([binaryname, framesuffix]))])
except subprocess.CalledProcessError:
msg = ("Failed build of %s %s%s %s for %s" %
(vehicle, board, framesuffix, tag, v))
self.progress(msg)
self.error_strings.append(msg)
continue
oldfile = os.path.join(self.buildroot, px4_v, "bin",
"%s%s.px4" % (binaryname, framesuffix))
newfile = "%s-%s.px4" % (px4_binaryname, v)
self.progress("Copying (%s) to (%s)" % (oldfile, newfile,))
try:
shutil.copyfile(oldfile, newfile)
except Exception as e:
self.progress("FIXME: narrow exception (%s)" % repr(e))
msg = ("Failed build copy of %s PX4%s %s for %s" %
(vehicle, framesuffix, tag, v))
self.progress(msg)
self.error_strings.append(msg)
continue
# FIXME: why the two stage copy?!
self.copyit(newfile, ddir, tag, vehicle)
self.checkout(vehicle, "latest")
def common_boards(self):
'''returns list of boards common to all vehicles'''
return ["fmuv2",
"fmuv3",
"fmuv5",
"mindpx-v2",
"erlebrain2",
"navigator",
"navio",
"navio2",
"edge",
"pxf",
"pxfmini",
"KakuteF4",
"KakuteF7",
"KakuteF7Mini",
"KakuteF4Mini",
"MambaF405v2",
"MatekF405",
"MatekF405-bdshot",
"MatekF405-STD",
"MatekF405-Wing",
"MatekF765-Wing",
"MatekF405-CAN",
"MatekH743",
"MatekH743-bdshot",
"OMNIBUSF7V2",
"sparky2",
"omnibusf4",
"omnibusf4pro",
"omnibusf4pro-bdshot",
"omnibusf4v6",
"OmnibusNanoV6",
"OmnibusNanoV6-bdshot",
"mini-pix",
"airbotf4",
"revo-mini",
"revo-mini-bdshot",
"revo-mini-i2c",
"revo-mini-i2c-bdshot",
"CubeBlack",
"CubeBlack+",
"CubePurple",
"Pixhawk1",
"Pixhawk1-1M",
"Pixhawk4",
"Pix32v5",
"PH4-mini",
"CUAVv5",
"CUAVv5Nano",
"CUAV-Nora",
"CUAV-X7",
"CUAV-X7-bdshot",
"mRoX21",
"Pixracer",
"Pixracer-bdshot",
"F4BY",
"mRoX21-777",
"mRoControlZeroF7",
"mRoNexus",
"mRoPixracerPro",
"mRoPixracerPro-bdshot",
"mRoControlZeroOEMH7",
"mRoControlZeroClassic",
"mRoControlZeroH7",
"mRoControlZeroH7-bdshot",
"F35Lightning",
"speedybeef4",
"SuccexF4",
"DrotekP3Pro",
"VRBrain-v51",
"VRBrain-v52",
"VRUBrain-v51",
"VRCore-v10",
"VRBrain-v54",
"TBS-Colibri-F7",
"Durandal",
"Durandal-bdshot",
"CubeOrange",
"CubeOrange-bdshot",
"CubeYellow",
"R9Pilot",
"QioTekZealotF427",
"BeastH7",
"BeastF7",
"FlywooF745",
"luminousbee5",
# SITL targets
"SITL_x86_64_linux_gnu",
"SITL_arm_linux_gnueabihf",
]
def AP_Periph_boards(self):
'''returns list of boards for AP_Periph'''
return ["f103-GPS",
"f103-ADSB",
"f103-RangeFinder",
"f303-GPS",
"f303-Universal",
"f303-M10025",
"f303-M10070",
"f303-MatekGPS",
"f405-MatekGPS",
"f103-Airspeed",
"CUAV_GPS",
"ZubaxGNSS",
"CubeOrange-periph",
"CubeBlack-periph",
"MatekH743-periph",
"HitecMosaic",
"FreeflyRTK",
"HolybroGPS",
]
def build_arducopter(self, tag):
'''build Copter binaries'''
boards = []
boards.extend(["skyviper-v2450", "aerofc-v1", "bebop", "CubeSolo", "CubeGreen-solo", "skyviper-journey"])
boards.extend(self.common_boards()[:])
self.build_vehicle(tag,
"ArduCopter",
boards,
"Copter",
"arducopter",
"ArduCopter",
frames=[None, "heli"])
def build_arduplane(self, tag):
'''build Plane binaries'''
boards = self.common_boards()[:]
boards.append("disco")
self.build_vehicle(tag,
"ArduPlane",
boards,
"Plane",
"arduplane",
"ArduPlane")
def build_antennatracker(self, tag):
'''build Tracker binaries'''
boards = self.common_boards()[:]
self.build_vehicle(tag,
"AntennaTracker",
boards,
"AntennaTracker",
"antennatracker",
"AntennaTracker",)
def build_rover(self, tag):
'''build Rover binaries'''
boards = self.common_boards()
self.build_vehicle(tag,
"Rover",
boards,
"Rover",
"ardurover",
"Rover")
def build_ardusub(self, tag):
'''build Sub binaries'''
self.build_vehicle(tag,
"ArduSub",
self.common_boards(),
"Sub",
"ardusub",
"ArduSub")
def build_AP_Periph(self, tag):
'''build AP_Periph binaries'''
boards = self.AP_Periph_boards()
self.build_vehicle(tag,
"AP_Periph",
boards,
"AP_Periph",
"AP_Periph",
"AP_Periph")
def generate_manifest(self):
'''generate manigest files for GCS to download'''
self.progress("Generating manifest")
base_url = 'https://firmware.ardupilot.org'
generator = generate_manifest.ManifestGenerator(self.binaries,
base_url)
content = generator.json()
new_json_filepath = os.path.join(self.binaries, "manifest.json.new")
self.write_string_to_filepath(content, new_json_filepath)
# provide a pre-compressed manifest. For reference, a 7M manifest
# "gzip -9"s to 300k in 1 second, "xz -e"s to 80k in 26 seconds
new_json_filepath_gz = os.path.join(self.binaries,
"manifest.json.gz.new")
with gzip.open(new_json_filepath_gz, 'wb') as gf:
if running_python3:
content = bytes(content, 'ascii')
gf.write(content)
json_filepath = os.path.join(self.binaries, "manifest.json")
json_filepath_gz = os.path.join(self.binaries, "manifest.json.gz")
shutil.move(new_json_filepath, json_filepath)
shutil.move(new_json_filepath_gz, json_filepath_gz)
self.progress("Manifest generation successful")
self.progress("Generating stable releases")
gen_stable.make_all_stable(self.binaries)
self.progress("Generate stable releases done")
def validate(self):
'''run pre-run validation checks'''
if "dirty" in self.tags:
if len(self.tags) > 1:
raise ValueError("dirty must be only tag if present (%s)" %
(str(self.tags)))
self.dirty = True
def pollute_env_from_file(self, filepath):
with open(filepath) as f:
for line in f:
try:
(name, value) = str.split(line, "=")
except ValueError as e:
self.progress("%s: split failed: %s" % (filepath, str(e)))
continue
value = value.rstrip()
self.progress("%s: %s=%s" % (filepath, name, value))
os.environ[name] = value
def remove_tmpdir(self):
if os.path.exists(self.tmpdir):
self.progress("Removing (%s)" % (self.tmpdir,))
shutil.rmtree(self.tmpdir)
def buildlogs_dirpath(self):
return os.getenv("BUILDLOGS",
os.path.join(os.getcwd(), "..", "buildlogs"))
def run(self):
self.validate()
prefix_bin_dirpath = os.path.join(os.environ.get('HOME'),
"prefix", "bin")
origin_env_path = os.environ.get("PATH")
os.environ["PATH"] = ':'.join([prefix_bin_dirpath, origin_env_path,
"/bin", "/usr/bin"])
if 'BUILD_BINARIES_PATH' in os.environ:
self.tmpdir = os.environ['BUILD_BINARIES_PATH']
else:
self.tmpdir = os.path.join(os.getcwd(), 'build.tmp.binaries')
os.environ["TMPDIR"] = self.tmpdir
print(self.tmpdir)
self.remove_tmpdir()
self.progress("Building in %s" % self.tmpdir)
now = datetime.datetime.now()
self.progress(now)
if not self.dirty:
self.run_git(["checkout", "-f", "master"])
githash = self.run_git(["rev-parse", "HEAD"])
githash = githash.rstrip()
self.progress("git hash: %s" % str(githash))
self.hdate_ym = now.strftime("%Y-%m")
self.hdate_ymdhm = now.strftime("%Y-%m-%d-%H:%m")
self.mkpath(os.path.join("binaries", self.hdate_ym,
self.hdate_ymdhm))
self.binaries = os.path.join(self.buildlogs_dirpath(), "binaries")
self.basedir = os.getcwd()
self.error_strings = []
if os.path.exists("config.mk"):
# FIXME: narrow exception
self.pollute_env_from_file("config.mk")
if not self.dirty:
self.run_git_update_submodules()
self.buildroot = os.path.join(os.environ.get("TMPDIR"),
"binaries.build")
for tag in self.tags:
t0 = time.time()
self.build_arducopter(tag)
self.build_arduplane(tag)
self.build_rover(tag)
self.build_antennatracker(tag)
self.build_ardusub(tag)
self.build_AP_Periph(tag)
self.history.record_run(githash, tag, t0, time.time()-t0)
if os.path.exists(self.tmpdir):
shutil.rmtree(self.tmpdir)
self.generate_manifest()
for error_string in self.error_strings:
self.progress("%s" % error_string)
sys.exit(len(self.error_strings))
if __name__ == '__main__':
    # command-line entry point: --tags may be given repeatedly;
    # with no tags, build the three standard release channels
    parser = optparse.OptionParser("build_binaries.py")
    parser.add_option("", "--tags", action="append", type="string",
                      default=[], help="tags to build")
    cmd_opts, cmd_args = parser.parse_args()
    tags = cmd_opts.tags
    if len(tags) == 0:
        # FIXME: wedge this defaulting into parser somehow
        tags = ["stable", "beta", "latest"]
    bb = build_binaries(tags)
    bb.run()
bb.run()
| gpl-3.0 |
rebeling/pattern | pattern/text/en/wordnet/pywordnet/wntools.py | 21 | 12214 | # Module wordnet.py
#
# Original author: Oliver Steele <steele@osteele.com>
# Project Page: http://sourceforge.net/projects/pywordnet
#
# Copyright (c) 1998-2004 by Oliver Steele. Use is permitted under
# the Artistic License
# <http://www.opensource.org/licenses/artistic-license.html>
"""Utility functions to use with the wordnet module.
Usage
-----
>>> dog = N['dog'][0]
# (First 10) adjectives that are transitively SIMILAR to the main sense of 'red'
>>> closure(ADJ['red'][0], SIMILAR)[:10]
['red' in {adjective: red, reddish, ruddy, blood-red, carmine, cerise, cherry, cherry-red, crimson, ruby, ruby-red, scarlet}, {adjective: chromatic}, {adjective: amber, brownish-yellow, yellow-brown}, {adjective: amethyst}, {adjective: aureate, gilded, gilt, gold, golden}, {adjective: azure, cerulean, sky-blue, bright blue}, {adjective: blue, bluish, blueish, light-blue, dark-blue, blue-black}, {adjective: bluish green, blue-green, cyan, teal}, {adjective: blushful, rosy}, {adjective: bottle-green}]
>>> # Adjectives that are transitively SIMILAR to any of the senses of 'red'
>>> #flatten1(map(lambda sense:closure(sense, SIMILAR), ADJ['red'])) # too verbose
>>> # Hyponyms of the main sense of 'dog'(n.) that are homophonous with verbs
>>> filter(lambda sense:V.get(sense.form), flatten1(map(lambda e:e.getSenses(), hyponyms(N['dog'][0]))))
['dog' in {noun: dog, domestic dog, Canis familiaris}, 'pooch' in {noun: pooch, doggie, doggy, barker, bow-wow}, 'toy' in {noun: toy dog, toy}, 'hound' in {noun: hound, hound dog}, 'basset' in {noun: basset, basset hound}, 'cocker' in {noun: cocker spaniel, English cocker spaniel, cocker}, 'bulldog' in {noun: bulldog, English bulldog}]
>>> # Find the senses of 'raise'(v.) and 'lower'(v.) that are antonyms
>>> filter(lambda p:p[0] in p[1].pointerTargets(ANTONYM), product(V['raise'].getSenses(), V['lower'].getSenses()))
[('raise' in {verb: raise, lift, elevate, get up, bring up}, 'lower' in {verb: lower, take down, let down, get down, bring down})]
"""
__author__ = "Oliver Steele <steele@osteele.com>"
__version__ = "2.0"
from wordnet import *
#
# Domain utilities
#
def _requireSource(entity):
    """Raise TypeError unless _entity_ can act as a pointer source.

    Senses and Synsets carry a 'pointers' attribute; a bare Word does
    not, but its first sense does, hence the hint in the message.
    Rewritten with repr() -- the original used the Python-2-only
    backquote syntax, which is a SyntaxError under Python 3.
    """
    if not hasattr(entity, 'pointers'):
        if isinstance(entity, Word):
            raise TypeError(repr(entity) + " is not a Sense or Synset. Try " + repr(entity) + "[0] instead.")
        else:
            raise TypeError(repr(entity) + " is not a Sense or Synset")
def tree(source, pointerType):
    """Return the tree of transitive pointerType relations rooted at
    source, as nested lists: [node, subtree, subtree, ...].

    >>> dog = N['dog'][0]
    >>> from pprint import pprint
    >>> pprint(tree(dog, HYPERNYM))
    ['dog' in {noun: dog, domestic dog, Canis familiaris},
     [{noun: canine, canid},
      [{noun: carnivore},
       [{noun: placental, placental mammal, eutherian, eutherian mammal},
        [{noun: mammal},
         [{noun: vertebrate, craniate},
          [{noun: chordate},
           [{noun: animal, animate being, beast, brute, creature, fauna},
            [{noun: organism, being},
             [{noun: living thing, animate thing},
              [{noun: object, physical object}, [{noun: entity}]]]]]]]]]]]]
    >>> #pprint(tree(dog, HYPONYM)) # too verbose to include here
    """
    # a Word fans out to one tree per sense
    # NOTE(review): Python-2-era code -- under Python 3, map() returns
    # an iterator and list + map raises TypeError; confirm intended
    # interpreter before reuse
    if isinstance(source, Word):
        return map(lambda s, t=pointerType:tree(s,t), source.getSenses())
    _requireSource(source)
    return [source] + map(lambda s, t=pointerType:tree(s,t), source.pointerTargets(pointerType))
def closure(source, pointerType, accumulator=None):
    """Return the transitive closure of source under the pointerType
    relationship. If source is a Word, return the union of the
    closures of its senses.

    >>> dog = N['dog'][0]
    >>> closure(dog, HYPERNYM)
    ['dog' in {noun: dog, domestic dog, Canis familiaris}, {noun: canine, canid}, {noun: carnivore}, {noun: placental, placental mammal, eutherian, eutherian mammal}, {noun: mammal}, {noun: vertebrate, craniate}, {noun: chordate}, {noun: animal, animate being, beast, brute, creature, fauna}, {noun: organism, being}, {noun: living thing, animate thing}, {noun: object, physical object}, {noun: entity}]
    """
    # NOTE(review): the Word branch builds per-sense *trees* and folds
    # them with union -- presumably intentional, but it returns nested
    # lists rather than a flat closure; confirm against pywordnet docs.
    # (reduce is a builtin only under Python 2.)
    if isinstance(source, Word):
        return reduce(union, map(lambda s, t=pointerType:tree(s,t), source.getSenses()))
    _requireSource(source)
    if accumulator is None:
        accumulator = []
    # depth-first accumulation; the membership guard both deduplicates
    # and terminates on cyclic pointer graphs
    if source not in accumulator:
        accumulator.append(source)
        for target in source.pointerTargets(pointerType):
            closure(target, pointerType, accumulator)
    return accumulator
def hyponyms(source):
    """Return source and its hyponyms. If source is a Word, return
    the union of the hyponyms of its senses."""
    # transitive closure over the HYPONYM pointer type
    return closure(source, HYPONYM)
def hypernyms(source):
    """Return source and its hypernyms. If source is a Word, return
    the union of the hypernyms of its senses."""
    # transitive closure over the HYPERNYM pointer type
    return closure(source, HYPERNYM)
def meet(a, b, pointerType=HYPERNYM):
    """Return the meet of a and b under the pointerType relationship.

    >>> meet(N['dog'][0], N['cat'][0])
    {noun: carnivore}
    >>> meet(N['dog'][0], N['person'][0])
    {noun: organism, being}
    >>> meet(N['thought'][0], N['belief'][0])
    {noun: content, cognitive content, mental object}
    """
    # first element common to both closures, taken in a's closure
    # order; appending [None] makes the lookup total, so disjoint
    # closures yield None instead of an IndexError
    return (intersection(closure(a, pointerType), closure(b, pointerType)) + [None])[0]
#
# String Utility Functions
#
def startsWith(str, prefix):
    """Return true iff _str_ starts with _prefix_.

    Implemented with slicing so it works on any sliceable sequence,
    not only strings.

    >>> startsWith('unclear', 'un')
    1
    """
    head = str[:len(prefix)]
    return head == prefix
def endsWith(str, suffix):
    """Return true iff _str_ ends with _suffix_.

    An empty suffix is a suffix of everything.  (The old
    implementation computed str[-0:], i.e. the whole string, and
    compared it to '', wrongly returning false for an empty suffix.)

    >>> endsWith('clearly', 'ly')
    1
    """
    if not suffix:
        return True
    return str[-len(suffix):] == suffix
def equalsIgnoreCase(a, b):
    """Return true iff a and b have the same lowercase representation.

    Uses the str.lower() method; the original called the
    string.lower() module function, which was removed in Python 3.

    >>> equalsIgnoreCase('dog', 'Dog')
    1
    >>> equalsIgnoreCase('dOg', 'DOG')
    1
    """
    # test a == b first as an optimization where they're equal
    return a == b or a.lower() == b.lower()
#
# Sequence Utility Functions
#
def issequence(item):
    """Return true iff _item_ is a Sequence (a List, String, or Tuple).
    >>> issequence((1,2))
    True
    >>> issequence([1,2])
    True
    >>> issequence('12')
    True
    >>> issequence(1)
    False
    """
    # isinstance replaces the Python 2-only types.ListType/StringType/
    # TupleType names, which were removed in Python 3.
    return isinstance(item, (list, str, tuple))
def intersection(u, v):
    """Return the intersection of _u_ and _v_: the elements of _u_, in
    order (duplicates preserved), that also occur in _v_.
    >>> intersection((1,2,3), (2,3,4))
    [2, 3]
    """
    # Comprehension replaces the manual append loop; membership test
    # stays linear so unhashable elements keep working.
    return [e for e in u if e in v]
def union(u, v):
    """Return the union of _u_ and _v_: the elements of _u_ followed by
    the elements of _v_ not already present, order preserved.
    >>> union((1,2,3), (2,3,4))
    [1, 2, 3, 4]
    """
    # list(u) always builds a fresh list, so the old `if w is u: copy`
    # branch was dead code and has been removed.
    w = list(u)
    for e in v:
        if e not in w:
            w.append(e)
    return w
def product(u, v):
    """Return the Cartesian product of u and v as a list of pairs.
    >>> product("123", "abc")
    [('1', 'a'), ('1', 'b'), ('1', 'c'), ('2', 'a'), ('2', 'b'), ('2', 'c'), ('3', 'a'), ('3', 'b'), ('3', 'c')]
    """
    # The old nested map/lambda construction relied on Python 2's map
    # returning lists; under Python 3 it produced iterators that
    # flatten1 treated as atoms.  A comprehension is correct on both.
    return [(a, b) for a in u for b in v]
def removeDuplicates(sequence):
    """Return a copy of _sequence_ with equal items removed, keeping the
    first occurrence of each.
    >>> removeDuplicates("this is a test")
    ['t', 'h', 'i', 's', ' ', 'a', 'e']
    """
    # Linear membership test (not a set) so unhashable items still work.
    unique = []
    for element in sequence:
        if element not in unique:
            unique.append(element)
    return unique
#
# Tree Utility Functions
#
def flatten1(sequence):
    """Flatten _sequence_ by one level: items that are lists or tuples
    are spliced into the result; everything else is appended as-is."""
    # isinstance replaces the Python 2-only types.ListType/TupleType
    # names (removed in Python 3); list.extend accepts tuples directly,
    # so the intermediate tuple->list conversion is unnecessary.
    flat = []
    for item in sequence:
        if isinstance(item, (list, tuple)):
            flat.extend(item)
        else:
            flat.append(item)
    return flat
#
# WordNet utilities
#
GET_INDEX_SUBSTITUTIONS = ((' ', '-'), ('-', ' '), ('-', ''), (' ', ''), ('.', ''))
def getIndex(form, pos='noun'):
    """Search for _form_ in the index file corresponding to
    _pos_. getIndex applies to _form_ an algorithm that replaces
    underscores with hyphens, hyphens with underscores, removes
    hyphens and underscores, and removes periods in an attempt to find
    a form of the string that is an exact match for an entry in the
    index file corresponding to _pos_.  Each transformed string is
    looked up until a match is found or all the different strings have
    been tried. It returns a Word or None."""
    # Fixes over the previous version: `substitute` was read before it
    # was assigned (UnboundLocalError), the recursion was kicked off
    # with an undefined name `returnMatch`, and dict.has_key /
    # string.replace no longer exist in Python 3.
    def trySubstitutions(form, substitutions, lookup=1,
                         dictionary=dictionaryFor(pos)):
        # Try the current form verbatim (skipped when the caller already
        # looked it up via lookup=0).
        if lookup and form in dictionary:
            return dictionary[form]
        if substitutions:
            (old, new) = substitutions[0]
            # A substitution that changes nothing yields no new candidate.
            substitute = form.replace(old, new)
            if substitute == form:
                substitute = None
            if substitute is not None and substitute in dictionary:
                return dictionary[substitute]
            # Recurse on the unmodified form first (its direct lookup is
            # already done), then on the substituted candidate.
            return (trySubstitutions(form, substitutions[1:], lookup=0) or
                    (substitute and
                     trySubstitutions(substitute, substitutions[1:])))
        return None
    return trySubstitutions(form, GET_INDEX_SUBSTITUTIONS)
# Per part-of-speech (suffix, replacement) pairs tried by morphy() when
# stripping inflections, e.g. 'churches' -ches -> -ch 'church'.
MORPHOLOGICAL_SUBSTITUTIONS = {
    NOUN:
      [('s', ''),
       ('ses', 's'),
       ('ves', 'f'),
       ('xes', 'x'),
       ('zes', 'z'),
       ('ches', 'ch'),
       ('shes', 'sh'),
       ('men', 'man'),
       ('ies', 'y')],
    VERB:
      [('s', ''),
       ('ies', 'y'),
       ('es', 'e'),
       ('es', ''),
       ('ed', 'e'),
       ('ed', ''),
       ('ing', 'e'),
       ('ing', '')],
    ADJECTIVE:
      [('er', ''),
       ('est', ''),
       ('er', 'e'),
       ('est', 'e')],
    # Adverbs have no regular inflections; only the exception file applies.
    ADVERB: []}
def morphy(form, pos='noun', collect=0):
    """Recursively uninflect _form_, and return the first form found
    in the dictionary. If _collect_ is true, a sequence of all forms
    is returned, instead of just the first one.
    >>> morphy('dogs')
    'dog'
    >>> morphy('churches')
    'church'
    >>> morphy('aardwolves')
    'aardwolf'
    >>> morphy('abaci')
    'abacus'
    >>> morphy('hardrock', 'adv')
    """
    # NOTE(review): this function is written for Python 2.x
    # (dict.has_key, string.find); it will not run unmodified on 3.x.
    from wordnet import _normalizePOS, _dictionaryFor
    pos = _normalizePOS(pos)
    # Exception file holding irregular forms ("aardwolves aardwolf").
    # NOTE(review): the handle is never closed; presumably kept open for
    # binarySearchFile's repeated seeks -- confirm.
    fname = os.path.join(WNSEARCHDIR, {NOUN: 'noun', VERB: 'verb', ADJECTIVE: 'adj', ADVERB: 'adv'}[pos] + '.exc')
    excfile = open(fname)
    substitutions = MORPHOLOGICAL_SUBSTITUTIONS[pos]
    # collection=[] is safe here despite being a mutable default: the
    # inner function is redefined on every morphy() call.
    def trySubstitutions(trySubstitutions, # workaround for lack of nested closures in Python < 2.1
                         form, # reduced form
                         substitutions, # remaining substitutions
                         lookup=1,
                         dictionary=_dictionaryFor(pos),
                         excfile=excfile,
                         collect=collect,
                         collection=[]):
        import string
        # Irregular form?  Replace `form` with the base form from the
        # exception file ("<inflected> <base>\n").
        exceptions = binarySearchFile(excfile, form)
        if exceptions:
            form = exceptions[string.find(exceptions, ' ')+1:-1]
        if lookup and dictionary.has_key(form):
            if collect:
                collection.append(form)
            else:
                return form
        elif substitutions:
            # Try the next suffix substitution on a copy; recurse both on
            # the unmodified form and (if it applied) the stripped one.
            old, new = substitutions[0]
            substitutions = substitutions[1:]
            substitute = None
            if endsWith(form, old):
                substitute = form[:-len(old)] + new
                #if dictionary.has_key(substitute):
                #    return substitute
            form = trySubstitutions(trySubstitutions, form, substitutions) or \
                   (substitute and trySubstitutions(trySubstitutions, substitute, substitutions))
            # NOTE(review): when collect is true but nothing was found,
            # `(collect and collection) or form` falls back to `form` --
            # confirm this mixed return type is intended.
            return (collect and collection) or form
        elif collect:
            return collection
    return trySubstitutions(trySubstitutions, form, substitutions)
#
# Testing
#
def _test(reset=0):
    """Run the wntools doctests and return doctest's result object."""
    import doctest
    import wntools
    if reset:
        # Forget previously-seen tests so doctest doesn't complain
        # about duplicates after a reload.
        doctest.master = None
    return doctest.testmod(wntools)
| bsd-3-clause |
jazztpt/edx-platform | lms/djangoapps/shoppingcart/migrations/0012_auto__del_field_courseregistrationcode_transaction_group_name__del_fie.py | 114 | 17592 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South-generated schema migration for the shoppingcart app.

    Replaces the purchaser-oriented Invoice columns (purchaser_name,
    purchaser_email, purchaser_contact, reference) with company-oriented
    ones, and drops CourseRegistrationCode.transaction_group_name.
    Auto-generated: do not edit the operation order by hand.
    """

    def forwards(self, orm):
        """Apply the schema changes."""
        # Deleting field 'CourseRegistrationCode.transaction_group_name'
        db.delete_column('shoppingcart_courseregistrationcode', 'transaction_group_name')

        # Deleting field 'Invoice.purchaser_contact'
        db.delete_column('shoppingcart_invoice', 'purchaser_contact')

        # Deleting field 'Invoice.purchaser_email'
        db.delete_column('shoppingcart_invoice', 'purchaser_email')

        # Deleting field 'Invoice.reference'
        db.delete_column('shoppingcart_invoice', 'reference')

        # Deleting field 'Invoice.purchaser_name'
        db.delete_column('shoppingcart_invoice', 'purchaser_name')

        # Adding field 'Invoice.company_name'
        db.add_column('shoppingcart_invoice', 'company_name',
                      self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True),
                      keep_default=False)

        # Adding field 'Invoice.course_id'
        db.add_column('shoppingcart_invoice', 'course_id',
                      self.gf('xmodule_django.models.CourseKeyField')(max_length=255, db_index=True),
                      keep_default=False)

        # Adding field 'Invoice.company_contact_name'
        db.add_column('shoppingcart_invoice', 'company_contact_name',
                      self.gf('django.db.models.fields.CharField')(max_length=255),
                      keep_default=False)

        # Adding field 'Invoice.company_contact_email'
        db.add_column('shoppingcart_invoice', 'company_contact_email',
                      self.gf('django.db.models.fields.CharField')(max_length=255),
                      keep_default=False)

        # Adding field 'Invoice.company_reference'
        db.add_column('shoppingcart_invoice', 'company_reference',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True),
                      keep_default=False)

        # Adding field 'Invoice.internal_reference'
        db.add_column('shoppingcart_invoice', 'internal_reference',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True),
                      keep_default=False)

    def backwards(self, orm):
        """Reverse the schema changes (mirror image of forwards)."""
        # Adding field 'CourseRegistrationCode.transaction_group_name'
        db.add_column('shoppingcart_courseregistrationcode', 'transaction_group_name',
                      self.gf('django.db.models.fields.CharField')(blank=True, max_length=255, null=True, db_index=True),
                      keep_default=False)

        # Adding field 'Invoice.purchaser_contact'
        db.add_column('shoppingcart_invoice', 'purchaser_contact',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=255),
                      keep_default=False)

        # Adding field 'Invoice.purchaser_email'
        db.add_column('shoppingcart_invoice', 'purchaser_email',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=255),
                      keep_default=False)

        # Adding field 'Invoice.reference'
        db.add_column('shoppingcart_invoice', 'reference',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True),
                      keep_default=False)

        # Adding field 'Invoice.purchaser_name'
        db.add_column('shoppingcart_invoice', 'purchaser_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=255, db_index=True),
                      keep_default=False)

        # Deleting field 'Invoice.company_name'
        db.delete_column('shoppingcart_invoice', 'company_name')

        # Deleting field 'Invoice.course_id'
        db.delete_column('shoppingcart_invoice', 'course_id')

        # Deleting field 'Invoice.company_contact_name'
        db.delete_column('shoppingcart_invoice', 'company_contact_name')

        # Deleting field 'Invoice.company_contact_email'
        db.delete_column('shoppingcart_invoice', 'company_contact_email')

        # Deleting field 'Invoice.company_reference'
        db.delete_column('shoppingcart_invoice', 'company_reference')

        # Deleting field 'Invoice.internal_reference'
        db.delete_column('shoppingcart_invoice', 'internal_reference')

    # Frozen ORM state used by South to rebuild model classes at
    # migration time; generated, not hand-maintained.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'shoppingcart.certificateitem': {
            'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
            'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
            'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
        },
        'shoppingcart.coupon': {
            'Meta': {'object_name': 'Coupon'},
            'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 13, 0, 0)'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        'shoppingcart.couponredemption': {
            'Meta': {'object_name': 'CouponRedemption'},
            'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'shoppingcart.courseregistrationcode': {
            'Meta': {'object_name': 'CourseRegistrationCode'},
            'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 13, 0, 0)'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'})
        },
        'shoppingcart.invoice': {
            'Meta': {'object_name': 'Invoice'},
            'company_contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'company_contact_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'company_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'company_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'internal_reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'tax_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
            'total_amount': ('django.db.models.fields.FloatField', [], {})
        },
        'shoppingcart.order': {
            'Meta': {'object_name': 'Order'},
            'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
            'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
            'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
            'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'shoppingcart.orderitem': {
            'Meta': {'object_name': 'OrderItem'},
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
            'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
            'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
            'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
            'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'shoppingcart.paidcourseregistration': {
            'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
            'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
            'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
        },
        'shoppingcart.paidcourseregistrationannotation': {
            'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
            'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'shoppingcart.registrationcoderedemption': {
            'Meta': {'object_name': 'RegistrationCodeRedemption'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
            'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 13, 0, 0)', 'null': 'True'}),
            'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"})
        },
        'student.courseenrollment': {
            'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['shoppingcart']
| agpl-3.0 |
cafe-grader-team/cafe-grader-web | lib/assets/Lib/_struct.py | 9 | 15178 | #
# This module is a pure Python version of pypy.module.struct.
# It is only imported if the vastly faster pypy.module.struct is not
# compiled in. For now we keep this version for reference and
# because pypy.module.struct is not ootype-backend-friendly yet.
#
# this module 'borrowed' from
# https://bitbucket.org/pypy/pypy/src/18626459a9b2/lib_pypy/_struct.py?at=py3k-listview_str
# with many bug fixes
"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.
The optional first format char indicates byte order, size and alignment:
@: native order, size & alignment (default)
=: native order, std. size & alignment
<: little-endian, std. size & alignment
>: big-endian, std. size & alignment
!: same as >
The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
x: pad byte (no data);
c:char;
b:signed byte;
B:unsigned byte;
h:short;
H:unsigned short;
i:int;
I:unsigned int;
l:long;
L:unsigned long;
f:float;
d:double.
Special cases (preceding decimal count indicates length):
s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
q:long long;
Q:unsigned long long
Whitespace between formats is ignored.
The variable struct.error is an exception raised on errors."""
import math, sys
# TODO: XXX Find a way to get information on native sizes and alignments
class StructError(Exception):
    """Raised for invalid format strings and packing/unpacking errors."""
    pass
# Alias matching the public `error` name exported by the C struct module.
error = StructError
def unpack_int(data, index, size, le):
    """Decode `size` bytes of `data` starting at `index` as an unsigned
    integer.  `le` is the byte order, 'little' or 'big' (exactly the
    strings int.from_bytes expects)."""
    # int.from_bytes replaces the manual reverse-and-fold loop and the
    # local that shadowed the builtin `bytes`.
    return int.from_bytes(data[index:index + size], le)
def unpack_signed_int(data, index, size, le):
    """Decode `size` bytes of `data` starting at `index` as a signed
    (two's-complement) integer with byte order `le`."""
    # int.from_bytes(signed=True) replaces the unsigned decode plus the
    # manual wrap-around (which also shadowed the builtin `max`).
    return int.from_bytes(data[index:index + size], le, signed=True)
# Build inf and nan by float overflow; predates math.inf / math.nan.
INFINITY = 1e200 * 1e200
NAN = INFINITY / INFINITY
def unpack_char(data, index, size, le):
    """Return the raw byte slice for a 'c' field; `le` is irrelevant for
    single characters but kept for a uniform unpacker signature."""
    chunk = data[index:index + size]
    return chunk
def pack_int(number, size, le):
    """Serialize the low `size` bytes of `number` (truncating silently;
    range checks are the callers' job) with byte order `le`."""
    # Arithmetic right shift makes this correct for negative numbers too
    # (two's-complement byte pattern).
    out = [(number >> (8 * k)) & 0xFF for k in range(size)]
    if le == 'big':
        out.reverse()
    return bytes(out)
def pack_signed_int(number, size, le):
    """Validate `number` as a signed `size`-byte integer, then pack it."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    limit = 2 ** (8 * size - 1)
    if number > limit - 1 or number < -limit:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_unsigned_int(number, size, le):
    """Validate `number` as an unsigned `size`-byte integer, then pack it."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number < 0:
        raise TypeError("can't convert negative long to unsigned")
    top = (1 << (8 * size)) - 1
    if number > top:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_char(char, size, le):
    # `char` is already a bytes object of length 1; bytes(char) merely
    # copies it.  `size` and `le` are accepted for a uniform signature.
    return bytes(char)
def isinf(x):
    """Return True iff x is positive or negative infinity."""
    # math.isinf replaces the hand-rolled `x != 0 and x/2 == x` trick
    # (same results, including False for NaN and zero).
    return math.isinf(x)
def isnan(v):
    """Return True iff v is a NaN."""
    # math.isnan replaces the obscure `v != v*1.0 or (v == 1.0 and
    # v == 2.0)` expression (whose second clause could never be true).
    return math.isnan(v)
def pack_float(x, size, le):
    """Pack float `x` into `size` (4 or 8) bytes with byte order `le`."""
    unsigned = float_pack(x, size)
    out = [(unsigned >> (8 * k)) & 0xFF for k in range(size)]
    if le == "big":
        out.reverse()
    return bytes(out)
def unpack_float(data, index, size, le):
    """Unpack a `size`-byte (4 or 8) float from `data` at `index`."""
    chunk = [data[index + k] for k in range(size)]
    if le == "big":
        chunk.reverse()
    unsigned = 0
    for k, byte in enumerate(chunk):
        unsigned |= byte << (8 * k)
    return float_unpack(unsigned, size, le)
def round_to_nearest(x):
    """Round a float x to the nearest int, Python-3 style:

    - returns an int, not a float;
    - resolves ties half-to-even (banker's rounding), not away from zero.

    x is assumed finite and nonnegative; negative inputs give wrong
    results by design.
    """
    whole = int(x)
    frac = x - whole
    round_up = frac > 0.5 or (frac == 0.5 and whole % 2 == 1)
    return whole + 1 if round_up else whole
def float_unpack(Q, size, le):
    """Convert a 32-bit or 64-bit unsigned integer (as produced by
    float_pack) back into a Python float.  `le` is unused here; the
    caller has already folded the bytes into the integer Q."""
    if size == 8:
        min_exp, max_exp, mant_dig, bits = -1021, 1024, 53, 64
    elif size == 4:
        min_exp, max_exp, mant_dig, bits = -125, 128, 24, 32
    else:
        raise ValueError("invalid size value")
    if Q >> bits:
        raise ValueError("input out of range")
    # Split Q into sign bit, biased exponent field and mantissa field.
    sign = Q >> (bits - 1)
    exp = (Q >> (mant_dig - 1)) & ((1 << (bits - mant_dig)) - 1)
    mant = Q & ((1 << (mant_dig - 1)) - 1)
    if exp == max_exp - min_exp + 2:
        # All-ones exponent: NaN if the mantissa is nonzero, else infinity.
        result = float('nan') if mant else float('inf')
    elif exp == 0:
        # Zero exponent: subnormal number, or zero when mant == 0.
        result = math.ldexp(float(mant), min_exp - mant_dig)
    else:
        # Normal number: restore the implicit leading mantissa bit.
        full_mant = mant | (1 << (mant_dig - 1))
        result = math.ldexp(float(full_mant), exp + min_exp - mant_dig - 1)
    return -result if sign else result
def float_pack(x, size):
    """Convert a Python float x into a 32- or 64-bit unsigned integer
    with the same IEEE-754 byte representation (inverse of
    float_unpack).  Raises OverflowError when x does not fit the
    target format."""
    if size == 8:
        MIN_EXP = -1021  # = sys.float_info.min_exp
        MAX_EXP = 1024   # = sys.float_info.max_exp
        MANT_DIG = 53    # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125   # C's FLT_MIN_EXP
        MAX_EXP = 128    # FLT_MAX_EXP
        MANT_DIG = 24    # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")
    # copysign distinguishes -0.0 from +0.0, which `x < 0` would not.
    sign = math.copysign(1.0, x) < 0.0
    if math.isinf(x):
        mant = 0
        exp = MAX_EXP - MIN_EXP + 2
    elif math.isnan(x):
        mant = 1 << (MANT_DIG-2)  # other values possible
        exp = MAX_EXP - MIN_EXP + 2
    elif x == 0.0:
        mant = 0
        exp = 0
    else:
        m, e = math.frexp(abs(x))  # abs(x) == m * 2**e
        exp = e - (MIN_EXP - 1)
        if exp > 0:
            # Normal case: scale the fraction to MANT_DIG bits and drop
            # the implicit leading bit.
            mant = round_to_nearest(m * (1 << MANT_DIG))
            mant -= 1 << MANT_DIG - 1
        else:
            # Subnormal case: fewer significant bits are available.
            if exp + MANT_DIG - 1 >= 0:
                mant = round_to_nearest(m * (1 << exp + MANT_DIG - 1))
            else:
                mant = 0
            exp = 0
        # Special case: rounding produced a MANT_DIG-bit mantissa, so
        # carry into the exponent.
        assert 0 <= mant <= 1 << MANT_DIG - 1
        if mant == 1 << MANT_DIG - 1:
            mant = 0
            exp += 1
    # Raise on overflow (in some circumstances, may want to return
    # infinity instead).
    if exp >= MAX_EXP - MIN_EXP + 2:
        raise OverflowError("float too large to pack in this format")
    # check constraints
    assert 0 <= mant < 1 << MANT_DIG - 1
    assert 0 <= exp <= MAX_EXP - MIN_EXP + 2
    assert 0 <= sign <= 1
    # bool `sign` shifts like the int 0/1 here.
    return ((sign << BITS - 1) | (exp << MANT_DIG - 1)) | mant
# Standard-size format table: maps each struct format character to its byte
# size, alignment (always 0 in std mode) and pack/unpack handlers.  The
# same table serves every byte order; endianness is passed to the handlers.
big_endian_format = {
    'x':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'b':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'B':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'c':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_char, 'unpack' : unpack_char},
    's':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'p':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'h':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'H':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'i':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'I':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'l':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'L':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'Q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'f':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    'd':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    }
default = big_endian_format
# Byte-order prefix -> (format table, byte-order name for the int packers).
formatmode={ '<' : (default, 'little'),
             '>' : (default, 'big'),
             '!' : (default, 'big'),
             '=' : (default, sys.byteorder),
             '@' : (default, sys.byteorder)
            }
def getmode(fmt):
    """Decode the optional byte-order prefix of `fmt`.

    Returns (format table, endianness name, index of the first format
    character, native-alignment flag)."""
    try:
        formatdef, endianness = formatmode[fmt[0]]
        index = 1
        # Only '@' (of the recognized prefixes) keeps native alignment.
        alignment = fmt[0] == '@'
    except (IndexError, KeyError):
        # Empty string or no recognized prefix: native mode.
        formatdef, endianness = formatmode['@']
        index = 0
        alignment = True
    return formatdef, endianness, index, alignment
def getNum(fmt, i):
    """Parse an optional decimal repeat count in `fmt` starting at `i`.

    Returns (count or None, index of the first non-digit character).
    Fixes: `num == None` comparison, and an IndexError when the format
    string ended with digits (the loop read past the end)."""
    num = None
    while i < len(fmt) and fmt[i].isdigit():
        digit = int(fmt[i])
        num = digit if num is None else num * 10 + digit
        i += 1
    return num, i
def calcsize(fmt):
    """calcsize(fmt) -> int
    Return size of C struct described by format string fmt.
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i, alignment = getmode(fmt)
    result = 0
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        size = format['size']
        if num is not None:
            # NOTE(review): counted items historically get no alignment
            # padding; preserved for compatibility with pack()/unpack().
            result += num * size
        else:
            # Native mode: pad up to the next multiple of the item size.
            # Bug fix: the old code added a full `size` of padding even
            # when `result` was already aligned (calcsize('i') gave 8).
            if alignment and result % size:
                result += size - result % size
            result += size
        i += 1
    return result
def pack(fmt, *args):
    """pack(fmt, v1, v2, ...) -> string
    Return string containing values v1, v2, ... packed according to fmt.
    See struct.__doc__ for more on format strings.

    Bug fixes: native-mode padding is now computed from the number of
    bytes emitted (the old code counted list *chunks*, inserting
    spurious padding, e.g. between two 'b' items), and no padding is
    added when the offset is already aligned."""
    formatdef, endianness, i, alignment = getmode(fmt)
    args = list(args)
    result = []      # list of bytes chunks, joined at the end
    nbytes = 0       # total bytes emitted so far, for alignment padding
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if num is None:
            num = 1
        if cur == 'x':
            # Pad bytes consume no arguments.
            result.append(b'\0' * num)
            nbytes += num
        elif cur == 's':
            if not isinstance(args[0], bytes):
                raise StructError("arg for string format not a string")
            arg = args.pop(0)
            # Truncate or NUL-pad to exactly `num` bytes.
            result.append(arg[:num] + b'\0' * (num - len(arg)))
            nbytes += num
        elif cur == 'p':
            if not isinstance(args[0], bytes):
                raise StructError("arg for string format not a string")
            arg = args.pop(0)
            # Pascal string: leading length byte, then the (possibly
            # truncated) data, NUL-padded to `num` bytes total.
            padding = num - len(arg) - 1
            if padding > 0:
                chunk = bytes([len(arg)]) + arg[:num - 1] + b'\0' * padding
            elif num < 255:
                chunk = bytes([num - 1]) + arg[:num - 1]
            else:
                chunk = bytes([255]) + arg[:num - 1]
            result.append(chunk)
            nbytes += len(chunk)
        else:
            if len(args) < num:
                raise StructError("insufficient arguments to pack")
            size = format['size']
            for var in args[:num]:
                # Native mode: pad to a multiple of the item size before
                # each item (a no-op after the first, which aligns us).
                if alignment and nbytes % size:
                    pad = size - nbytes % size
                    result.append(b'\0' * pad)
                    nbytes += pad
                result.append(format['pack'](var, size, endianness))
                nbytes += size
            args = args[num:]
        i += 1
    if len(args) != 0:
        raise StructError("too many arguments for pack format")
    return b''.join(result)
def unpack(fmt, data):
    """unpack(fmt, string) -> (v1, v2, ...)
    Unpack the string, containing packed C structure data, according
    to fmt. Requires len(string)==calcsize(fmt).
    See struct.__doc__ for more on format strings.

    Bug fixes: padding is only inserted when the offset is actually
    misaligned (the old code added a full item size when already
    aligned), and an explicit 0 repeat count is no longer treated as 1."""
    formatdef, endianness, i, alignment = getmode(fmt)
    j = 0                       # current read offset into data
    result = []
    length = calcsize(fmt)
    if length != len(data):
        raise StructError("unpack str size does not match format")
    while i < len(fmt):
        num, i = getNum(fmt, i)
        cur = fmt[i]
        i += 1
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        if num is None:
            num = 1
        if cur == 'x':
            # Pad bytes produce no value.
            j += num
        elif cur == 's':
            result.append(data[j:j + num])
            j += num
        elif cur == 'p':
            # Pascal string: length byte, clamped to the field size.
            n = data[j]
            if n >= num:
                n = num - 1
            result.append(data[j + 1:j + n + 1])
            j += num
        else:
            size = format['size']
            for _ in range(num):
                # Native mode: skip padding up to a multiple of the item
                # size (a no-op after the first item of a group).
                if alignment and j % size:
                    j += size - j % size
                result.append(format['unpack'](data, j, size, endianness))
                j += size
    return tuple(result)
def pack_into(fmt, buf, offset, *args):
    """Pack the values per `fmt` and write them into the writable
    buffer `buf` starting at `offset`."""
    packed = pack(fmt, *args)
    buf[offset:offset + len(packed)] = packed
def unpack_from(fmt, buf, offset=0):
    """Unpack *fmt* from *buf* starting at *offset*.

    Raises StructError when the buffer does not hold at least
    calcsize(fmt) bytes past the offset.
    """
    size = calcsize(fmt)
    data = buf[offset:offset+size]
    if len(data) != size:
        # Consistency fix: the rest of this module raises StructError; the
        # bare name `error` used here before is not visibly defined in this
        # scope (it may be an alias elsewhere -- StructError is safe either way).
        raise StructError("unpack_from requires a buffer of at least %d bytes"
                          % (size,))
    return unpack(fmt, data)
def _clearcache():
    """Clear the internal cache.

    This pure-Python implementation keeps no cache, so the call is a
    no-op retained for API compatibility with the C struct module.
    """
    return None
if __name__=='__main__':
    # Quick smoke test: round-trip an unsigned byte and a float.
    t = pack('Bf',1,2)
    print(t, len(t))
    print(unpack('Bf', t))
    print(calcsize('Bf'))
| mit |
philsch/ansible | test/units/module_utils/facts/hardware/linux_data.py | 71 | 12451 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Captured `lsblk` output used as a fixture by the Linux hardware fact tests.
# Each line is a device path optionally followed by its UUID.
LSBLK_OUTPUT = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop1 7c1b0f30-cf34-459f-9a70-2612f82b870a
/dev/loop9 0f031512-ab15-497d-9abd-3a512b4a9390
/dev/loop9 7c1b4444-cf34-459f-9a70-2612f82b870a
/dev/mapper/docker-253:1-1050967-pool
/dev/loop2
/dev/mapper/docker-253:1-1050967-pool
"""
# Variant fixture containing a mapper device with spaces in its name.
LSBLK_OUTPUT_2 = b"""
/dev/sda
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
/dev/mapper/fedora_dhcp129--186-swap eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d
/dev/mapper/fedora_dhcp129--186-root d34cf5e3-3449-4a6c-8179-a1feb2bca6ce
/dev/mapper/fedora_dhcp129--186-home 2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d
/dev/mapper/an-example-mapper with a space in the name 84639acb-013f-4d2f-9392-526a572b4373
/dev/sr0
/dev/loop0 0f031512-ab15-497d-9abd-3a512b4a9390
"""
# Expected device -> UUID mapping parsed from the fixtures above.
LSBLK_UUIDS = {'/dev/sda1': '66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK'}
MTAB = """
sysfs /sys sysfs rw,seclabel,nosuid,nodev,noexec,relatime 0 0
proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
devtmpfs /dev devtmpfs rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755 0 0
securityfs /sys/kernel/security securityfs rw,nosuid,nodev,noexec,relatime 0 0
tmpfs /dev/shm tmpfs rw,seclabel,nosuid,nodev 0 0
devpts /dev/pts devpts rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
tmpfs /run tmpfs rw,seclabel,nosuid,nodev,mode=755 0 0
tmpfs /sys/fs/cgroup tmpfs ro,seclabel,nosuid,nodev,noexec,mode=755 0 0
cgroup /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd 0 0
pstore /sys/fs/pstore pstore rw,seclabel,nosuid,nodev,noexec,relatime 0 0
cgroup /sys/fs/cgroup/devices cgroup rw,nosuid,nodev,noexec,relatime,devices 0 0
cgroup /sys/fs/cgroup/freezer cgroup rw,nosuid,nodev,noexec,relatime,freezer 0 0
cgroup /sys/fs/cgroup/memory cgroup rw,nosuid,nodev,noexec,relatime,memory 0 0
cgroup /sys/fs/cgroup/pids cgroup rw,nosuid,nodev,noexec,relatime,pids 0 0
cgroup /sys/fs/cgroup/blkio cgroup rw,nosuid,nodev,noexec,relatime,blkio 0 0
cgroup /sys/fs/cgroup/cpuset cgroup rw,nosuid,nodev,noexec,relatime,cpuset 0 0
cgroup /sys/fs/cgroup/cpu,cpuacct cgroup rw,nosuid,nodev,noexec,relatime,cpu,cpuacct 0 0
cgroup /sys/fs/cgroup/hugetlb cgroup rw,nosuid,nodev,noexec,relatime,hugetlb 0 0
cgroup /sys/fs/cgroup/perf_event cgroup rw,nosuid,nodev,noexec,relatime,perf_event 0 0
cgroup /sys/fs/cgroup/net_cls,net_prio cgroup rw,nosuid,nodev,noexec,relatime,net_cls,net_prio 0 0
configfs /sys/kernel/config configfs rw,relatime 0 0
/dev/mapper/fedora_dhcp129--186-root / ext4 rw,seclabel,relatime,data=ordered 0 0
selinuxfs /sys/fs/selinux selinuxfs rw,relatime 0 0
systemd-1 /proc/sys/fs/binfmt_misc autofs rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct 0 0
debugfs /sys/kernel/debug debugfs rw,seclabel,relatime 0 0
hugetlbfs /dev/hugepages hugetlbfs rw,seclabel,relatime 0 0
tmpfs /tmp tmpfs rw,seclabel 0 0
mqueue /dev/mqueue mqueue rw,seclabel,relatime 0 0
/dev/loop0 /var/lib/machines btrfs rw,seclabel,relatime,space_cache,subvolid=5,subvol=/ 0 0
/dev/sda1 /boot ext4 rw,seclabel,relatime,data=ordered 0 0
/dev/mapper/fedora_dhcp129--186-home /home ext4 rw,seclabel,relatime,data=ordered 0 0
tmpfs /run/user/1000 tmpfs rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000 0 0
gvfsd-fuse /run/user/1000/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
fusectl /sys/fs/fuse/connections fusectl rw,relatime 0 0
grimlock.g.a: /home/adrian/sshfs-grimlock fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:test_path/path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2 fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
"""
MTAB_ENTRIES = [
[
'sysfs',
'/sys',
'sysfs',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
[
'devtmpfs',
'/dev',
'devtmpfs',
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
'0',
'0'
],
[
'securityfs',
'/sys/kernel/security',
'securityfs',
'rw,nosuid,nodev,noexec,relatime',
'0',
'0'
],
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
[
'devpts',
'/dev/pts',
'devpts',
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
'0',
'0'
],
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
[
'tmpfs',
'/sys/fs/cgroup',
'tmpfs',
'ro,seclabel,nosuid,nodev,noexec,mode=755',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/systemd',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
'0',
'0'
],
[
'pstore',
'/sys/fs/pstore',
'pstore',
'rw,seclabel,nosuid,nodev,noexec,relatime',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/devices',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,devices',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/freezer',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,freezer',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/memory',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,memory',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/pids',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,pids',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/blkio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,blkio',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpuset',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpuset',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/cpu,cpuacct',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/hugetlb',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,hugetlb',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/perf_event',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,perf_event',
'0',
'0'
],
[
'cgroup',
'/sys/fs/cgroup/net_cls,net_prio',
'cgroup',
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
'0',
'0'
],
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-root',
'/',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
[
'systemd-1',
'/proc/sys/fs/binfmt_misc',
'autofs',
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
'0',
'0'
],
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
[
'hugetlbfs',
'/dev/hugepages',
'hugetlbfs',
'rw,seclabel,relatime',
'0',
'0'
],
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
[
'/dev/loop0',
'/var/lib/machines',
'btrfs',
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
'0',
'0'
],
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# A 'none' fstype
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
# lets assume this is a bindmount
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
[
'/dev/mapper/fedora_dhcp129--186-home',
'/home',
'ext4',
'rw,seclabel,relatime,data=ordered',
'0',
'0'
],
[
'tmpfs',
'/run/user/1000',
'tmpfs',
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
'0',
'0'
],
[
'gvfsd-fuse',
'/run/user/1000/gvfs',
'fuse.gvfsd-fuse',
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
'0',
'0'
],
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
STATVFS_INFO = {'/': {'block_available': 10192323,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676405,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747755008,
'size_total': 52710309888},
'/not/a/real/bind_mount': {},
'/home': {'block_available': 1001578731,
'block_size': 4096,
'block_total': 105871006,
'block_used': 5713133,
'inode_available': 26860880,
'inode_total': 26902528,
'inode_used': 41648,
'size_available': 410246647808,
'size_total': 433647640576},
'/var/lib/machines': {'block_available': 10192316,
'block_size': 4096,
'block_total': 12868728,
'block_used': 2676412,
'inode_available': 3061699,
'inode_total': 3276800,
'inode_used': 215101,
'size_available': 41747726336,
'size_total': 52710309888},
'/boot': {'block_available': 187585,
'block_size': 4096,
'block_total': 249830,
'block_used': 62245,
'inode_available': 65096,
'inode_total': 65536,
'inode_used': 440,
'size_available': 768348160,
'size_total': 1023303680}
}
# ['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
BIND_MOUNTS = ['/not/a/real/bind_mount']
| gpl-3.0 |
mcameron/ansible-modules-extras | monitoring/zabbix_group.py | 89 | 6725 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: zabbix_group
short_description: Zabbix host groups creates/deletes
description:
- Create host groups if they do not exist.
- Delete existing host groups if they exist.
version_added: "1.8"
author:
- "(@cove)"
- "Tony Minfei Ding"
- "Harrison Gu (@harrisongu)"
requirements:
- "python >= 2.6"
- zabbix-api
options:
server_url:
description:
- Url of Zabbix server, with protocol (http or https).
C(url) is an alias for C(server_url).
required: true
aliases: [ "url" ]
login_user:
description:
- Zabbix user name.
required: true
login_password:
description:
- Zabbix user password.
required: true
state:
description:
- Create or delete host group.
required: false
default: "present"
choices: [ "present", "absent" ]
timeout:
description:
- The timeout of API request(seconds).
default: 10
host_groups:
description:
- List of host groups to create or delete.
required: true
aliases: [ "host_group" ]
notes:
- Too many concurrent updates to the same group may cause Zabbix to return errors, see examples for a workaround if needed.
'''
EXAMPLES = '''
# Base create host groups example
- name: Create host groups
local_action:
module: zabbix_group
server_url: http://monitor.example.com
login_user: username
login_password: password
state: present
host_groups:
- Example group1
- Example group2
# Limit the Zabbix group creations to one host since Zabbix can return an error when doing concurent updates
- name: Create host groups
local_action:
module: zabbix_group
server_url: http://monitor.example.com
login_user: username
login_password: password
state: present
host_groups:
- Example group1
- Example group2
when: inventory_hostname==groups['group_name'][0]
'''
# zabbix-api is an optional third-party dependency; record its availability
# so main() can fail with a helpful message instead of a raw ImportError.
try:
    from zabbix_api import ZabbixAPI, ZabbixAPISubClass
    from zabbix_api import Already_Exists
    HAS_ZABBIX_API = True
except ImportError:
    HAS_ZABBIX_API = False
class HostGroup(object):
    """Thin wrapper around the Zabbix host-group API calls used by main().

    ``module`` is the AnsibleModule instance (used for check-mode handling
    and failure reporting); ``zbx`` is an authenticated ZabbixAPI connection.
    """
    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def create_host_group(self, group_names):
        """Create every group in *group_names* that does not already exist.

        Returns the list of group names actually created (empty when all
        groups already existed). In check mode, exits as soon as a change
        would be made.
        """
        try:
            group_add_list = []
            for group_name in group_names:
                result = self._zapi.hostgroup.exists({'name': group_name})
                if not result:
                    try:
                        if self._module.check_mode:
                            self._module.exit_json(changed=True)
                        self._zapi.hostgroup.create({'name': group_name})
                        group_add_list.append(group_name)
                    except Already_Exists:
                        # Lost a creation race with another process.
                        # NOTE(review): this returns immediately and skips any
                        # remaining names -- confirm that is intended.
                        return group_add_list
            return group_add_list
        except Exception as e:  # `except Exception, e` was Python-2-only syntax
            self._module.fail_json(msg="Failed to create host group(s): %s" % e)

    def delete_host_group(self, group_ids):
        """Delete the host groups with the given ids (honours check mode)."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.hostgroup.delete(group_ids)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host group(s), Exception: %s" % e)

    def get_group_ids(self, host_groups):
        """Return ``(group_ids, group_list)`` for the named host groups."""
        group_ids = []
        group_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': host_groups}})
        for group in group_list:
            group_ids.append(group['groupid'])
        return group_ids, group_list
def main():
    """Module entry point: create or delete the requested Zabbix host groups."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(required=True, aliases=['url']),
            login_user=dict(required=True),
            login_password=dict(required=True, no_log=True),
            host_groups=dict(required=True, aliases=['host_group']),
            state=dict(default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        # Typo fixed in the user-facing message: "requried" -> "required".
        module.fail_json(msg="Missing required zabbix-api module (check docs or install with: pip install zabbix-api)")

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    host_groups = module.params['host_groups']
    state = module.params['state']
    timeout = module.params['timeout']

    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout)
        zbx.login(login_user, login_password)
    except Exception as e:  # `except Exception, e` was Python-2-only syntax
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    hostGroup = HostGroup(module, zbx)

    group_ids = []
    group_list = []
    if host_groups:
        group_ids, group_list = hostGroup.get_group_ids(host_groups)

    if state == "absent":
        # delete host groups
        if group_ids:
            hostGroup.delete_host_group(group_ids)
            delete_group_names = [group['name'] for group in group_list]
            module.exit_json(changed=True,
                             result="Successfully deleted host group(s): %s." % ",".join(delete_group_names))
        else:
            module.exit_json(changed=False, result="No host group(s) to delete.")
    else:
        # create host groups
        group_add_list = hostGroup.create_host_group(host_groups)
        if len(group_add_list) > 0:
            module.exit_json(changed=True, result="Successfully created host group(s): %s" % group_add_list)
        else:
            module.exit_json(changed=False)

from ansible.module_utils.basic import *
main()
| gpl-3.0 |
h4ck3rm1k3/FEC-Field-Documentation | fec/version/v2/SF.py | 1 | 2370 | import fechbase
class Records(fechbase.RecordsBase):
    """Field layout for FEC form type SF (coordinated party expenditures)."""
    def __init__(self):
        fechbase.RecordsBase.__init__(self)
        # (label, column-number) pairs, in file-column order.
        spec = [
            ('FORM TYPE', '1'),
            ('FILER FEC CMTE ID', '2'),
            ('HAS FILER BEEN DESIG TO MAKE CORD. EXPEND.', '3-'),
            ('FEC COMMITTEE ID NUMBER', '4'),
            ('DESIGNATING COMMITTEES. COMMITTEE NAME', '5'),
            ('FEC COMMITTEE ID NUMBER SUBORDINATE CMTE', '6'),
            ('COMMITTEE NAME', '7'),
            ('STREET 1', '8'),
            ('STREET 2', '9'),
            ('CITY', '10'),
            ('STATE', '11'),
            ('ZIP', '12'),
            ('ENTITY TYPE', '13'),
            ('NAME (Payee)', '14'),
            ('STREET 1', '15'),
            ('STREET 2', '16'),
            ('CITY', '17'),
            ('STATE', '18'),
            ('ZIP', '19'),
            ('AGG GEN ELE AMOUNT EXPENDED', '20'),
            ('TRANSDESC', '21'),
            ('DATE', '22'),
            ('AMOUNT EXPENDED', '23'),
            ('FEC COMMITTEE ID NUMBER', '24'),
            ('FEC CANDIDATE ID NUMBER', '25'),
            ('CANDIDATE NAME', '26'),
            ('CAN/OFF', '27'),
            ('STATE (OF ELECTION)', '28'),
            ('CAN/DIST', '29'),
            ('CONDUIT-NAME', '30'),
            ('CONDUIT-STREET-1', '31'),
            ('CONDUIT-STREET-2', '32'),
            ('CONDUIT-CITY', '33'),
            ('CONDUIT-STATE', '34'),
            ('CONDUIT-ZIP', '35'),
            ('AMENDED-CD', '36'),
            ('TRAN_ID', '37'),
            ('ORIG_TRAN_ID', '38'),
            ('SUPR_TRAN_ID', '39'),
        ]
        self.fields = [{'name': label, 'number': number} for label, number in spec]
        self.fields_names = self.hash_names(self.fields)
| unlicense |
mdblv2/joatu-django | application/site-packages/django/db/backends/mysql/creation.py | 210 | 3064 | from django.db.backends.creation import BaseDatabaseCreation
class DatabaseCreation(BaseDatabaseCreation):
    """MySQL-specific test-database creation and schema-generation helpers."""
    # This dictionary maps Field objects to their associated MySQL column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    data_types = {
        'AutoField': 'integer AUTO_INCREMENT',
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer UNSIGNED',
        'PositiveSmallIntegerField': 'smallint UNSIGNED',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'longtext',
        'TimeField': 'time',
    }
    def sql_table_creation_suffix(self):
        """Return a CHARACTER SET / COLLATE suffix built from the
        TEST_CHARSET and TEST_COLLATION connection settings (may be '')."""
        suffix = []
        if self.connection.settings_dict['TEST_CHARSET']:
            suffix.append('CHARACTER SET %s' % self.connection.settings_dict['TEST_CHARSET'])
        if self.connection.settings_dict['TEST_COLLATION']:
            suffix.append('COLLATE %s' % self.connection.settings_dict['TEST_COLLATION'])
        return ' '.join(suffix)
    def sql_for_inline_foreign_key_references(self, field, known_models, style):
        "All inline references are pending under MySQL"
        # Returns (no inline SQL, pending=True) so FK constraints are
        # emitted later as ALTER TABLE statements.
        return [], True
    def sql_for_inline_many_to_many_references(self, model, field, style):
        """Return (column definitions, deferred FK references) for the two
        join columns of *field*'s many-to-many intermediate table."""
        from django.db import models
        opts = model._meta
        qn = self.connection.ops.quote_name
        table_output = [
            '    %s %s %s,' %
                (style.SQL_FIELD(qn(field.m2m_column_name())),
                style.SQL_COLTYPE(models.ForeignKey(model).db_type(connection=self.connection)),
                style.SQL_KEYWORD('NOT NULL')),
            '    %s %s %s,' %
                (style.SQL_FIELD(qn(field.m2m_reverse_name())),
                style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type(connection=self.connection)),
                style.SQL_KEYWORD('NOT NULL'))
        ]
        # Each tuple: (m2m table, m2m column, referenced table, referenced column);
        # the FK constraints themselves are added after table creation.
        deferred = [
            (field.m2m_db_table(), field.m2m_column_name(), opts.db_table,
                opts.pk.column),
            (field.m2m_db_table(), field.m2m_reverse_name(),
                field.rel.to._meta.db_table, field.rel.to._meta.pk.column)
            ]
        return table_output, deferred
| apache-2.0 |
popazerty/enigma2-obh | keyids.py | 53 | 5285 | KEYIDS = {
"KEY_RESERVED": 0,
"KEY_ESC": 1,
"KEY_1": 2,
"KEY_2": 3,
"KEY_3": 4,
"KEY_4": 5,
"KEY_5": 6,
"KEY_6": 7,
"KEY_7": 8,
"KEY_8": 9,
"KEY_9": 10,
"KEY_0": 11,
"KEY_MINUS": 12,
"KEY_EQUAL": 13,
"KEY_BACKSPACE": 14,
"KEY_TAB": 15,
"KEY_Q": 16,
"KEY_W": 17,
"KEY_E": 18,
"KEY_R": 19,
"KEY_T": 20,
"KEY_Y": 21,
"KEY_U": 22,
"KEY_I": 23,
"KEY_O": 24,
"KEY_P": 25,
"KEY_LEFTBRACE": 26,
"KEY_RIGHTBRACE": 27,
"KEY_ENTER": 28,
"KEY_LEFTCTRL": 29,
"KEY_A": 30,
"KEY_S": 31,
"KEY_D": 32,
"KEY_F": 33,
"KEY_G": 34,
"KEY_H": 35,
"KEY_J": 36,
"KEY_K": 37,
"KEY_L": 38,
"KEY_SEMICOLON": 39,
"KEY_APOSTROPHE": 40,
"KEY_GRAVE": 41,
"KEY_LEFTSHIFT": 42,
"KEY_BACKSLASH": 43,
"KEY_Z": 44,
"KEY_X": 45,
"KEY_C": 46,
"KEY_V": 47,
"KEY_B": 48,
"KEY_N": 49,
"KEY_M": 50,
"KEY_COMMA": 51,
"KEY_DOT": 52,
"KEY_SLASH": 53,
"KEY_RIGHTSHIFT": 54,
"KEY_KPASTERISK": 55,
"KEY_LEFTALT": 56,
"KEY_SPACE": 57,
"KEY_CAPSLOCK": 58,
"KEY_F1": 59,
"KEY_F2": 60,
"KEY_F3": 61,
"KEY_F4": 62,
"KEY_F5": 63,
"KEY_F6": 64,
"KEY_F7": 65,
"KEY_F8": 66,
"KEY_F9": 67,
"KEY_F10": 68,
"KEY_NUMLOCK": 69,
"KEY_SCROLLLOCK": 70,
"KEY_KP7": 71,
"KEY_KP8": 72,
"KEY_KP9": 73,
"KEY_KPMINUS": 74,
"KEY_KP4": 75,
"KEY_KP5": 76,
"KEY_KP6": 77,
"KEY_KPPLUS": 78,
"KEY_KP1": 79,
"KEY_KP2": 80,
"KEY_KP3": 81,
"KEY_KP0": 82,
"KEY_KPDOT": 83,
"KEY_103RD": 84,
"KEY_F13": 85,
"KEY_102ND": 86,
"KEY_F11": 87,
"KEY_F12": 88,
"KEY_F14": 89,
"KEY_F15": 90,
"KEY_F16": 91,
"KEY_F17": 92,
"KEY_F18": 93,
"KEY_F19": 94,
"KEY_F20": 95,
"KEY_KPENTER": 96,
"KEY_RIGHTCTRL": 97,
"KEY_KPSLASH": 98,
"KEY_SYSRQ": 99,
"KEY_RIGHTALT": 100,
"KEY_LINEFEED": 101,
"KEY_HOME": 102,
"KEY_UP": 103,
"KEY_PAGEUP": 104,
"KEY_LEFT": 105,
"KEY_RIGHT": 106,
"KEY_END": 107,
"KEY_DOWN": 108,
"KEY_PAGEDOWN": 109,
"KEY_INSERT": 110,
"KEY_DELETE": 111,
"KEY_MACRO": 112,
"KEY_MUTE": 113,
"KEY_VOLUMEDOWN": 114,
"KEY_VOLUMEUP": 115,
"KEY_POWER": 116,
"KEY_KPEQUAL": 117,
"KEY_KPPLUSMINUS": 118,
"KEY_PAUSE": 119,
"KEY_F21": 120,
"KEY_F22": 121,
"KEY_F23": 122,
"KEY_F24": 123,
"KEY_KPCOMMA": 124,
"KEY_LEFTMETA": 125,
"KEY_RIGHTMETA": 126,
"KEY_COMPOSE": 127,
"KEY_STOP": 128,
"KEY_AGAIN": 129,
"KEY_PROPS": 130,
"KEY_UNDO": 131,
"KEY_FRONT": 132,
"KEY_COPY": 133,
"KEY_OPEN": 134,
"KEY_PASTE": 135,
"KEY_FIND": 136,
"KEY_CUT": 137,
"KEY_HELP": 138,
"KEY_MENU": 139,
"KEY_CALC": 140,
"KEY_SETUP": 141,
"KEY_SLEEP": 142,
"KEY_WAKEUP": 143,
"KEY_FILE": 144,
"KEY_SENDFILE": 145,
"KEY_DELETEFILE": 146,
"KEY_XFER": 147,
"KEY_PROG1": 148,
"KEY_PROG2": 149,
"KEY_WWW": 150,
"KEY_MSDOS": 151,
"KEY_COFFEE": 152,
"KEY_DIRECTION": 153,
"KEY_CYCLEWINDOWS": 154,
"KEY_MAIL": 155,
"KEY_BOOKMARKS": 156,
"KEY_COMPUTER": 157,
"KEY_BACK": 158,
"KEY_FORWARD": 159,
"KEY_CLOSECD": 160,
"KEY_EJECTCD": 161,
"KEY_EJECTCLOSECD": 162,
"KEY_NEXTSONG": 163,
"KEY_PLAYPAUSE": 164,
"KEY_PREVIOUSSONG": 165,
"KEY_STOPCD": 166,
"KEY_RECORD": 167,
"KEY_REWIND": 168,
"KEY_PHONE": 169,
"KEY_ISO": 170,
"KEY_CONFIG": 171,
"KEY_HOMEPAGE": 172,
"KEY_REFRESH": 173,
"KEY_EXIT": 174,
"KEY_MOVE": 175,
"KEY_EDIT": 176,
"KEY_SCROLLUP": 177,
"KEY_SCROLLDOWN": 178,
"KEY_KPLEFTPAREN": 179,
"KEY_KPRIGHTPAREN": 180,
"KEY_INTL1": 181,
"KEY_INTL2": 182,
"KEY_INTL3": 183,
"KEY_INTL4": 184,
"KEY_INTL5": 185,
"KEY_INTL6": 186,
"KEY_INTL7": 187,
"KEY_INTL8": 188,
"KEY_INTL9": 189,
"KEY_LANG1": 190,
"KEY_LANG2": 191,
"KEY_LANG3": 192,
"KEY_LANG4": 193,
"KEY_LANG5": 194,
"KEY_LANG6": 195,
"KEY_LANG7": 196,
"KEY_LANG8": 197,
"KEY_LANG9": 198,
"KEY_PLAYCD": 200,
"KEY_PAUSECD": 201,
"KEY_PROG3": 202,
"KEY_PROG4": 203,
"KEY_SUSPEND": 205,
"KEY_CLOSE": 206,
"KEY_PLAY": 207,
"KEY_FASTFORWARD": 208,
"KEY_BASSBOOST": 209,
"KEY_PRINT": 210,
"KEY_HP": 211,
"KEY_CAMERA": 212,
"KEY_SOUND": 213,
"KEY_QUESTION": 214,
"KEY_EMAIL": 215,
"KEY_CHAT": 216,
"KEY_SEARCH": 217,
"KEY_CONNECT": 218,
"KEY_FINANCE": 219,
"KEY_SPORT": 220,
"KEY_SHOP": 221,
"KEY_ALTERASE": 222,
"KEY_CANCEL": 223,
"KEY_BRIGHTNESSDOWN": 224,
"KEY_BRIGHTNESSUP": 225,
"KEY_MEDIA": 226,
"KEY_VMODE": 227,
"KEY_UNKNOWN": 240,
"KEY_OK": 352,
"KEY_SELECT": 353,
"KEY_GOTO": 354,
"KEY_CLEAR": 355,
"KEY_POWER2": 356,
"KEY_OPTION": 357,
"KEY_INFO": 358,
"KEY_TIME": 359,
"KEY_VENDOR": 360,
"KEY_ARCHIVE": 361,
"KEY_PROGRAM": 362,
"KEY_CHANNEL": 363,
"KEY_FAVORITES": 364,
"KEY_EPG": 365,
"KEY_PVR": 366,
"KEY_MHP": 367,
"KEY_LANGUAGE": 368,
"KEY_TITLE": 369,
"KEY_SUBTITLE": 370,
"KEY_ANGLE": 371,
"KEY_ZOOM": 372,
"KEY_MODE": 373,
"KEY_KEYBOARD": 374,
"KEY_SCREEN": 375,
"KEY_PC": 376,
"KEY_TV": 377,
"KEY_TV2": 378,
"KEY_VCR": 379,
"KEY_VCR2": 380,
"KEY_SAT": 381,
"KEY_SAT2": 382,
"KEY_CD": 383,
"KEY_TAPE": 384,
"KEY_RADIO": 385,
"KEY_TUNER": 386,
"KEY_PLAYER": 387,
"KEY_TEXT": 388,
"KEY_DVD": 389,
"KEY_AUX": 390,
"KEY_MP3": 391,
"KEY_AUDIO": 392,
"KEY_VIDEO": 393,
"KEY_DIRECTORY": 394,
"KEY_LIST": 395,
"KEY_MEMO": 396,
"KEY_CALENDAR": 397,
"KEY_RED": 398,
"KEY_GREEN": 399,
"KEY_YELLOW": 400,
"KEY_BLUE": 401,
"KEY_CHANNELUP": 402,
"KEY_CHANNELDOWN": 403,
"KEY_FIRST": 404,
"KEY_LAST": 405,
"KEY_AB": 406,
"KEY_NEXT": 407,
"KEY_RESTART": 408,
"KEY_SLOW": 409,
"KEY_SHUFFLE": 410,
"KEY_BREAK": 411,
"KEY_PREVIOUS": 412,
"KEY_DIGITS": 413,
"KEY_TEEN": 414,
"KEY_TWEN": 415,
"KEY_CONTEXT_MENU": 438,
"KEY_DEL_EOL": 448,
"KEY_DEL_EOS": 449,
"KEY_INS_LINE": 450,
"KEY_DEL_LINE": 451,
"KEY_ASCII": 510,
"KEY_MAX": 511,
"BTN_0": 256,
"BTN_1": 257,
}
| gpl-2.0 |
gylian/headphones | lib/unidecode/x0af.py | 253 | 5012 | data = (
'ggyeols', # 0x00
'ggyeolt', # 0x01
'ggyeolp', # 0x02
'ggyeolh', # 0x03
'ggyeom', # 0x04
'ggyeob', # 0x05
'ggyeobs', # 0x06
'ggyeos', # 0x07
'ggyeoss', # 0x08
'ggyeong', # 0x09
'ggyeoj', # 0x0a
'ggyeoc', # 0x0b
'ggyeok', # 0x0c
'ggyeot', # 0x0d
'ggyeop', # 0x0e
'ggyeoh', # 0x0f
'ggye', # 0x10
'ggyeg', # 0x11
'ggyegg', # 0x12
'ggyegs', # 0x13
'ggyen', # 0x14
'ggyenj', # 0x15
'ggyenh', # 0x16
'ggyed', # 0x17
'ggyel', # 0x18
'ggyelg', # 0x19
'ggyelm', # 0x1a
'ggyelb', # 0x1b
'ggyels', # 0x1c
'ggyelt', # 0x1d
'ggyelp', # 0x1e
'ggyelh', # 0x1f
'ggyem', # 0x20
'ggyeb', # 0x21
'ggyebs', # 0x22
'ggyes', # 0x23
'ggyess', # 0x24
'ggyeng', # 0x25
'ggyej', # 0x26
'ggyec', # 0x27
'ggyek', # 0x28
'ggyet', # 0x29
'ggyep', # 0x2a
'ggyeh', # 0x2b
'ggo', # 0x2c
'ggog', # 0x2d
'ggogg', # 0x2e
'ggogs', # 0x2f
'ggon', # 0x30
'ggonj', # 0x31
'ggonh', # 0x32
'ggod', # 0x33
'ggol', # 0x34
'ggolg', # 0x35
'ggolm', # 0x36
'ggolb', # 0x37
'ggols', # 0x38
'ggolt', # 0x39
'ggolp', # 0x3a
'ggolh', # 0x3b
'ggom', # 0x3c
'ggob', # 0x3d
'ggobs', # 0x3e
'ggos', # 0x3f
'ggoss', # 0x40
'ggong', # 0x41
'ggoj', # 0x42
'ggoc', # 0x43
'ggok', # 0x44
'ggot', # 0x45
'ggop', # 0x46
'ggoh', # 0x47
'ggwa', # 0x48
'ggwag', # 0x49
'ggwagg', # 0x4a
'ggwags', # 0x4b
'ggwan', # 0x4c
'ggwanj', # 0x4d
'ggwanh', # 0x4e
'ggwad', # 0x4f
'ggwal', # 0x50
'ggwalg', # 0x51
'ggwalm', # 0x52
'ggwalb', # 0x53
'ggwals', # 0x54
'ggwalt', # 0x55
'ggwalp', # 0x56
'ggwalh', # 0x57
'ggwam', # 0x58
'ggwab', # 0x59
'ggwabs', # 0x5a
'ggwas', # 0x5b
'ggwass', # 0x5c
'ggwang', # 0x5d
'ggwaj', # 0x5e
'ggwac', # 0x5f
'ggwak', # 0x60
'ggwat', # 0x61
'ggwap', # 0x62
'ggwah', # 0x63
'ggwae', # 0x64
'ggwaeg', # 0x65
'ggwaegg', # 0x66
'ggwaegs', # 0x67
'ggwaen', # 0x68
'ggwaenj', # 0x69
'ggwaenh', # 0x6a
'ggwaed', # 0x6b
'ggwael', # 0x6c
'ggwaelg', # 0x6d
'ggwaelm', # 0x6e
'ggwaelb', # 0x6f
'ggwaels', # 0x70
'ggwaelt', # 0x71
'ggwaelp', # 0x72
'ggwaelh', # 0x73
'ggwaem', # 0x74
'ggwaeb', # 0x75
'ggwaebs', # 0x76
'ggwaes', # 0x77
'ggwaess', # 0x78
'ggwaeng', # 0x79
'ggwaej', # 0x7a
'ggwaec', # 0x7b
'ggwaek', # 0x7c
'ggwaet', # 0x7d
'ggwaep', # 0x7e
'ggwaeh', # 0x7f
'ggoe', # 0x80
'ggoeg', # 0x81
'ggoegg', # 0x82
'ggoegs', # 0x83
'ggoen', # 0x84
'ggoenj', # 0x85
'ggoenh', # 0x86
'ggoed', # 0x87
'ggoel', # 0x88
'ggoelg', # 0x89
'ggoelm', # 0x8a
'ggoelb', # 0x8b
'ggoels', # 0x8c
'ggoelt', # 0x8d
'ggoelp', # 0x8e
'ggoelh', # 0x8f
'ggoem', # 0x90
'ggoeb', # 0x91
'ggoebs', # 0x92
'ggoes', # 0x93
'ggoess', # 0x94
'ggoeng', # 0x95
'ggoej', # 0x96
'ggoec', # 0x97
'ggoek', # 0x98
'ggoet', # 0x99
'ggoep', # 0x9a
'ggoeh', # 0x9b
'ggyo', # 0x9c
'ggyog', # 0x9d
'ggyogg', # 0x9e
'ggyogs', # 0x9f
'ggyon', # 0xa0
'ggyonj', # 0xa1
'ggyonh', # 0xa2
'ggyod', # 0xa3
'ggyol', # 0xa4
'ggyolg', # 0xa5
'ggyolm', # 0xa6
'ggyolb', # 0xa7
'ggyols', # 0xa8
'ggyolt', # 0xa9
'ggyolp', # 0xaa
'ggyolh', # 0xab
'ggyom', # 0xac
'ggyob', # 0xad
'ggyobs', # 0xae
'ggyos', # 0xaf
'ggyoss', # 0xb0
'ggyong', # 0xb1
'ggyoj', # 0xb2
'ggyoc', # 0xb3
'ggyok', # 0xb4
'ggyot', # 0xb5
'ggyop', # 0xb6
'ggyoh', # 0xb7
'ggu', # 0xb8
'ggug', # 0xb9
'ggugg', # 0xba
'ggugs', # 0xbb
'ggun', # 0xbc
'ggunj', # 0xbd
'ggunh', # 0xbe
'ggud', # 0xbf
'ggul', # 0xc0
'ggulg', # 0xc1
'ggulm', # 0xc2
'ggulb', # 0xc3
'gguls', # 0xc4
'ggult', # 0xc5
'ggulp', # 0xc6
'ggulh', # 0xc7
'ggum', # 0xc8
'ggub', # 0xc9
'ggubs', # 0xca
'ggus', # 0xcb
'gguss', # 0xcc
'ggung', # 0xcd
'gguj', # 0xce
'gguc', # 0xcf
'gguk', # 0xd0
'ggut', # 0xd1
'ggup', # 0xd2
'gguh', # 0xd3
'ggweo', # 0xd4
'ggweog', # 0xd5
'ggweogg', # 0xd6
'ggweogs', # 0xd7
'ggweon', # 0xd8
'ggweonj', # 0xd9
'ggweonh', # 0xda
'ggweod', # 0xdb
'ggweol', # 0xdc
'ggweolg', # 0xdd
'ggweolm', # 0xde
'ggweolb', # 0xdf
'ggweols', # 0xe0
'ggweolt', # 0xe1
'ggweolp', # 0xe2
'ggweolh', # 0xe3
'ggweom', # 0xe4
'ggweob', # 0xe5
'ggweobs', # 0xe6
'ggweos', # 0xe7
'ggweoss', # 0xe8
'ggweong', # 0xe9
'ggweoj', # 0xea
'ggweoc', # 0xeb
'ggweok', # 0xec
'ggweot', # 0xed
'ggweop', # 0xee
'ggweoh', # 0xef
'ggwe', # 0xf0
'ggweg', # 0xf1
'ggwegg', # 0xf2
'ggwegs', # 0xf3
'ggwen', # 0xf4
'ggwenj', # 0xf5
'ggwenh', # 0xf6
'ggwed', # 0xf7
'ggwel', # 0xf8
'ggwelg', # 0xf9
'ggwelm', # 0xfa
'ggwelb', # 0xfb
'ggwels', # 0xfc
'ggwelt', # 0xfd
'ggwelp', # 0xfe
'ggwelh', # 0xff
)
| gpl-3.0 |
markslwong/tensorflow | tensorflow/python/ops/gradients.py | 138 | 1417 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implements the graph generation for computation of gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.ops.gradients_impl import AggregationMethod
from tensorflow.python.ops.gradients_impl import gradients
from tensorflow.python.ops.gradients_impl import hessians
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
# Public names re-exported by this shim module; remove_undocumented() scrubs
# everything else imported above from the module namespace.
_allowed_symbols = [
    # TODO(drpng): find a good place to reference this.
    "AggregationMethod",
    "gradients", # tf.gradients.gradients.
    "hessians", # tf.gradients.hessians
]
remove_undocumented(__name__, _allowed_symbols)
| apache-2.0 |
wudafucode/flask | flask/logging.py | 194 | 2683 | # -*- coding: utf-8 -*-
"""
flask.logging
~~~~~~~~~~~~~
Implements the logging support for Flask.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from werkzeug.local import LocalProxy
from logging import getLogger, StreamHandler, Formatter, getLoggerClass, \
DEBUG, ERROR
from .globals import _request_ctx_stack
# Compact record format used by the production handler (ERROR level only).
PROD_LOG_FORMAT = '[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
# Verbose, visually separated record format used by the debug handler.
DEBUG_LOG_FORMAT = (
    '-' * 80 + '\n' +
    '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
    '%(message)s\n' +
    '-' * 80
)
@LocalProxy
def _proxy_stream():
    """Resolve the most appropriate error stream lazily.

    While a WSGI request is in flight the request's ``wsgi.errors`` stream
    is used; outside of a request context this falls back to ``sys.stderr``.
    """
    request_ctx = _request_ctx_stack.top
    if request_ctx is None:
        return sys.stderr
    return request_ctx.request.environ['wsgi.errors']
def _should_log_for(app, mode):
policy = app.config['LOGGER_HANDLER_POLICY']
if policy == mode or policy == 'always':
return True
return False
def create_logger(app):
    """Create a logger for the given application.

    The returned logger behaves like a regular Python logger but derives its
    effective level from the application's ``debug`` flag.  Any handlers
    already attached to a logger with the same name are discarded first.
    """
    base_logger_cls = getLoggerClass()

    class DebugLogger(base_logger_cls):
        # Report DEBUG while the app is debugging and no explicit level is set.
        def getEffectiveLevel(self):
            if self.level == 0 and app.debug:
                return DEBUG
            return base_logger_cls.getEffectiveLevel(self)

    class DebugHandler(StreamHandler):
        # Only emits while the app is in debug mode (and the policy allows it).
        def emit(self, record):
            if app.debug and _should_log_for(app, 'debug'):
                StreamHandler.emit(self, record)

    class ProductionHandler(StreamHandler):
        # Only emits outside of debug mode (and the policy allows it).
        def emit(self, record):
            if not app.debug and _should_log_for(app, 'production'):
                StreamHandler.emit(self, record)

    debug_handler = DebugHandler()
    debug_handler.setLevel(DEBUG)
    debug_handler.setFormatter(Formatter(DEBUG_LOG_FORMAT))
    prod_handler = ProductionHandler(_proxy_stream)
    prod_handler.setLevel(ERROR)
    prod_handler.setFormatter(Formatter(PROD_LOG_FORMAT))
    logger = getLogger(app.logger_name)
    # In case that was not a new logger, drop all previously attached handlers.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(debug_handler)
    logger.addHandler(prod_handler)
    return logger
| bsd-3-clause |
kashif/scikit-learn | sklearn/utils/tests/test_extmath.py | 19 | 21979 | # Authors: Olivier Grisel <olivier.grisel@ensta.org>
# Mathieu Blondel <mathieu@mblondel.org>
# Denis Engemann <d.engemann@fz-juelich.de>
#
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from scipy import linalg
from scipy import stats
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import skip_if_32bit
from sklearn.utils.extmath import density
from sklearn.utils.extmath import logsumexp
from sklearn.utils.extmath import norm, squared_norm
from sklearn.utils.extmath import randomized_svd
from sklearn.utils.extmath import row_norms
from sklearn.utils.extmath import weighted_mode
from sklearn.utils.extmath import cartesian
from sklearn.utils.extmath import log_logistic
from sklearn.utils.extmath import fast_dot, _fast_dot
from sklearn.utils.extmath import svd_flip
from sklearn.utils.extmath import _incremental_mean_and_var
from sklearn.utils.extmath import _deterministic_vector_sign_flip
from sklearn.utils.extmath import softmax
from sklearn.datasets.samples_generator import make_low_rank_matrix
def test_density():
    """density() must agree between a dense array and every sparse format."""
    rng = np.random.RandomState(0)
    X = rng.randint(10, size=(10, 5))
    X[1, 2] = 0
    X[5, 3] = 0
    sparse_variants = (sparse.csr_matrix(X), sparse.csc_matrix(X),
                       sparse.coo_matrix(X), sparse.lil_matrix(X))
    for X_sparse in sparse_variants:
        assert_equal(density(X_sparse), density(X))
def test_uniform_weights():
    """weighted_mode with all-ones weights must match scipy.stats.mode."""
    # with uniform weights, results should be identical to stats.mode
    rng = np.random.RandomState(0)
    x = rng.randint(10, size=(10, 5))
    weights = np.ones(x.shape)
    for axis in (None, 0, 1):
        mode, score = stats.mode(x, axis)
        mode2, score2 = weighted_mode(x, weights, axis)
        assert_true(np.all(mode == mode2))
        assert_true(np.all(score == score2))
def test_random_weights():
    """Check weighted_mode on rows engineered to have a known mode of 6."""
    # set this up so that each row should have a weighted mode of 6,
    # with a score that is easily reproduced
    mode_result = 6
    rng = np.random.RandomState(0)
    x = rng.randint(mode_result, size=(100, 10))
    w = rng.random_sample(x.shape)
    x[:, :5] = mode_result
    w[:, :5] += 1
    mode, score = weighted_mode(x, w, axis=1)
    assert_array_equal(mode, mode_result)
    assert_array_almost_equal(score.ravel(), w[:, :5].sum(1))
def test_logsumexp():
    """exp(logsumexp(log(x))) must recover sum(x), including per-axis."""
    # Try to add some smallish numbers in logspace
    x = np.array([1e-40] * 1000000)
    logx = np.log(x)
    assert_almost_equal(np.exp(logsumexp(logx)), x.sum())
    X = np.vstack([x, x])
    logX = np.vstack([logx, logx])
    assert_array_almost_equal(np.exp(logsumexp(logX, axis=0)), X.sum(axis=0))
    assert_array_almost_equal(np.exp(logsumexp(logX, axis=1)), X.sum(axis=1))
def test_randomized_svd_low_rank():
    """randomized_svd must match the exact SVD on a noise-free low-rank matrix."""
    # Check that extmath.randomized_svd is consistent with linalg.svd
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # generate a matrix X of approximate effective rank `rank` and no noise
    # component (very structured signal):
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.0,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # compute the singular values of X using the slow exact method
    U, s, V = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'none', 'LU', 'QR']:
        # compute the singular values of X using the fast approximate method
        Ua, sa, Va = \
            randomized_svd(X, k, power_iteration_normalizer=normalizer)
        assert_equal(Ua.shape, (n_samples, k))
        assert_equal(sa.shape, (k,))
        assert_equal(Va.shape, (k, n_features))
        # ensure that the singular values of both methods are equal up to the
        # real rank of the matrix
        assert_almost_equal(s[:k], sa)
        # check the singular vectors too (while not checking the sign)
        assert_almost_equal(np.dot(U[:, :k], V[:k, :]), np.dot(Ua, Va))
        # check the sparse matrix representation
        X = sparse.csr_matrix(X)
        # compute the singular values of X using the fast approximate method
        Ua, sa, Va = \
            randomized_svd(X, k, power_iteration_normalizer=normalizer)
        assert_almost_equal(s[:rank], sa[:rank])
def test_norm_squared_norm():
    """norm and squared_norm must agree with np.linalg.norm on large values."""
    X = np.random.RandomState(42).randn(50, 63)
    X *= 100 # check stability
    X += 200
    assert_almost_equal(np.linalg.norm(X.ravel()), norm(X))
    assert_almost_equal(norm(X) ** 2, squared_norm(X), decimal=6)
    assert_almost_equal(np.linalg.norm(X), np.sqrt(squared_norm(X)), decimal=6)
def test_row_norms():
    """row_norms must return per-row L2 norms for dense and CSR inputs."""
    X = np.random.RandomState(42).randn(100, 100)
    sq_norm = (X ** 2).sum(axis=1)
    assert_array_almost_equal(sq_norm, row_norms(X, squared=True), 5)
    assert_array_almost_equal(np.sqrt(sq_norm), row_norms(X))
    Xcsr = sparse.csr_matrix(X, dtype=np.float32)
    assert_array_almost_equal(sq_norm, row_norms(Xcsr, squared=True), 5)
    assert_array_almost_equal(np.sqrt(sq_norm), row_norms(Xcsr))
def test_randomized_svd_low_rank_with_noise():
    """Power iterations must recover the spectrum of a noisy matrix."""
    # Check that extmath.randomized_svd can handle noisy matrices
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # generate a matrix X with structured approximate rank `rank` and an
    # important noisy component
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.1,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # compute the singular values of X using the slow exact method
    _, s, _ = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'none', 'LU', 'QR']:
        # compute the singular values of X using the fast approximate
        # method without the iterated power method
        _, sa, _ = randomized_svd(X, k, n_iter=0,
                                  power_iteration_normalizer=normalizer)
        # the approximation does not tolerate the noise:
        assert_greater(np.abs(s[:k] - sa).max(), 0.01)
        # compute the singular values of X using the fast approximate
        # method with iterated power method
        _, sap, _ = randomized_svd(X, k,
                                   power_iteration_normalizer=normalizer)
        # the iterated power method is helping getting rid of the noise:
        assert_almost_equal(s[:k], sap, decimal=3)
def test_randomized_svd_infinite_rank():
    """Power iterations must help even when singular values decay slowly."""
    # Check that extmath.randomized_svd can handle noisy matrices
    n_samples = 100
    n_features = 500
    rank = 5
    k = 10
    # let us try again without 'low_rank component': just regularly but slowly
    # decreasing singular values: the rank of the data matrix is infinite
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=1.0,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    # compute the singular values of X using the slow exact method
    _, s, _ = linalg.svd(X, full_matrices=False)
    for normalizer in ['auto', 'none', 'LU', 'QR']:
        # compute the singular values of X using the fast approximate method
        # without the iterated power method
        _, sa, _ = randomized_svd(X, k, n_iter=0,
                                  power_iteration_normalizer=normalizer)
        # the approximation does not tolerate the noise:
        assert_greater(np.abs(s[:k] - sa).max(), 0.1)
        # compute the singular values of X using the fast approximate method
        # with iterated power method
        _, sap, _ = randomized_svd(X, k, n_iter=5,
                                   power_iteration_normalizer=normalizer)
        # the iterated power method is still managing to get most of the
        # structure at the requested rank
        assert_almost_equal(s[:k], sap, decimal=3)
def test_randomized_svd_transpose_consistency():
    """Transposing the input must have limited impact on randomized_svd."""
    # Check that transposing the design matrix has limited impact
    n_samples = 100
    n_features = 500
    rank = 4
    k = 10
    X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features,
                             effective_rank=rank, tail_strength=0.5,
                             random_state=0)
    assert_equal(X.shape, (n_samples, n_features))
    U1, s1, V1 = randomized_svd(X, k, n_iter=3, transpose=False,
                                random_state=0)
    U2, s2, V2 = randomized_svd(X, k, n_iter=3, transpose=True,
                                random_state=0)
    U3, s3, V3 = randomized_svd(X, k, n_iter=3, transpose='auto',
                                random_state=0)
    U4, s4, V4 = linalg.svd(X, full_matrices=False)
    assert_almost_equal(s1, s4[:k], decimal=3)
    assert_almost_equal(s2, s4[:k], decimal=3)
    assert_almost_equal(s3, s4[:k], decimal=3)
    assert_almost_equal(np.dot(U1, V1), np.dot(U4[:, :k], V4[:k, :]),
                        decimal=2)
    assert_almost_equal(np.dot(U2, V2), np.dot(U4[:, :k], V4[:k, :]),
                        decimal=2)
    # in this case 'auto' is equivalent to transpose
    assert_almost_equal(s2, s3)
def test_randomized_svd_power_iteration_normalizer():
    """'none' normalization diverges with many iterations; 'LU'/'QR'/'auto' stay stable."""
    # randomized_svd with power_iteration_normalized='none' diverges for
    # large number of power iterations on this dataset
    rng = np.random.RandomState(42)
    X = make_low_rank_matrix(300, 1000, effective_rank=50, random_state=rng)
    X += 3 * rng.randint(0, 2, size=X.shape)
    n_components = 50
    # Check that it diverges with many (non-normalized) power iterations
    U, s, V = randomized_svd(X, n_components, n_iter=2,
                             power_iteration_normalizer='none')
    A = X - U.dot(np.diag(s).dot(V))
    error_2 = linalg.norm(A, ord='fro')
    U, s, V = randomized_svd(X, n_components, n_iter=20,
                             power_iteration_normalizer='none')
    A = X - U.dot(np.diag(s).dot(V))
    error_20 = linalg.norm(A, ord='fro')
    print(error_2 - error_20)
    assert_greater(np.abs(error_2 - error_20), 100)
    for normalizer in ['LU', 'QR', 'auto']:
        U, s, V = randomized_svd(X, n_components, n_iter=2,
                                 power_iteration_normalizer=normalizer)
        A = X - U.dot(np.diag(s).dot(V))
        error_2 = linalg.norm(A, ord='fro')
        for i in [5, 10, 50]:
            U, s, V = randomized_svd(X, n_components, n_iter=i,
                                     power_iteration_normalizer=normalizer)
            A = X - U.dot(np.diag(s).dot(V))
            error = linalg.norm(A, ord='fro')
            print(error_2 - error)
            assert_greater(15, np.abs(error_2 - error))
def test_svd_flip():
    """svd_flip must preserve reconstruction for both decision modes."""
    # Check that svd_flip works in both situations, and reconstructs input.
    rs = np.random.RandomState(1999)
    n_samples = 20
    n_features = 10
    X = rs.randn(n_samples, n_features)
    # Check matrix reconstruction
    U, S, V = linalg.svd(X, full_matrices=False)
    U1, V1 = svd_flip(U, V, u_based_decision=False)
    assert_almost_equal(np.dot(U1 * S, V1), X, decimal=6)
    # Check transposed matrix reconstruction
    XT = X.T
    U, S, V = linalg.svd(XT, full_matrices=False)
    U2, V2 = svd_flip(U, V, u_based_decision=True)
    assert_almost_equal(np.dot(U2 * S, V2), XT, decimal=6)
    # Check that different flip methods are equivalent under reconstruction
    U_flip1, V_flip1 = svd_flip(U, V, u_based_decision=True)
    assert_almost_equal(np.dot(U_flip1 * S, V_flip1), XT, decimal=6)
    U_flip2, V_flip2 = svd_flip(U, V, u_based_decision=False)
    assert_almost_equal(np.dot(U_flip2 * S, V_flip2), XT, decimal=6)
def test_randomized_svd_sign_flip():
    """With flip_sign=True the result must not depend on the random seed."""
    a = np.array([[2.0, 0.0], [0.0, 1.0]])
    u1, s1, v1 = randomized_svd(a, 2, flip_sign=True, random_state=41)
    for seed in range(10):
        u2, s2, v2 = randomized_svd(a, 2, flip_sign=True, random_state=seed)
        assert_almost_equal(u1, u2)
        assert_almost_equal(v1, v2)
        assert_almost_equal(np.dot(u2 * s2, v2), a)
        assert_almost_equal(np.dot(u2.T, u2), np.eye(2))
        assert_almost_equal(np.dot(v2.T, v2), np.eye(2))
def test_cartesian():
    """cartesian() must enumerate the product in lexicographic order."""
    # Check if cartesian product delivers the right results
    axes = (np.array([1, 2, 3]), np.array([4, 5]), np.array([6, 7]))
    true_out = np.array([[1, 4, 6],
                         [1, 4, 7],
                         [1, 5, 6],
                         [1, 5, 7],
                         [2, 4, 6],
                         [2, 4, 7],
                         [2, 5, 6],
                         [2, 5, 7],
                         [3, 4, 6],
                         [3, 4, 7],
                         [3, 5, 6],
                         [3, 5, 7]])
    out = cartesian(axes)
    assert_array_equal(true_out, out)
    # check single axis
    x = np.arange(3)
    assert_array_equal(x[:, np.newaxis], cartesian((x,)))
def test_logistic_sigmoid():
    """log_logistic must match the naive formula and stay finite at extremes."""
    # Check correctness and robustness of logistic sigmoid implementation
    naive_logistic = lambda x: 1 / (1 + np.exp(-x))
    naive_log_logistic = lambda x: np.log(naive_logistic(x))
    x = np.linspace(-2, 2, 50)
    assert_array_almost_equal(log_logistic(x), naive_log_logistic(x))
    extreme_x = np.array([-100., 100.])
    assert_array_almost_equal(log_logistic(extreme_x), [-100, 0])
def test_fast_dot():
    """Exercise the BLAS-backed fast_dot wrapper and its input validation."""
    # Check fast dot blas wrapper function
    if fast_dot is np.dot:
        return
    rng = np.random.RandomState(42)
    A = rng.random_sample([2, 10])
    B = rng.random_sample([2, 10])
    try:
        linalg.get_blas_funcs(['gemm'])[0]
        has_blas = True
    except (AttributeError, ValueError):
        has_blas = False
    if has_blas:
        # Test _fast_dot for invalid input.
        # Mistyped data.
        for dt1, dt2 in [['f8', 'f4'], ['i4', 'i4']]:
            assert_raises(ValueError, _fast_dot, A.astype(dt1),
                          B.astype(dt2).T)
        # Malformed data.
        # ndim == 0
        E = np.empty(0)
        assert_raises(ValueError, _fast_dot, E, E)
        # ndim == 1
        assert_raises(ValueError, _fast_dot, A, A[0])
        # ndim > 2
        assert_raises(ValueError, _fast_dot, A.T, np.array([A, A]))
        # min(shape) == 1
        assert_raises(ValueError, _fast_dot, A, A[0, :][None, :])
        # test for matrix mismatch error
        assert_raises(ValueError, _fast_dot, A, A)
    # Test cov-like use case + dtypes.
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)
        # col < row
        C = np.dot(A.T, A)
        C_ = fast_dot(A.T, A)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A, B.T)
        C_ = fast_dot(A, B.T)
        assert_almost_equal(C, C_, decimal=5)
    # Test square matrix * rectangular use case.
    A = rng.random_sample([2, 2])
    for dtype in ['f8', 'f4']:
        A = A.astype(dtype)
        B = B.astype(dtype)
        C = np.dot(A, B)
        C_ = fast_dot(A, B)
        assert_almost_equal(C, C_, decimal=5)
        C = np.dot(A.T, B)
        C_ = fast_dot(A.T, B)
        assert_almost_equal(C, C_, decimal=5)
    if has_blas:
        for x in [np.array([[d] * 10] * 2) for d in [np.inf, np.nan]]:
            assert_raises(ValueError, _fast_dot, x, x.T)
def test_incremental_variance_update_formulas():
    """Incremental mean/var over a split array must equal the batch result."""
    # Test Youngs and Cramer incremental variance formulas.
    # Doggie data from http://www.mathsisfun.com/data/standard-deviation.html
    A = np.array([[600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300],
                  [600, 470, 170, 430, 300]]).T
    idx = 2
    X1 = A[:idx, :]
    X2 = A[idx:, :]
    old_means = X1.mean(axis=0)
    old_variances = X1.var(axis=0)
    old_sample_count = X1.shape[0]
    final_means, final_variances, final_count = \
        _incremental_mean_and_var(X2, old_means, old_variances,
                                  old_sample_count)
    assert_almost_equal(final_means, A.mean(axis=0), 6)
    assert_almost_equal(final_variances, A.var(axis=0), 6)
    assert_almost_equal(final_count, A.shape[0])
@skip_if_32bit
def test_incremental_variance_numerical_stability():
    """_incremental_mean_and_var must beat the naive online update numerically."""
    # Test Youngs and Cramer incremental variance formulas.
    def np_var(A):
        return A.var(axis=0)
    # Naive one pass variance computation - not numerically stable
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
    def one_pass_var(X):
        n = X.shape[0]
        exp_x2 = (X ** 2).sum(axis=0) / n
        expx_2 = (X.sum(axis=0) / n) ** 2
        return exp_x2 - expx_2
    # Two-pass algorithm, stable.
    # We use it as a benchmark. It is not an online algorithm
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Two-pass_algorithm
    def two_pass_var(X):
        mean = X.mean(axis=0)
        Y = X.copy()
        return np.mean((Y - mean)**2, axis=0)
    # Naive online implementation
    # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm
    # This works only for chunks of size 1
    def naive_mean_variance_update(x, last_mean, last_variance,
                                   last_sample_count):
        updated_sample_count = (last_sample_count + 1)
        samples_ratio = last_sample_count / float(updated_sample_count)
        updated_mean = x / updated_sample_count + last_mean * samples_ratio
        updated_variance = last_variance * samples_ratio + \
            (x - last_mean) * (x - updated_mean) / updated_sample_count
        return updated_mean, updated_variance, updated_sample_count
    # We want to show a case when one_pass_var has error > 1e-3 while
    # _batch_mean_variance_update has less.
    tol = 200
    n_features = 2
    n_samples = 10000
    x1 = np.array(1e8, dtype=np.float64)
    x2 = np.log(1e-5, dtype=np.float64)
    A0 = x1 * np.ones((n_samples // 2, n_features), dtype=np.float64)
    A1 = x2 * np.ones((n_samples // 2, n_features), dtype=np.float64)
    A = np.vstack((A0, A1))
    # Older versions of numpy have different precision
    # In some old version, np.var is not stable
    if np.abs(np_var(A) - two_pass_var(A)).max() < 1e-6:
        stable_var = np_var
    else:
        stable_var = two_pass_var
    # Naive one pass var: >tol (=1063)
    assert_greater(np.abs(stable_var(A) - one_pass_var(A)).max(), tol)
    # Starting point for online algorithms: after A0
    # Naive implementation: >tol (436)
    mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2
    for i in range(A1.shape[0]):
        mean, var, n = \
            naive_mean_variance_update(A1[i, :], mean, var, n)
    assert_equal(n, A.shape[0])
    # the mean is also slightly unstable
    assert_greater(np.abs(A.mean(axis=0) - mean).max(), 1e-6)
    assert_greater(np.abs(stable_var(A) - var).max(), tol)
    # Robust implementation: <tol (177)
    mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2
    for i in range(A1.shape[0]):
        mean, var, n = \
            _incremental_mean_and_var(A1[i, :].reshape((1, A1.shape[1])),
                                      mean, var, n)
    assert_equal(n, A.shape[0])
    assert_array_almost_equal(A.mean(axis=0), mean)
    assert_greater(tol, np.abs(stable_var(A) - var).max())
def test_incremental_variance_ddof():
    """Batched incremental updates must track np.mean/np.var exactly."""
    # Test that degrees of freedom parameter for calculations are correct.
    rng = np.random.RandomState(1999)
    X = rng.randn(50, 10)
    n_samples, n_features = X.shape
    for batch_size in [11, 20, 37]:
        steps = np.arange(0, X.shape[0], batch_size)
        if steps[-1] != X.shape[0]:
            steps = np.hstack([steps, n_samples])
        for i, j in zip(steps[:-1], steps[1:]):
            batch = X[i:j, :]
            if i == 0:
                incremental_means = batch.mean(axis=0)
                incremental_variances = batch.var(axis=0)
                # Assign this twice so that the test logic is consistent
                incremental_count = batch.shape[0]
                sample_count = batch.shape[0]
            else:
                result = _incremental_mean_and_var(
                    batch, incremental_means, incremental_variances,
                    sample_count)
                (incremental_means, incremental_variances,
                 incremental_count) = result
                sample_count += batch.shape[0]
            calculated_means = np.mean(X[:j], axis=0)
            calculated_variances = np.var(X[:j], axis=0)
            assert_almost_equal(incremental_means, calculated_means, 6)
            assert_almost_equal(incremental_variances,
                                calculated_variances, 6)
            assert_equal(incremental_count, sample_count)
def test_vector_sign_flip():
    """After flipping, each row's largest-magnitude entry must be positive."""
    # Testing that sign flip is working & largest value has positive sign
    data = np.random.RandomState(36).randn(5, 5)
    max_abs_rows = np.argmax(np.abs(data), axis=1)
    data_flipped = _deterministic_vector_sign_flip(data)
    max_rows = np.argmax(data_flipped, axis=1)
    assert_array_equal(max_abs_rows, max_rows)
    signs = np.sign(data[range(data.shape[0]), max_abs_rows])
    assert_array_equal(data, data_flipped * signs[:, np.newaxis])
def test_softmax():
    """softmax rows must equal exp(X) normalized by the per-row sums."""
    rng = np.random.RandomState(0)
    X = rng.randn(3, 5)
    expected = np.exp(X) / np.sum(np.exp(X), axis=1).reshape((-1, 1))
    assert_array_almost_equal(softmax(X), expected)
| bsd-3-clause |
pombredanne/nTLP | examples/autonomous_car_road.py | 1 | 11883 | #!/usr/bin/env python
""" The autonomous car example presented in the CDC/HSCC paper, illustrating the use of
rhtlp module.
ORIGINALLY BY Nok Wongpiromsarn (nok@cds.caltech.edu)
August 28, 2010
Small modifications by Yuchen Lin.
12 Aug 2011
"""
#@import_section@
import sys, os
import math
from numpy import array
from subprocess import call
from tulip.polytope import Polytope
from tulip.discretize import CtsSysDyn
from tulip.spec import GRSpec
from tulip.rhtlp import RHTLPProb, ShortHorizonProb
from tulip import grsim
#@import_section_end@
# Road configuration and Problem setup
#@roadsetup@
roadWidth = 3    # number of lanes (cells across the road)
roadLength = 10  # number of cells along the road
dpopup = 2       # obstacles must be detected at least this many cells away
dsr = 3          # sensing range used in the environment assumptions below
horizon = 3      # receding-horizon length (cells) for the short-horizon problems
#@roadsetup_end@
# Continuous dynamics: \dot{x} = u_x, \dot{y} = u_y
#@contdyn@
# Discrete-time system matrices; U constrains the inputs to |u_x| <= 1, |u_y| <= 1.
A = array([[1.1052, 0.],[ 0., 1.1052]])
B = array([[1.1052, 0.],[ 0., 1.1052]])
U = Polytope(array([[1., 0.],[-1., 0.], [0., 1.], [0., -1.]]), array([[1.],[1.],[1.],[1.]]))
sys_dyn = CtsSysDyn(A,B,[],[],U,[])
#@contdyn_end@
# Variables and propositions
#@varprop@
# One boolean environment variable ('obs<id>') and one continuous proposition
# ('X<id>', the unit square [x, x+1] x [y, y+1]) per road cell.
env_vars = {}
cont_props = {}
for x in xrange(0, roadLength):
    for y in xrange(0, roadWidth):
        id = str((y*roadLength)+x)
        obs = 'obs' + id
        cell = 'X' + id
        env_vars[obs] = 'boolean'
        cont_props[cell] = Polytope(array([[1., 0.], [-1., 0.], [0., 1.], [0., -1.]]),
                                    array([[x+1.], [-float(x)], [y+1.], [-float(y)]]))
#@varprop_end@
########################################
# Specification
########################################
#@spec@
# GR(1) specification, built up as conjunctions of string clauses.
spec = GRSpec()
spec.env_init='' # Hack to use GRSpec in an outdated way.
spec.env_safety=''
spec.env_prog=''
spec.sys_init=''
spec.sys_safety=''
spec.sys_prog=''
# Cells in the leftmost column, one per lane.
init_cells = range(0, roadLength*(roadWidth-1)+1, roadLength)
# Assumption on the initial state
for id in xrange(0, roadLength*roadWidth):
    if (not id in init_cells):
        if (len(spec.sys_init) > 0):
            spec.sys_init += ' & '
        spec.sys_init += '!X' + str(id)
spec.sys_init = '(' + spec.sys_init + ')'
# If started in the left lane, then there is an obstacle in the right lane.
for x in xrange(0, roadLength):
    cell = ''
    for y in xrange(int(math.floor(roadWidth/2)), roadWidth):
        if (len(cell) > 0):
            cell += ' | '
        cell += 'X' + str(y*roadLength + x)
    obs = ''
    for obsx in xrange(max([0, x-1]), min([roadLength, x+2])):
        if (len(obs) > 0):
            obs += ' | '
        obs += 'obs' + str(obsx)
    if (len(spec.sys_init) > 0):
        spec.sys_init += ' &\n\t'
    spec.sys_init += '((' + cell + ') -> (' + obs + '))'
for id in init_cells:
    obs = 'obs' + str(id)
    cell = 'X' + str(id)
    # The robot does not collide with an obstacle
    if (len(spec.sys_init) > 0):
        spec.sys_init += ' &\n\t'
    spec.sys_init += '(' + cell + ' -> !' + obs + ')'
    # The robot is not surrounded by obstacles
    spec.sys_init += ' &\n\t'
    spec.sys_init += '(' + cell + ' -> !(' + 'obs' + str(id+1)
    if (math.floor(id/roadLength) < roadWidth - 1):
        spec.sys_init += ' & obs' + str(id + roadLength)
    if (math.floor(id/roadLength) > 0):
        spec.sys_init += ' & obs' + str(id - roadLength)
    spec.sys_init += '))'
# Assumption on the environment
# Obstacle is always detected before the robot gets too close to it
for x in xrange(0,roadLength):
    cell = ''
    for j in xrange(max([0, x-dpopup]), min([roadLength, x+dpopup+1])):
        for k in xrange(0, roadWidth):
            if (len(cell) > 0):
                cell += ' | '
            cell += 'X' + str(k*roadLength + j)
    for k in xrange(0, roadWidth):
        obs = 'obs' + str(k*roadLength+x)
        if (len(spec.env_safety) > 0):
            spec.env_safety += ' &\n\t'
        spec.env_safety += '(((' + cell +') & !' + obs + ') -> next(!' + obs + '))'
# Sensing range: no obstacle may exist more than dsr cells away from the robot.
for x in xrange(0,roadLength):
    cell = ''
    for y in xrange(0,roadWidth):
        if (len(cell) > 0):
            cell += ' | '
        cell += 'X' + str(y*roadLength + x)
    obs = ''
    for j in xrange(x+dsr, roadLength):
        for k in xrange(0, roadWidth):
            if (len(obs) > 0):
                obs += ' & '
            obs += '!obs' + str(k*roadLength + j)
    for j in xrange(0, x-dsr+1):
        for k in xrange(0, roadWidth):
            if (len(obs) > 0):
                obs += ' & '
            obs += '!obs' + str(k*roadLength + j)
    if (len(obs) > 0):
        if (len(spec.env_safety) > 0):
            spec.env_safety += ' &\n\t'
        spec.env_safety += '((' + cell + ') -> (' + obs + '))'
# The road is not blocked
for i in xrange(0, roadLength):
    for j in xrange(max([0, i-1]), min([i+2, roadLength])):
        for k in xrange(max([0,j-1]), min([j+2,roadLength])):
            if (len(spec.env_safety) > 0):
                spec.env_safety += ' &\n\t'
            spec.env_safety += '!(obs' + str(i) + ' & obs' + str(roadLength+j) + \
                ' & obs' + str(2*roadLength+k) + ')'
for x in xrange(0, roadLength-2):
    if (len(spec.env_safety) > 0):
        spec.env_safety += ' &\n\t'
    spec.env_safety += '((obs' + str(roadLength+x) + ' & obs' + str(roadLength+x+1) + \
        ') -> (!obs' + str(x+2) + ' & !obs' + str(roadLength+x+2) + \
        ' & !obs' + str(2*roadLength+x+2) + '))'
# Obstacle does not disappear
for x in xrange(0, roadLength):
    for y in xrange(0, roadWidth):
        obs = 'obs' + str((y*roadLength)+x)
        if (len(spec.env_safety) > 0):
            spec.env_safety += ' &\n\t'
        spec.env_safety += '(' + obs + ' -> next(' + obs + '))'
# Guarantee
# No collision
for x in xrange(0, roadLength):
    for y in xrange(0, roadWidth):
        id = str((y*roadLength)+x)
        obs = 'obs' + id
        cell = 'X' + id
        if (len(spec.sys_safety) > 0):
            spec.sys_safety += ' &\n\t'
        spec.sys_safety += '(' + obs + ' -> !' + cell + ')'
# Stay in the right lane unless the lane is blocked
for x in xrange(0, roadLength):
    cell = ''
    for y in xrange(int(math.floor(roadWidth/2)), roadWidth):
        if (len(cell) > 0):
            cell += ' | '
        cell += 'X' + str(y*roadLength + x)
    obs = ''
    for obsx in xrange(max([0, x-1]), min([roadLength, x+2])):
        for obsy in xrange(0, int(math.floor(roadWidth/2)+1)):
            if (len(obs) > 0):
                obs += ' | '
            obs += 'obs' + str(obsy*roadLength + obsx)
    if (len(spec.sys_safety) > 0):
        spec.sys_safety += ' &\n\t'
    spec.sys_safety += '((' + cell + ') -> (' + obs + '))'
# Get to the end of the road (progress goal: reach the last column, any lane)
final_cells = range(roadLength-1, roadLength*roadWidth, roadLength)
cell = ''
for fcell in final_cells:
    if (len(cell) > 0):
        cell += ' | '
    cell += 'X' + str(fcell)
spec.sys_prog = '(' + cell + ')'
#@spec_end@
#@prob@
# Global receding-horizon temporal logic problem; the short-horizon
# subproblems are added to it below.
rhtlpprob = RHTLPProb(shprobs=[], Phi='True', discretize=False,
                      env_vars = env_vars, sys_disc_vars = {},
                      disc_props = {}, cont_props = cont_props, spec = spec,
                      sp_name = os.path.join('tmpspec', 'a_car'))
#@prob_end@
########################################
# Short Horizon Problems
########################################
#@shorthoriz@
# Build one short-horizon problem W<x_init> per road column.
for x_init in xrange(0, roadLength):
    print 'adding W' + str(x_init)
    # Environment variables: only obstacles within the horizon window.
    env_vars = {}
    for y in xrange(0, roadWidth):
        for x in xrange(x_init, x_init+horizon):
            varname = 'obs' + str((y*roadLength)+x)
            env_vars[varname] = 'boolean'
    # System continuous variable
    sys_cont_vars = ['x', 'y']
    cont_state_space = Polytope(array([[1., 0.], [-1., 0.], [0., 1.], [0., -1.]]), \
                                array([[float(min([x_init+horizon, roadLength]))], \
                                       [float(-x_init)], \
                                       [float(roadWidth)], \
                                       [0.]]))
    # W: the robot occupies some cell in column x_init (any lane).
    initCells = range(x_init, x_init+roadLength*(roadWidth-1)+1, roadLength)
    W = ''
    for i in initCells:
        if (len(W) > 0):
            W += ' | '
        W += 'X' + str(i)
    print W
    # Phi: invariant assumed to hold when entering this subproblem.
    Phi = ''
    for id in initCells:
        obs = 'obs' + str(id)
        cell = 'X' + str(id)
        # The robot does not collide with an obstacle
        if (len(Phi) > 0):
            Phi += ' &\n\t'
        Phi += '(' + cell + ' -> !' + obs + ')'
        # The robot is not surrounded by obstacles
        if (id % roadLength < roadLength-1):
            Phi += ' &\n\t'
            Phi += '(' + cell + ' -> !(' + 'obs' + str(id+1)
            if (math.floor(id/roadLength) < roadWidth - 1):
                Phi += ' & obs' + str(id + roadLength)
            if (math.floor(id/roadLength) > 0):
                Phi += ' & obs' + str(id - roadLength)
            Phi += '))'
        # If started in the left lane, then there is an obstacle in the right lane.
        if (math.floor(id/roadLength) >= math.floor(roadWidth/2)):
            Phi += ' &\n\t'
            obs = ''
            x = id % roadLength
            for obsx in xrange(max([0, x-1]), min([roadLength, x+2])):
                for obsy in xrange(0, int(math.floor(roadWidth/2)+1)):
                    if (len(obs) > 0):
                        obs += ' | '
                    obs += 'obs' + str(obsy*roadLength + obsx)
            Phi += '(' + cell + ' -> (' + obs + '))'
    rhtlpprob.addSHProb(ShortHorizonProb(W=W, FW=[], Phi=Phi,
                                         global_prob = rhtlpprob,
                                         env_vars = env_vars,
                                         sys_disc_vars = {},
                                         disc_props = {},
                                         cont_state_space=cont_state_space,
                                         cont_props = cont_props,
                                         sys_dyn = sys_dyn,
                                         sp_name = os.path.join('tmpspec',
                                                                'W'+str(x_init))))
#@shorthoriz_end@
#@setF@
# Chain the subproblems: each W points to the W one horizon ahead
# (clamped to the last column) as its "next" goal set.
for x_init in xrange(0, roadLength):
    FWind = min([roadLength-1, x_init+horizon-1])
    rhtlpprob.shprobs[x_init].setFW(FW=rhtlpprob.shprobs[FWind], update=True, verbose=3)
#@setF_end@
# Validate whether rhtpprob is valid
#@valid@
# First validation pass; see the commented transcript below for the
# state it reports as not covered by any W.
ret = rhtlpprob.validate()
#@valid_end@
# The result of the above validate() call is
# state
# (= X25 false)
# (= X24 false)
# (= X27 false)
# (= X26 false)
# (= X23 false)
# (= X22 false)
# (= X21 false)
# (= X20 false)
# (= X8 false)
# (= X9 false)
# (= X2 false)
# (= X3 false)
# (= X0 false)
# (= X1 false)
# (= X6 false)
# (= X7 false)
# (= X4 false)
# (= X5 false)
# (= X18 false)
# (= X19 false)
# (= X10 false)
# (= X11 false)
# (= X12 false)
# (= X13 false)
# (= X14 false)
# (= X15 false)
# (= X16 false)
# (= X17 false)
# (= X29 false)
# (= X28 false)
# is not in any W
# Since we know that we don't have to deal with the above state, we will exclude it.
#@exclude@
# Re-validate while excluding the all-cells-false state reported above,
# since the robot is always in exactly one cell.
excluded_state = {}
for id in xrange(0, roadLength*roadWidth):
    excluded_state['X'+str(id)] = False
ret = rhtlpprob.validate(excluded_state=excluded_state)
print ret
#@exclude_end@
# Synthesize automatons for each short horizon problem.
aut_list = [shprob.synthesizePlannerAut() for shprob in rhtlpprob.shprobs]
# Remove dead-end states from automata.
for aut in aut_list:
aut.trimDeadStates()
if raw_input("Do you want to open in Gephi? (y/n)") == 'y':
# Generate graph of all automatons.
destfile = 'acar_example.gexf'
grsim.writeStatesToFile(aut_list, destfile)
# Display graph.
try:
print "Opening GEXF file in Gephi."
call(["gephi", destfile])
except:
print "Failed to open " + destfile + " in Gephi. Try:\n\n" + \
"gephi " + destfile + "\n\n"
| bsd-3-clause |
Sabayon/entropy | lib/tests/db.py | 1 | 46183 | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '.')
sys.path.insert(0, '../')
import unittest
import os
import time
import threading
from entropy.client.interfaces import Client
from entropy.const import etpConst, const_convert_to_unicode, \
const_convert_to_rawstring, const_mkstemp
from entropy.output import set_mute
from entropy.core.settings.base import SystemSettings
from entropy.misc import ParallelTask
from entropy.db import EntropyRepository
import tests._misc as _misc
import entropy.dep
import entropy.tools
class EntropyRepositoryTest(unittest.TestCase):
    def setUp(self):
        """Create the Entropy Client plus two in-memory test repositories."""
        self.Client = Client(installed_repo = -1, indexing = False,
            xcache = False, repo_validation = False)
        self.Spm = self.Client.Spm()
        self.test_db_name = "test_suite"
        self.test_db = self.__open_test_db(":memory:")
        # GenericRepository supports package masking if this property is set
        self.test_db.enable_mask_filter = True
        self.test_db2 = self.__open_test_db(":memory:")
        # GenericRepository supports package masking if this property is set
        self.test_db2.enable_mask_filter = True
        self._settings = SystemSettings()
        # since package files have been produced on amd64, add the same
        # arch to etpConst['keywords'] to avoid atomMatch failures on x86
        # and arm/other arches.
        self._original_keywords = etpConst['keywords'].copy()
        etpConst['keywords'].add("~amd64")
        etpConst['keywords'].add("amd64")
    def tearDown(self):
        """
        tearDown is run after each test: close both repositories, release
        the Client, and restore the keyword set patched in setUp().
        """
        self.test_db.close()
        self.test_db2.close()
        # calling destroy() and shutdown()
        # need to call destroy() directly to remove all the SystemSettings
        # plugins because shutdown() doesn't, since it's meant to be called
        # right before terminating the process
        self.Client.destroy()
        self.Client.shutdown()
        etpConst['keywords'] = self._original_keywords.copy()
def __open_test_db(self, tmp_path):
return self.Client.open_temp_repository(name = self.test_db_name,
temp_file = tmp_path)
    def test_db_clearcache(self):
        """clearCache() must be callable on a fresh repository without raising."""
        self.test_db.clearCache()
    def test_treeupdates_config_files_update(self):
        """
        Applying tree-updates "move"/"slotmove" actions to package config
        files must produce ._cfg0000_* proposal files with known checksums.
        """
        files = _misc.get_config_files_updates_test_files()
        actions = [
            "move app-admin/anaconda app-admin/fuckaconda",
            "slotmove app-admin/anaconda 0 2", # unsupported
            "move media-sound/xmms2 media-sound/deadashell",
            "move media-sound/pulseaudio media-sound/idiotaudio",
            "move sys-auth/pambase sys-auth/fuckbase",
            "move sys-devel/gcc sys-devel/fuckcc"
        ]
        # expected md5 of each generated proposal file, keyed by basename
        config_map = {
            '._cfg0000_packages.db.critical': 'faa50df927223bb6de967e33179803b7',
            '._cfg0000_packages.db.system_mask': 'b7f536785e315f7c104c7185b0bfe608',
            '._cfg0000_packages.server.dep_blacklist.test': '8180f9e89d57f788e5b4bab05e30d447',
            '._cfg0000_packages.server.dep_rewrite.test': 'c31d66b7f03c725e586a6e22941b8082',
        }
        for file_path in files:
            updated_files = self.test_db._runConfigurationFilesUpdate(actions,
                [file_path])
            # exactly one proposal file per input file
            self.assertTrue(len(updated_files) == 1)
            updated_file = list(updated_files)[0]
            md5sum = entropy.tools.md5sum(updated_file)
            os.remove(updated_file)
            updated_name = os.path.basename(updated_file)
            self.assertEqual(config_map[updated_name], md5sum)
    def test_treeupdates_actions(self):
        """
        insert/retrieve/remove round trip for tree-updates actions;
        retrieval returns the sorted action strings while the full listing
        carries (id, repository, action, branch, timestamp) rows.
        """
        self.assertEqual(self.test_db.listAllTreeUpdatesActions(), tuple())
        updates = (
            ('move media-libs/x264-svn media-libs/x264', '2020', '1210199116.46'),
            ('slotmove x11-libs/lesstif 2.1 0', '2020', '1210753863.16')
        )
        # rows come back with an auto-assigned id and the repository name
        updates_out = (
            (1, 'test_suite', 'move media-libs/x264-svn media-libs/x264', '2020', '1210199116.46'),
            (2, 'test_suite', 'slotmove x11-libs/lesstif 2.1 0', '2020', '1210753863.16')
        )
        actions = tuple(sorted(['move media-libs/x264-svn media-libs/x264',
            'slotmove x11-libs/lesstif 2.1 0']))
        self.test_db.insertTreeUpdatesActions(updates, self.test_db_name)
        db_actions = self.test_db.retrieveTreeUpdatesActions(self.test_db_name)
        self.assertEqual(actions, db_actions)
        self.assertEqual(updates_out, self.test_db.listAllTreeUpdatesActions())
        # removal empties the action list again
        self.test_db.removeTreeUpdatesActions(self.test_db_name)
        db_actions = self.test_db.retrieveTreeUpdatesActions(self.test_db_name)
        self.assertEqual(tuple(), db_actions)
    def test_contentsafety(self):
        """searchContentSafety() returns the stored sha256/mtime record for a path."""
        test_pkg = _misc.get_test_package()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        _misc.clean_pkg_metadata(db_data)
        _misc.clean_pkg_metadata(data)
        self.assertEqual(data, db_data)
        path = "/usr/include/zconf.h"
        content_safety = self.test_db.searchContentSafety(path)
        # expected values come straight from the extracted metadata
        self.assertEqual(content_safety, (
            {'package_id': 1,
             'sha256': data['content_safety'][path]['sha256'],
             'path': '/usr/include/zconf.h',
             'mtime': data['content_safety'][path]['mtime']},)
        )
def test_needed(self):
test_pkg1 = _misc.get_test_package()
test_pkg2 = _misc.get_test_package4()
for test_pkg in (test_pkg1, test_pkg2):
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
db_needed = self.test_db.retrieveNeededLibraries(
idpackage)
self.assertEqual(db_needed, data['needed_libs'])
db_needed = self.test_db.retrieveNeededLibraries(idpackage)
self.assertEqual(db_needed, data['needed_libs'])
    def test_dependencies(self):
        """
        retrieveDependencies(extended=True) must return the full set of
        (dependency string, dependency type id) pairs for test package #3.
        """
        test_pkg = _misc.get_test_package3()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        _misc.clean_pkg_metadata(db_data)
        _misc.clean_pkg_metadata(data)
        self.assertEqual(data, db_data)
        pkg_deps = sorted(
            self.test_db.retrieveDependencies(
                idpackage, extended = True))
        # each dependency appears with type ids 0 and 3 (ids defined by
        # etpConst['dependency_type_ids'] — semantics not visible here)
        orig_pkg_deps = sorted([
            ('=dev-libs/apr-1*', 0),
            ('dev-libs/openssl', 0),
            ('dev-libs/libpcre', 0),
            ('=dev-libs/apr-util-1*', 0),
            ('=dev-libs/apr-1*', 3),
            ('dev-libs/openssl', 3),
            ('dev-libs/libpcre', 3),
            ('=dev-libs/apr-util-1*', 3),
            ])
        self.assertEqual(pkg_deps, orig_pkg_deps)
def test_use_dependencies(self):
test_pkg = _misc.get_test_entropy_package6()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
useflags = self.test_db.retrieveUseflags(idpackage)
self.assertTrue("gtk" not in useflags)
self.assertTrue("-gtk" in useflags)
self.assertTrue("-kde" in useflags)
self.assertTrue("-debug" in useflags)
self.assertTrue("-examples" in useflags)
    def test_content(self):
        """
        retrieveContent(extended=True, order_by="file") must return the
        whole (path, file type) content list sorted by path.
        """
        test_pkg = _misc.get_test_package3()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        _misc.clean_pkg_metadata(db_data)
        _misc.clean_pkg_metadata(data)
        self.assertEqual(data, db_data)
        content = self.test_db.retrieveContent(
            idpackage, extended = True, order_by="file")
        # expected entries: 'obj' regular files, 'sym' symlinks, 'dir' dirs
        orig_content = (('/usr/sbin/ab2-ssl', 'sym'),
            ('/usr/sbin/logresolve2', 'sym'),
            ('/usr/sbin/log_server_status', 'obj'),
            ('/usr/sbin/checkgid2', 'sym'),
            ('/usr/sbin/htdbm', 'obj'),
            ('/usr/sbin/rotatelogs2', 'sym'),
            ('/usr/share/man/man1/htpasswd.1.bz2', 'obj'),
            ('/usr/sbin/ab-ssl', 'sym'),
            ('/usr/sbin/htcacheclean2', 'sym'),
            ('/usr/sbin/split-logfile2', 'sym'),
            ('/usr/share/man/man8', 'dir'),
            ('/usr/sbin/htcacheclean', 'obj'),
            ('/usr/sbin', 'dir'), ('/usr/sbin/ab', 'obj'),
            ('/usr/share/doc/apache-tools-2.2.11/CHANGES.bz2', 'obj'),
            ('/usr/sbin/htpasswd', 'obj'), ('/usr', 'dir'),
            ('/usr/bin/htpasswd', 'sym'),
            ('/usr/share/man/man1/htdigest.1.bz2', 'obj'),
            ('/usr/sbin/dbmmanage', 'obj'), ('/usr/share', 'dir'),
            ('/usr/share/man/man1', 'dir'), ('/usr/sbin/htdbm2', 'sym'),
            ('/usr/sbin/log_server_status2', 'sym'),
            ('/usr/share/man/man1/dbmmanage.1.bz2', 'obj'),
            ('/usr/share/man', 'dir'), ('/usr/sbin/htpasswd2', 'sym'),
            ('/usr/sbin/htdigest2', 'sym'), ('/usr/sbin/httxt2dbm2', 'sym'),
            ('/usr/bin', 'dir'), ('/usr/sbin/logresolve', 'obj'),
            ('/usr/share/doc', 'dir'), ('/usr/share/man/man8/ab.8.bz2', 'obj'),
            ('/usr/share/man/man8/logresolve.8.bz2', 'obj'),
            ('/usr/share/man/man8/htcacheclean.8.bz2', 'obj'),
            ('/usr/sbin/rotatelogs', 'obj'), ('/usr/sbin/checkgid', 'obj'),
            ('/usr/share/man/man1/htdbm.1.bz2', 'obj'),
            ('/usr/sbin/dbmmanage2', 'sym'), ('/usr/sbin/httxt2dbm', 'obj'),
            ('/usr/sbin/split-logfile', 'obj'),
            ('/usr/sbin/htdigest', 'obj'),
            ('/usr/share/doc/apache-tools-2.2.11', 'dir'),
            ('/usr/sbin/ab2', 'sym'),
            ('/usr/share/man/man8/rotatelogs.8.bz2', 'obj')
        )
        self.assertEqual(
            content,
            tuple(sorted(orig_content, key = lambda x: x[0])))
def test_db_creation(self):
self.assertTrue(isinstance(self.test_db, EntropyRepository))
self.assertEqual(self.test_db_name, self.test_db.repository_id())
self.assertTrue(self.test_db._doesTableExist('baseinfo'))
self.assertTrue(self.test_db._doesTableExist('extrainfo'))
    def test_db_metadata_handling(self):
        """
        contentDiff() between two repositories must skip paths shared by
        both packages and report only those unique to the first; also
        validates the getVersioningData/getStrictScopeData/getScopeData/
        getTriggerData aggregate accessors against single-field getters.
        """
        # content entries injected into BOTH packages, hence never in diff
        test_entry = {
            const_convert_to_unicode("/path/to/foo", "utf-8"): \
                const_convert_to_unicode("dir", "utf-8"),
            const_convert_to_unicode("/path/to/foo/foo", "utf-8"): \
                const_convert_to_unicode("obj", "utf-8"),
        }
        test_pkg = _misc.get_test_package()
        data = self.Spm.extract_package_metadata(test_pkg)
        data['content'].update(test_entry.copy())
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        test_pkg2 = _misc.get_test_package2()
        data2 = self.Spm.extract_package_metadata(test_pkg2)
        data2['content'].update(test_entry.copy())
        idpackage2 = self.test_db2.addPackage(data2)
        db_data2 = self.test_db2.getPackageData(idpackage2)
        cont_diff = self.test_db.contentDiff(idpackage, self.test_db2,
            idpackage2)
        for key in test_entry:
            try:
                self.assertTrue(key not in cont_diff)
            except AssertionError:
                # print the offending path before re-raising, to ease debug
                print(key)
                raise
        # reference diff computed in pure Python over the two content sets
        py_diff = sorted([x for x in db_data['content'] if x not in \
            db_data2['content']])
        self.assertEqual(sorted(cont_diff), py_diff)
        orig_diff = ['/lib64', '/lib64/libz.so', '/lib64/libz.so.1',
            '/lib64/libz.so.1.2.3', '/usr/include', '/usr/include/zconf.h',
            '/usr/include/zlib.h', '/usr/lib64/libz.a',
            '/usr/lib64/libz.so', '/usr/share/doc/zlib-1.2.3-r1',
            '/usr/share/doc/zlib-1.2.3-r1/ChangeLog.bz2',
            '/usr/share/doc/zlib-1.2.3-r1/FAQ.bz2',
            '/usr/share/doc/zlib-1.2.3-r1/README.bz2',
            '/usr/share/doc/zlib-1.2.3-r1/algorithm.txt.bz2',
            '/usr/share/man', '/usr/share/man/man3',
            '/usr/share/man/man3/zlib.3.bz2'
        ]
        orig_diff = [const_convert_to_unicode(x, 'utf-8') for x in orig_diff]
        self.assertEqual(orig_diff, py_diff)
        # aggregate accessors must agree with the single-field getters
        versioning_data = self.test_db.getVersioningData(idpackage)
        dbverdata = (self.test_db.retrieveVersion(idpackage),
            self.test_db.retrieveTag(idpackage),
            self.test_db.retrieveRevision(idpackage),)
        self.assertEqual(versioning_data, dbverdata)
        strict_scope = self.test_db.getStrictScopeData(idpackage)
        dbverdata = (self.test_db.retrieveAtom(idpackage),
            self.test_db.retrieveSlot(idpackage),
            self.test_db.retrieveRevision(idpackage),)
        self.assertEqual(strict_scope, dbverdata)
        scope_data = self.test_db.getScopeData(idpackage)
        dbverdata = (
            self.test_db.retrieveAtom(idpackage),
            self.test_db.retrieveCategory(idpackage),
            self.test_db.retrieveName(idpackage),
            self.test_db.retrieveVersion(idpackage),
            self.test_db.retrieveSlot(idpackage),
            self.test_db.retrieveTag(idpackage),
            self.test_db.retrieveRevision(idpackage),
            self.test_db.retrieveBranch(idpackage),
            self.test_db.retrieveApi(idpackage),
        )
        self.assertEqual(scope_data, dbverdata)
        trigger_info = self.test_db.getTriggerData(idpackage)
        trigger_keys = ['version', 'etpapi', 'slot', 'cxxflags', 'cflags',
            'chost', 'atom', 'category', 'name', 'versiontag', 'content',
            'trigger', 'branch', 'spm_phases', 'revision']
        self.assertEqual(sorted(trigger_keys), sorted(trigger_info.keys()))
def test_db_insert_compare_match_provide(self):
test_pkg = _misc.get_test_entropy_package_provide()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
    def test_db_cache(self):
        """
        atomMatch() cache behaviour: a fetch before storing returns None,
        and a stored entry is returned after the cacher has synced.
        """
        test_pkg = _misc.get_test_entropy_package_provide()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        # enable cache
        self.test_db._caching = True
        key = data['category'] + "/" + data['name']
        from entropy.cache import EntropyCacher
        cacher = EntropyCacher()
        started = cacher.is_started()
        cacher.start()
        # avoid race conditions, unittest bug
        time.sleep(2)
        # cache miss: nothing stored yet for this key/flags combination
        cached = self.test_db._EntropyRepositoryBase__atomMatchFetchCache(
            key, True, False, False, None, None, False, False, True)
        self.assertTrue(cached is None)
        # now store
        self.test_db._EntropyRepositoryBase__atomMatchStoreCache(
            key, True, False, False, None, None, False, False, True,
            result = (123, 0)
        )
        cacher.sync()
        # the same fetch must now return the stored (123, 0) result
        cached = self.test_db._EntropyRepositoryBase__atomMatchFetchCache(
            key, True, False, False, None, None, False, False, True)
        self.assertEqual(cached, (123, 0))
        # only stop the cacher if this test was the one that started it
        if not started:
            cacher.stop()
def test_db_insert_compare_match(self):
# insert/compare
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
# match
nf_match = (-1, 1)
f_match = (1, 0)
pkg_atom = _misc.get_test_package_atom()
pkg_name = _misc.get_test_package_name()
self.assertEqual(nf_match, self.test_db.atomMatch("slib"))
self.assertEqual(f_match,
self.test_db.atomMatch(pkg_name))
self.assertEqual(f_match,
self.test_db.atomMatch(pkg_atom))
# test package masking
masking_validation = \
self.Client.ClientSettings()['masking_validation']['cache']
f_match_mask = (1, self.test_db_name,)
self._settings['live_packagemasking']['mask_matches'].add(
f_match_mask)
masking_validation.clear()
self.assertEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
self._settings['live_packagemasking']['mask_matches'].discard(
f_match_mask)
masking_validation.clear()
self.assertNotEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
# now test multimatch
idpackage = self.test_db.addPackage(db_data)
results, rc = self.test_db.atomMatch(pkg_name, multiMatch = True)
self.assertEqual(2, len(results))
self.assertTrue(isinstance(results, set))
self.assertTrue(rc == 0)
results, rc = self.test_db.atomMatch(pkg_name+"foo", multiMatch = True)
self.assertEqual(0, len(results))
self.assertTrue(isinstance(results, set))
self.assertTrue(rc == 1)
def test_db_handle_unicode_puppet_agent(self):
test_pkg = _misc.get_test_package_puppet_agent()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
self.test_db.removePackage(idpackage)
def test_db_handle_unicode_ca_certs(self):
test_pkg = _misc.get_test_package_ca_certs()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
self.test_db.removePackage(idpackage)
    def test_db_insert_compare_match_utf(self):
        """
        Round trip metadata carrying raw UTF-8 escape sequences in
        changelog/license/content_safety, then exercise atomMatch() and
        live package masking.
        """
        # insert/compare
        test_pkg = _misc.get_test_package2()
        data = self.Spm.extract_package_metadata(test_pkg)
        # Portage stores them this way
        unicode_msg = const_convert_to_unicode(
            "#248083).\n\n 06 Feb 2009; Ra\xc3\xbal Porcel")
        data['changelog'] = unicode_msg
        data['license'] = const_convert_to_unicode('GPL-2')
        data['licensedata'] = {
            const_convert_to_unicode('GPL-2'): unicode_msg,
        }
        # a non-ASCII path as content_safety key must survive the round trip
        data['content_safety'] = {
            unicode_msg: {
                'sha256': "abcdbbcdbcdbcdbcdbcdbcdbcbd",
                'mtime': 1024.0,
            }
        }
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        _misc.clean_pkg_metadata(db_data)
        _misc.clean_pkg_metadata(data)
        self.assertEqual(data, db_data)
        # match
        nf_match = (-1, 1)
        f_match = (1, 0)
        pkg_atom = _misc.get_test_package_atom2()
        pkg_name = _misc.get_test_package_name2()
        self.assertEqual(nf_match, self.test_db.atomMatch("slib"))
        self.assertEqual(f_match,
            self.test_db.atomMatch(pkg_name))
        self.assertEqual(f_match,
            self.test_db.atomMatch(pkg_atom))
        # test package masking
        masking_validation = self.Client.ClientSettings(
            )['masking_validation']['cache']
        f_match_mask = (1, self.test_db_name,)
        self._settings['live_packagemasking']['mask_matches'].add(
            f_match_mask)
        masking_validation.clear()
        self.assertEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
        self._settings['live_packagemasking']['mask_matches'].discard(
            f_match_mask)
        masking_validation.clear()
        self.assertNotEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
def test_db_insert_compare_match_utf2(self):
# insert/compare
test_pkg = _misc.get_test_package3()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
# match
nf_match = (-1, 1)
f_match = (1, 0)
pkg_atom = _misc.get_test_package_atom3()
pkg_name = _misc.get_test_package_name3()
self.assertEqual(nf_match, self.test_db.atomMatch("slib"))
self.assertEqual(f_match,
self.test_db.atomMatch(pkg_name))
self.assertEqual(f_match,
self.test_db.atomMatch(pkg_atom))
# test package masking
masking_validation = self.Client.ClientSettings(
)['masking_validation']['cache']
f_match_mask = (1, self.test_db_name,)
self._settings['live_packagemasking']['mask_matches'].add(
f_match_mask)
masking_validation.clear()
self.assertEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
self._settings['live_packagemasking']['mask_matches'].discard(
f_match_mask)
masking_validation.clear()
self.assertNotEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
    def test_db_insert_compare_match_mime(self):
        """
        Round trip package #4 metadata, verify the stored provided_mime
        set, then exercise atomMatch() with live package masking.
        """
        # insert/compare
        test_pkg = _misc.get_test_package4()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        db_data = self.test_db.getPackageData(idpackage)
        _misc.clean_pkg_metadata(db_data)
        _misc.clean_pkg_metadata(data)
        self.assertEqual(data, db_data)
        # MIME types this test package declares handlers for
        known_mime = set(['application/ogg', 'audio/x-oggflac', 'audio/x-mp3',
            'audio/x-pn-realaudio', 'audio/mpeg', 'application/x-ogm-audio',
            'audio/vorbis', 'video/x-ms-asf', 'audio/x-speex', 'audio/x-scpls',
            'audio/x-vorbis', 'audio/mpegurl', 'audio/aac', 'audio/x-ms-wma',
            'audio/ogg', 'audio/x-mpegurl', 'audio/mp4',
            'audio/vnd.rn-realaudio', 'audio/x-vorbis+ogg', 'audio/x-musepack',
            'audio/x-flac', 'audio/x-wav'])
        self.assertEqual(db_data['provided_mime'], known_mime)
        # match
        nf_match = (-1, 1)
        f_match = (1, 0)
        pkg_atom = _misc.get_test_package_atom4()
        pkg_name = _misc.get_test_package_name4()
        self.assertEqual(nf_match, self.test_db.atomMatch("slib"))
        self.assertEqual(f_match,
            self.test_db.atomMatch(pkg_name))
        self.assertEqual(f_match,
            self.test_db.atomMatch(pkg_atom))
        # test package masking
        masking_validation = self.Client.ClientSettings(
            )['masking_validation']['cache']
        f_match_mask = (1, self.test_db_name,)
        self._settings['live_packagemasking']['mask_matches'].add(
            f_match_mask)
        masking_validation.clear()
        self.assertEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
        self._settings['live_packagemasking']['mask_matches'].discard(
            f_match_mask)
        masking_validation.clear()
        self.assertNotEqual((-1, 1), self.test_db.atomMatch(pkg_atom))
def test_db_insert_compare_match_tag(self):
# insert/compare
test_pkg = _misc.get_test_entropy_package_tag()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(db_data)
_misc.clean_pkg_metadata(data)
self.assertEqual(data, db_data)
# match
f_match = (1, 0)
for atom, pkg_id, branch in self.test_db.listAllPackages():
pkg_key = entropy.dep.dep_getkey(atom)
self.assertEqual(f_match, self.test_db.atomMatch(pkg_key))
self.assertEqual(f_match, self.test_db.atomMatch(atom))
self.assertEqual(f_match, self.test_db.atomMatch("~"+atom))
    def test_db_multithread(self):
        """
        Concurrent addPackage()/getPackageData() from four threads must
        succeed and leave per-thread cursors that _cleanup_all() releases.
        """
        # insert/compare
        test_pkg = _misc.get_test_entropy_package_tag()
        data = self.Spm.extract_package_metadata(test_pkg)
        _misc.clean_pkg_metadata(data)
        def handle_pkg(xdata):
            # executed in a worker thread; each add/get must round trip
            idpackage = self.test_db.addPackage(xdata)
            db_data = self.test_db.getPackageData(idpackage)
            _misc.clean_pkg_metadata(db_data)
            self.assertEqual(xdata, db_data)
        t1 = ParallelTask(handle_pkg, data)
        t2 = ParallelTask(handle_pkg, data)
        t3 = ParallelTask(handle_pkg, data)
        t4 = ParallelTask(handle_pkg, data)
        t1.start()
        t2.start()
        t3.start()
        t4.start()
        t1.join()
        t2.join()
        t3.join()
        t4.join()
        # the worker threads left cursors behind...
        cur_cache = self.test_db._cursor_pool().keys()
        self.assertTrue(len(cur_cache) > 0)
        # ...which _cleanup_all() must drop entirely
        self.test_db._cleanup_all()
        cur_cache = self.test_db._cursor_pool().keys()
        self.assertEqual(len(cur_cache), 0)
    def test_db_close_all(self):
        """
        This tests if EntropyRepository.close() really closes
        all the resources, including those allocated by the
        main thread.
        """
        test_pkg = _misc.get_test_entropy_package_tag()
        data = self.Spm.extract_package_metadata(test_pkg)
        _misc.clean_pkg_metadata(data)
        # shared state handed between the three worker threads below
        _tmp_data = {
            "path": None,
            "db": None,
            "T1": False,
            "T2": False,
            "T3": False,
        }
        def handle_pkg(_tmp_data, xdata):
            # T1: add the package, export the repository to a file and
            # re-import it into a fresh on-disk repository
            idpackage = self.test_db.addPackage(xdata)
            db_data = self.test_db.getPackageData(idpackage)
            _misc.clean_pkg_metadata(db_data)
            self.assertEqual(xdata, db_data)
            self.test_db.commit()
            fd, buf_file = const_mkstemp()
            os.close(fd)
            buf = open(buf_file, "wb")
            set_mute(True)
            self.test_db.exportRepository(buf)
            set_mute(False)
            buf.flush()
            buf.close()
            fd, buf_file_db = const_mkstemp()
            os.close(fd)
            self.test_db.importRepository(buf_file, buf_file_db)
            os.remove(buf_file)
            db = self.Client.open_generic_repository(buf_file_db)
            self.assertTrue(db is not None)
            pkg_ids = db.listAllPackageIds()
            self.assertTrue(1 in pkg_ids)
            _tmp_data['path'] = buf_file_db
            _tmp_data['db'] = db
            _tmp_data['T1'] = True
        def select_pkg(_tmp_data, t1):
            t1.join() # wait for t1 to finish
            # T2: read from the repository opened by T1
            pkg_ids = _tmp_data['db'].listAllPackageIds()
            self.assertTrue(1 in pkg_ids)
            self.assertTrue(len(pkg_ids) == 1)
            _tmp_data['T2'] = True
        def close_all(_tmp_data, t1, t2):
            t1.join()
            t2.join()
            # T3: first drop every resource except the main thread's...
            _tmp_data['db']._cleanup_all(_cleanup_main_thread=False)
            with _tmp_data['db']._cursor_pool_mutex():
                cur_cache = _tmp_data['db']._cursor_pool().keys()
                self.assertEqual(len(cur_cache), 1) # just MainThread
            # ...then close() must release the main thread's as well
            _tmp_data['db'].close()
            with _tmp_data['db']._cursor_pool_mutex():
                cur_cache = _tmp_data['db']._cursor_pool().keys()
                self.assertEqual(len(cur_cache), 0) # nothing left
            _tmp_data['T3'] = True
        t1 = ParallelTask(handle_pkg, _tmp_data, data)
        t1.name = "T1"
        t2 = ParallelTask(select_pkg, _tmp_data, t1)
        t2.name = "T2"
        t3 = ParallelTask(close_all, _tmp_data, t1, t2)
        t3.name = "T3"
        t1.start()
        t2.start()
        t1.join()
        # main thread touches the repo too, allocating its own cursor
        pkg_ids = _tmp_data['db'].listAllPackageIds()
        t2.join()
        t3.start()
        t3.join()
        # all three workers must have completed their sections
        self.assertTrue(_tmp_data['T1'] and _tmp_data['T2'] \
            and _tmp_data['T3'])
        self.assertTrue(1 in pkg_ids)
        self.assertTrue(len(pkg_ids) == 1)
        # close() must be safe to call again from the main thread
        _tmp_data['db'].close()
        with _tmp_data['db']._cursor_pool_mutex():
            cur_cache = _tmp_data['db']._cursor_pool().keys()
            self.assertEqual(len(cur_cache), 0) # nothing left
        os.remove(_tmp_data['path'])
def test_db_reverse_deps(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
test_pkg2 = _misc.get_test_package2()
data2 = self.Spm.extract_package_metadata(test_pkg2)
data['pkg_dependencies'] += ((
_misc.get_test_package_atom2(),
etpConst['dependency_type_ids']['rdepend_id']),)
data2['pkg_dependencies'] += ((
_misc.get_test_package_atom(),
etpConst['dependency_type_ids']['rdepend_id']),)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(data)
_misc.clean_pkg_metadata(db_data)
self.assertEqual(data, db_data)
idpackage2 = self.test_db.addPackage(data2)
db_data2 = self.test_db.getPackageData(idpackage2)
_misc.clean_pkg_metadata(data2)
_misc.clean_pkg_metadata(db_data2)
self.assertEqual(data2, db_data2)
rev_deps = self.test_db.retrieveReverseDependencies(idpackage)
rev_deps2 = self.test_db.retrieveReverseDependencies(idpackage2)
self.assertTrue(idpackage in rev_deps2)
self.assertTrue(idpackage2 in rev_deps)
rev_deps_t = self.test_db.retrieveReverseDependencies(idpackage,
key_slot = True)
self.assertEqual(rev_deps_t, (('app-dicts/aspell-es', '0'),))
pkg_data = self.test_db.retrieveUnusedPackageIds()
self.assertEqual(pkg_data, tuple())
def test_similar(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(data)
_misc.clean_pkg_metadata(db_data)
self.assertEqual(data, db_data)
out = self.test_db.searchSimilarPackages(_misc.get_test_package_name())
self.assertEqual(out, (1,))
def test_search(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(data)
_misc.clean_pkg_metadata(db_data)
self.assertEqual(data, db_data)
out = self.test_db.searchPackages(_misc.get_test_package_name())
self.assertEqual(out, (('sys-libs/zlib-1.2.3-r1', 1, '5'),))
out = self.test_db.searchPackages(_misc.get_test_package_name(),
slot = "0")
self.assertEqual(out, (('sys-libs/zlib-1.2.3-r1', 1, '5'),))
out = self.test_db.searchPackages(_misc.get_test_package_name(),
slot = "0", just_id = True)
self.assertEqual(out, (1,))
def test_list_packages(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
out = self.test_db.listAllPackages()
self.assertEqual(out, (('sys-libs/zlib-1.2.3-r1', 1, '5'),))
def test_spmuids(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
out = self.test_db.listAllSpmUids()
self.assertEqual(out, ((22331, 1),))
def test_list_pkg_ids(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
out = self.test_db.listAllPackageIds(order_by="atom")
self.assertEqual(out, (1,))
    def test_list_files(self):
        """listAllFiles() must return every path owned by the inserted package."""
        test_pkg = _misc.get_test_package()
        data = self.Spm.extract_package_metadata(test_pkg)
        idpackage = self.test_db.addPackage(data)
        # both sides sorted: listAllFiles() ordering is not relied upon
        out = sorted(self.test_db.listAllFiles())
        self.assertEqual(out, sorted([
            '/lib64/libz.so', '/usr/share/doc/zlib-1.2.3-r1',
            '/usr/share/doc/zlib-1.2.3-r1/algorithm.txt.bz2',
            '/usr/share/doc/zlib-1.2.3-r1/FAQ.bz2',
            '/usr/share/doc/zlib-1.2.3-r1/ChangeLog.bz2',
            '/usr', '/usr/include', '/usr/lib64',
            '/usr/share/man/man3/zlib.3.bz2', '/usr/lib64/libz.a', '/lib64',
            '/usr/share', '/usr/share/doc/zlib-1.2.3-r1/README.bz2',
            '/usr/lib64/libz.so', '/usr/share/man', '/usr/include/zconf.h',
            '/lib64/libz.so.1.2.3', '/usr/include/zlib.h', '/usr/share/doc',
            '/usr/share/man/man3', '/lib64/libz.so.1'])
        )
def test_list_categories(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
out = self.test_db.listAllCategories()
self.assertEqual(out, frozenset(('sys-libs',)))
def test_list_downloads(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
out = self.test_db.listAllDownloads()
self.assertEqual(out, ('sys-libs:zlib-1.2.3-r1.334f75538e6eddde753d9a247609dd8b1123a541.tbz2',))
def test_search_name(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(data)
_misc.clean_pkg_metadata(db_data)
self.assertEqual(db_data, data)
out = self.test_db.searchName(_misc.get_test_package_name())
self.assertEqual(out, frozenset([('sys-libs/zlib-1.2.3-r1', 1)]))
    def test_db_indexes(self):
        """createAllIndexes() must run without raising on a fresh repository."""
        self.test_db.createAllIndexes()
def test_db_import_export(self):
test_pkg = _misc.get_test_package2()
data = self.Spm.extract_package_metadata(test_pkg)
# Portage stores them this way
data['changelog'] = const_convert_to_unicode(
"#248083).\n\n 06 Feb 2009; Ra\xc3\xbal Porcel")
data['license'] = const_convert_to_unicode('GPL-2')
data['licensedata'] = {
const_convert_to_unicode('GPL-2'): \
const_convert_to_unicode(
"#248083).\n\n 06 Feb 2009; Ra\xc3\xbal Porcel"),
}
idpackage = self.test_db.addPackage(data)
db_data = self.test_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(data)
_misc.clean_pkg_metadata(db_data)
assert set(data.keys()) == set(db_data.keys())
for k, v in data.items():
if v != db_data[k]:
print(k, v, "vs", db_data[k])
self.assertEqual(data, db_data)
set_mute(True)
# export
fd, buf_file = const_mkstemp()
os.close(fd)
buf = open(buf_file, "wb")
self.test_db.exportRepository(buf)
buf.flush()
buf.close()
fd, new_db_path = const_mkstemp()
os.close(fd)
self.test_db.importRepository(buf_file, new_db_path)
new_db = self.Client.open_generic_repository(new_db_path)
new_db_data = new_db.getPackageData(idpackage)
_misc.clean_pkg_metadata(new_db_data)
new_db.close()
set_mute(True)
self.assertEqual(new_db_data, db_data)
os.remove(buf_file)
os.remove(new_db_path)
def test_use_defaults(self):
test_pkg = _misc.get_test_package()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
key, slot = self.test_db.retrieveKeySlot(idpackage)
valid_test_deps = [
"%s[%s(+)]" % (key, "doesntexistforsure"),
"%s[-%s(-)]" % (key, "doesntexistforsure"),
"%s[%s(+)]" % (key, "kernel_linux"),
"%s[%s(-)]" % (key, "kernel_linux"),
]
invalid_test_deps = [
"%s[%s(-)]" % (key, "doesntexistforsure"),
"%s[-%s(+)]" % (key, "kernel_linux"),
"%s[-%s(+)]" % (key, "doesntexistforsure"),
"%s[-%s(-)]" % (key, "kernel_linux"),
]
for dep in valid_test_deps:
self.assertEqual((1, 0), self.test_db.atomMatch(dep))
for dep in invalid_test_deps:
self.assertEqual((-1, 1), self.test_db.atomMatch(dep))
def test_db_package_sets(self):
set_name = 'my_test_set'
set_deps = ["app-foo/foo", "app-pling/plong", "media-foo/ajez"]
set_name2 = 'my_test_set2'
set_deps2 = ["app-foo/foo2", "app-pling/plong2", "media-foo/ajez2"]
pkgsets = {
set_name: set(set_deps),
set_name2: set(set_deps2),
}
self.test_db.insertPackageSets(pkgsets)
self.assertEqual(self.test_db.retrievePackageSets(), pkgsets)
set_search = self.test_db.searchSets(set_name2)
self.assertEqual(set([set_name2]), set_search)
def test_db_license_data_str_insert(self):
lic_txt = const_convert_to_rawstring('[3]\xab foo\n\n', 'utf-8')
lic_name = const_convert_to_unicode('CCPL-Attribution-2.0')
lic_data = {lic_name: lic_txt}
self.test_db._insertLicenses(lic_data)
db_lic_txt = self.test_db.retrieveLicenseText(lic_name)
self.assertEqual(db_lic_txt, lic_txt)
def test_settings(self):
self.assertRaises(KeyError, self.test_db.getSetting, "fuck")
self.test_db._setSetting("something_cool", "abcdef\nabcdef")
self.assertEqual(self.test_db.getSetting("something_cool"),
"abcdef\nabcdef")
def test_new_entropyrepository_schema(self):
test_pkg = _misc.get_test_package2()
data = self.Spm.extract_package_metadata(test_pkg)
idpackage = self.test_db.addPackage(data)
old_data = self.test_db.getPackageData(idpackage)
old_base_data = self.test_db.getBaseData(idpackage)
old_cats = self.test_db.listAllCategories()
test_db = self.__open_test_db(":memory:")
idpackage = test_db.addPackage(data)
new_data = test_db.getPackageData(idpackage)
new_base_data = test_db.getBaseData(idpackage)
new_cats = test_db.listAllCategories()
self.assertTrue(test_db._isBaseinfoExtrainfo2010())
self.assertEqual(old_data, new_data)
self.assertEqual(old_base_data, new_base_data)
self.assertEqual(old_cats, new_cats)
test_db.close()
    def test_preserved_libs(self):
        """
        insert/list/retrieve/remove round trip for preserved library
        entries of the form (library name, elf class, path, provider atom).
        """
        data = self.test_db.listAllPreservedLibraries()
        self.assertEqual(data, tuple())
        atom = "app-foo/bar-1.2.3"
        fake_data = [
            ("libfoo.so.3", 2, "/usr/lib64/libfoo.so.3", atom),
            ("libfoo.so.3", 2, "/usr/lib64/libfoo.so.3.0.0", atom),
            ("libfoo.so.3", 2, "/usr/lib64/libfoo.so", atom),
            ("libbar.so.10", 2, "/usr/lib64/libbar.so.10", atom),
            ("libbar.so.10", 2, "/usr/lib64/libbar.so", atom),
            ("libbar.so.10", 2, "/usr/lib64/libbar.so.10.0.0", atom),
            ]
        for entry in fake_data:
            self.test_db.insertPreservedLibrary(*entry)
        # test that insert order equals list
        data = self.test_db.listAllPreservedLibraries()
        self.assertEqual(tuple(data), tuple(fake_data))
        # group paths by (library, elfclass) to validate retrieval below
        grouped = {}
        for lib, elfclass, path, _atom in data:
            obj = grouped.setdefault((lib, elfclass), [])
            obj.append(path)
        # make sure that retrieve works as intended
        for (lib, elfclass), paths in grouped.items():
            out_paths = self.test_db.retrievePreservedLibraries(
                lib, elfclass)
            self.assertEqual(sorted(out_paths), sorted(paths))
        # test removal: entries disappear one at a time
        count = len(data)
        for lib, elfclass, path, _atom in data:
            self.test_db.removePreservedLibrary(lib, elfclass, path)
            count -= 1
            current_preserved_libs = self.test_db.listAllPreservedLibraries()
            self.assertEqual(len(current_preserved_libs), count)
            self.assertTrue((lib, elfclass, path) not in current_preserved_libs)
        data = self.test_db.listAllPreservedLibraries()
        self.assertEqual(data, tuple())
def _test_repository_locking(self, test_db):
with test_db.shared():
self.assertRaises(RuntimeError, test_db.try_acquire_exclusive)
with test_db.exclusive():
opaque = test_db.try_acquire_shared()
self.assertNotEqual(opaque, None)
test_db.release_shared(opaque)
opaque_shared = test_db.try_acquire_shared()
self.assert_(opaque_shared is not None)
self.assertRaises(RuntimeError, test_db.try_acquire_exclusive)
test_db.release_shared(opaque_shared)
# test reentrancy
opaque_exclusive = test_db.try_acquire_exclusive()
self.assert_(opaque_exclusive is not None)
opaque_exclusive2 = test_db.try_acquire_exclusive()
self.assert_(opaque_exclusive2 is not None)
self.assert_(opaque_exclusive._file_lock_setup() is
opaque_exclusive2._file_lock_setup())
# test reference counting
self.assertEquals(
2,
opaque_exclusive._file_lock_setup()['count'])
test_db.release_exclusive(opaque_exclusive)
self.assertEquals(
1,
opaque_exclusive._file_lock_setup()['count'])
test_db.release_exclusive(opaque_exclusive2)
self.assertEquals(
0,
opaque_exclusive._file_lock_setup()['count'])
# test that refcount doesn't go below zero
self.assertRaises(
RuntimeError, test_db.release_exclusive, opaque_exclusive)
self.assertEquals(
0,
opaque_exclusive._file_lock_setup()['count'])
opaque_exclusive = test_db.try_acquire_exclusive()
self.assert_(opaque_exclusive is not None)
self.assertRaises(RuntimeError, test_db.release_shared,
opaque_exclusive)
test_db.release_exclusive(opaque_exclusive)
    def test_locking_file(self):
        """Run the shared/exclusive locking scenario against an on-disk
        repository backed by a temporary file."""
        fd, db_file = const_mkstemp()
        # only the path is needed; close the raw descriptor right away
        os.close(fd)
        test_db = None
        try:
            test_db = self.Client.open_generic_repository(db_file)
            test_db.initializeRepository()
            return self._test_repository_locking(test_db)
        finally:
            # always release the repository and remove the temporary file
            if test_db is not None:
                test_db.close()
            os.remove(db_file)
def test_locking_memory(self):
self.assert_(self.test_db._is_memory())
return self._test_repository_locking(self.test_db)
def test_direct_access(self):
local = self.test_db._tls
self.assertEquals(self.test_db.directed(), False)
counter = getattr(local, "_EntropyRepositoryCacheCounter", "foo")
self.assertEquals(counter, "foo")
with self.test_db.direct():
self.assertEquals(self.test_db.directed(), True)
counter = local._EntropyRepositoryCacheCounter
self.assertEquals(counter, 0)
self.assertEquals(self.test_db.directed(), False)
with self.test_db.direct():
counter = local._EntropyRepositoryCacheCounter
self.assertEquals(counter, 1)
with self.test_db.direct():
counter = local._EntropyRepositoryCacheCounter
self.assertEquals(counter, 2)
counter = local._EntropyRepositoryCacheCounter
self.assertEquals(counter, 1)
counter = local._EntropyRepositoryCacheCounter
self.assertEquals(counter, 0)
self.test_db._direct_enabled = True
self.assertEquals(self.test_db.directed(), True)
self.test_db._direct_enabled = False
self.assertEquals(self.test_db.directed(), False)
with self.test_db.direct():
opaque = self.test_db.try_acquire_exclusive()
self.assertTrue(opaque is not None)
self.assertEquals(opaque.directed(), True)
opaque_shared = self.test_db.try_acquire_shared()
self.assertTrue(opaque_shared is not None)
self.assertEquals(opaque_shared.directed(), True)
opaque_exclusive = self.test_db.try_acquire_exclusive()
self.assertTrue(opaque_exclusive is not None)
self.assertEquals(opaque_exclusive.directed(), True)
self.assertEquals(self.test_db.directed(), True)
self.assertRaises(
RuntimeError,
self.test_db.release_shared, opaque_exclusive)
self.test_db.release_exclusive(opaque_exclusive)
self.assertEquals(self.test_db.directed(), True)
self.assertRaises(
RuntimeError,
self.test_db.release_exclusive, opaque_shared)
self.test_db.release_shared(opaque_shared)
self.assertEquals(self.test_db.directed(), True)
self.assertRaises(
RuntimeError,
self.test_db.release_shared, opaque)
self.test_db.release_exclusive(opaque)
self.assertEquals(self.test_db.directed(), True)
if __name__ == '__main__':
    unittest.main()
    # NOTE(review): unreachable — unittest.main() calls sys.exit() unless
    # exit=False is passed; kept for parity with sibling test scripts
    raise SystemExit(0)
| gpl-2.0 |
ToonTownInfiniteRepo/ToontownInfinite | Panda3D-1.9.0/python/Lib/multiprocessing/managers.py | 72 | 36586 | #
# Module providing the `SyncManager` class for dealing
# with shared objects
#
# multiprocessing/managers.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
#
# Imports
#
import os
import sys
import weakref
import threading
import array
import Queue
from traceback import format_exc
from multiprocessing import Process, current_process, active_children, Pool, util, connection
from multiprocessing.process import AuthenticationString
from multiprocessing.forking import exit, Popen, assert_spawning, ForkingPickler
from multiprocessing.util import Finalize, info
try:
from cPickle import PicklingError
except ImportError:
from pickle import PicklingError
#
# Register some things for pickling
#
def reduce_array(a):
    # Pickle an array.array as (typecode, raw data string); tostring() is
    # the Python 2 spelling of tobytes().
    return array.array, (a.typecode, a.tostring())
ForkingPickler.register(array.array, reduce_array)
# Concrete types returned by dict.items()/keys()/values(), recorded so
# such results can be recognised when proxied.
view_types = [type(getattr({}, name)()) for name in ('items','keys','values')]
#
# Type for identifying shared objects
#
class Token(object):
    '''
    Identifier that uniquely names a shared object living in a manager
    server: the registered type id, the server address and the object id.
    '''
    __slots__ = ('typeid', 'address', 'id')

    def __init__(self, typeid, address, id):
        self.typeid = typeid
        self.address = address
        self.id = id

    def __getstate__(self):
        # pickled as a plain tuple because of __slots__
        return (self.typeid, self.address, self.id)

    def __setstate__(self, state):
        self.typeid, self.address, self.id = state

    def __repr__(self):
        return 'Token(typeid=%r, address=%r, id=%r)' % (
            self.typeid, self.address, self.id)
#
# Function for communication with a manager's server process
#
def dispatch(c, id, methodname, args=(), kwds={}):
    '''
    Send a request to a manager over connection `c` and return the reply.

    Any reply kind other than '#RETURN' is converted into the matching
    exception and raised.
    '''
    c.send((id, methodname, args, kwds))
    kind, result = c.recv()
    if kind != '#RETURN':
        raise convert_to_error(kind, result)
    return result
def convert_to_error(kind, result):
    '''
    Map a (kind, result) reply pair onto the exception instance it
    represents; the caller is expected to raise the returned object.
    '''
    if kind == '#ERROR':
        # server shipped us a pickled exception instance -- reuse it
        return result
    if kind == '#TRACEBACK':
        assert type(result) is str
        return RemoteError(result)
    if kind == '#UNSERIALIZABLE':
        assert type(result) is str
        return RemoteError('Unserializable message: %s\n' % result)
    return ValueError('Unrecognized message type')
class RemoteError(Exception):
    '''
    Exception wrapping a traceback string received from a manager server.
    '''
    def __str__(self):
        bar = '-' * 75
        return '\n%s\n%s%s' % (bar, self.args[0], bar)
#
# Functions for finding the method names of an object
#
def all_methods(obj):
    '''
    Return a list of names of methods of `obj`.

    Uses the idiomatic callable() test (equivalent to checking for a
    __call__ attribute) and a list comprehension instead of the manual
    accumulation loop.
    '''
    return [name for name in dir(obj) if callable(getattr(obj, name))]
def public_methods(obj):
    '''
    Return a list of names of methods of `obj` which do not start with '_'
    '''
    method_names = all_methods(obj)
    return [name for name in method_names if name[0] != '_']
#
# Server which is run in a process controlled by a manager
#
class Server(object):
    '''
    Server class which runs in a process controlled by a manager object.

    It listens for connections, authenticates them, and serves two kinds
    of requests: management calls (listed in `public`) handled by
    handle_request(), and per-proxy method calls handled by a dedicated
    thread running serve_client().
    '''
    # names of the remote procedures clients may invoke on the server itself
    public = ['shutdown', 'create', 'accept_connection', 'get_methods',
              'debug_info', 'number_of_objects', 'dummy', 'incref', 'decref']
    def __init__(self, registry, address, authkey, serializer):
        assert isinstance(authkey, bytes)
        self.registry = registry
        self.authkey = AuthenticationString(authkey)
        Listener, Client = listener_client[serializer]
        # do authentication later
        self.listener = Listener(address=address, backlog=16)
        self.address = self.listener.address
        # ident '0' is a placeholder so requests addressed to the server
        # itself (id None/'0') can be routed through the same table
        self.id_to_obj = {'0': (None, ())}
        self.id_to_refcount = {}
        self.mutex = threading.RLock()
        self.stop = 0
    def serve_forever(self):
        '''
        Run the server forever
        '''
        current_process()._manager_server = self
        try:
            try:
                while 1:
                    try:
                        c = self.listener.accept()
                    except (OSError, IOError):
                        continue
                    # each connection gets its own daemon thread
                    t = threading.Thread(target=self.handle_request, args=(c,))
                    t.daemon = True
                    t.start()
            except (KeyboardInterrupt, SystemExit):
                pass
        finally:
            self.stop = 999
            self.listener.close()
    def handle_request(self, c):
        '''
        Handle a new connection
        '''
        funcname = result = request = None
        try:
            # mutual challenge/response authentication before any request
            connection.deliver_challenge(c, self.authkey)
            connection.answer_challenge(c, self.authkey)
            request = c.recv()
            ignore, funcname, args, kwds = request
            assert funcname in self.public, '%r unrecognized' % funcname
            func = getattr(self, funcname)
        except Exception:
            msg = ('#TRACEBACK', format_exc())
        else:
            try:
                result = func(c, *args, **kwds)
            except Exception:
                msg = ('#TRACEBACK', format_exc())
            else:
                msg = ('#RETURN', result)
        try:
            c.send(msg)
        except Exception, e:
            # reply itself may be unpicklable; try to report that instead
            try:
                c.send(('#TRACEBACK', format_exc()))
            except Exception:
                pass
            util.info('Failure to send message: %r', msg)
            util.info(' ... request was %r', request)
            util.info(' ... exception was %r', e)
        c.close()
    def serve_client(self, conn):
        '''
        Handle requests from the proxies in a particular process/thread
        '''
        util.debug('starting server thread to service %r',
                   threading.current_thread().name)
        # bind hot attributes to locals for the request loop
        recv = conn.recv
        send = conn.send
        id_to_obj = self.id_to_obj
        while not self.stop:
            try:
                methodname = obj = None
                request = recv()
                ident, methodname, args, kwds = request
                obj, exposed, gettypeid = id_to_obj[ident]
                if methodname not in exposed:
                    raise AttributeError(
                        'method %r of %r object is not in exposed=%r' %
                        (methodname, type(obj), exposed)
                        )
                function = getattr(obj, methodname)
                try:
                    res = function(*args, **kwds)
                except Exception, e:
                    msg = ('#ERROR', e)
                else:
                    typeid = gettypeid and gettypeid.get(methodname, None)
                    if typeid:
                        # the result itself becomes a shared object: hand
                        # back a proxy token instead of a copied value
                        rident, rexposed = self.create(conn, typeid, res)
                        token = Token(typeid, self.address, rident)
                        msg = ('#PROXY', (rexposed, token))
                    else:
                        msg = ('#RETURN', res)
            except AttributeError:
                if methodname is None:
                    msg = ('#TRACEBACK', format_exc())
                else:
                    try:
                        # special handlers (__str__, __repr__, '#GETVALUE')
                        # that are served even when not in `exposed`
                        fallback_func = self.fallback_mapping[methodname]
                        result = fallback_func(
                            self, conn, ident, obj, *args, **kwds
                            )
                        msg = ('#RETURN', result)
                    except Exception:
                        msg = ('#TRACEBACK', format_exc())
            except EOFError:
                # client went away: terminate this serving thread quietly
                util.debug('got EOF -- exiting thread serving %r',
                           threading.current_thread().name)
                sys.exit(0)
            except Exception:
                msg = ('#TRACEBACK', format_exc())
            try:
                try:
                    send(msg)
                except Exception, e:
                    # result not picklable -- report its repr instead
                    send(('#UNSERIALIZABLE', repr(msg)))
            except Exception, e:
                util.info('exception in thread serving %r',
                          threading.current_thread().name)
                util.info(' ... message was %r', msg)
                util.info(' ... exception was %r', e)
                conn.close()
                sys.exit(1)
    def fallback_getvalue(self, conn, ident, obj):
        # return the referent itself (copied during pickling)
        return obj
    def fallback_str(self, conn, ident, obj):
        return str(obj)
    def fallback_repr(self, conn, ident, obj):
        return repr(obj)
    fallback_mapping = {
        '__str__':fallback_str,
        '__repr__':fallback_repr,
        '#GETVALUE':fallback_getvalue
        }
    def dummy(self, c):
        # no-op used by BaseManager.connect() to ping the server
        pass
    def debug_info(self, c):
        '''
        Return some info --- useful to spot problems with refcounting
        '''
        self.mutex.acquire()
        try:
            result = []
            keys = self.id_to_obj.keys()
            keys.sort()
            for ident in keys:
                if ident != '0':
                    result.append('  %s:       refcount=%s\n    %s' %
                                  (ident, self.id_to_refcount[ident],
                                   str(self.id_to_obj[ident][0])[:75]))
            return '\n'.join(result)
        finally:
            self.mutex.release()
    def number_of_objects(self, c):
        '''
        Number of shared objects
        '''
        return len(self.id_to_obj) - 1      # don't count ident='0'
    def shutdown(self, c):
        '''
        Shutdown this process
        '''
        try:
            try:
                util.debug('manager received shutdown message')
                # acknowledge before tearing everything down
                c.send(('#RETURN', None))
                if sys.stdout != sys.__stdout__:
                    util.debug('resetting stdout, stderr')
                    sys.stdout = sys.__stdout__
                    sys.stderr = sys.__stderr__
                util._run_finalizers(0)
                for p in active_children():
                    util.debug('terminating a child process of manager')
                    p.terminate()
                for p in active_children():
                    util.debug('terminating a child process of manager')
                    p.join()
                util._run_finalizers()
                util.info('manager exiting with exitcode 0')
            except:
                import traceback
                traceback.print_exc()
        finally:
            exit(0)
    def create(self, c, typeid, *args, **kwds):
        '''
        Create a new shared object and return its id
        '''
        self.mutex.acquire()
        try:
            callable, exposed, method_to_typeid, proxytype = \
                      self.registry[typeid]
            if callable is None:
                # no factory registered: the single positional argument
                # IS the object to share (used for server-created results)
                assert len(args) == 1 and not kwds
                obj = args[0]
            else:
                obj = callable(*args, **kwds)
            if exposed is None:
                exposed = public_methods(obj)
            if method_to_typeid is not None:
                assert type(method_to_typeid) is dict
                exposed = list(exposed) + list(method_to_typeid)
            ident = '%x' % id(obj)  # convert to string because xmlrpclib
                                    # only has 32 bit signed integers
            util.debug('%r callable returned object with id %r', typeid, ident)
            self.id_to_obj[ident] = (obj, set(exposed), method_to_typeid)
            if ident not in self.id_to_refcount:
                self.id_to_refcount[ident] = 0
            # increment the reference count immediately, to avoid
            # this object being garbage collected before a Proxy
            # object for it can be created.  The caller of create()
            # is responsible for doing a decref once the Proxy object
            # has been created.
            self.incref(c, ident)
            return ident, tuple(exposed)
        finally:
            self.mutex.release()
    def get_methods(self, c, token):
        '''
        Return the methods of the shared object indicated by token
        '''
        return tuple(self.id_to_obj[token.id][1])
    def accept_connection(self, c, name):
        '''
        Spawn a new thread to serve this connection
        '''
        threading.current_thread().name = name
        c.send(('#RETURN', None))
        self.serve_client(c)
    def incref(self, c, ident):
        '''Increment the server-side reference count for `ident`.'''
        self.mutex.acquire()
        try:
            self.id_to_refcount[ident] += 1
        finally:
            self.mutex.release()
    def decref(self, c, ident):
        '''Decrement the count for `ident`; dispose of the object at zero.'''
        self.mutex.acquire()
        try:
            assert self.id_to_refcount[ident] >= 1
            self.id_to_refcount[ident] -= 1
            if self.id_to_refcount[ident] == 0:
                del self.id_to_obj[ident], self.id_to_refcount[ident]
                util.debug('disposing of obj with id %r', ident)
        finally:
            self.mutex.release()
#
# Class to represent state of a manager
#
class State(object):
    '''
    Mutable holder recording where a manager is in its lifecycle.
    '''
    __slots__ = ['value']
    # lifecycle constants stored in `value`
    INITIAL = 0
    STARTED = 1
    SHUTDOWN = 2
#
# Mapping from serializer name to Listener and Client types
#
# keys are the serializer names accepted by BaseManager(serializer=...)
listener_client = {
    'pickle' : (connection.Listener, connection.Client),
    'xmlrpclib' : (connection.XmlListener, connection.XmlClient)
    }
#
# Definition of BaseManager
#
class BaseManager(object):
    '''
    Base class for managers.

    A manager owns (or connects to) a Server process; register() installs
    typeids whose instances are created in the server and accessed through
    proxies.
    '''
    _registry = {}
    _Server = Server
    def __init__(self, address=None, authkey=None, serializer='pickle'):
        if authkey is None:
            authkey = current_process().authkey
        self._address = address     # XXX not final address if eg ('', 0)
        self._authkey = AuthenticationString(authkey)
        self._state = State()
        self._state.value = State.INITIAL
        self._serializer = serializer
        self._Listener, self._Client = listener_client[serializer]
    def __reduce__(self):
        # pickling yields a reconnect recipe, not the manager's state
        return type(self).from_address, \
               (self._address, self._authkey, self._serializer)
    def get_server(self):
        '''
        Return server object with serve_forever() method and address attribute
        '''
        assert self._state.value == State.INITIAL
        return Server(self._registry, self._address,
                      self._authkey, self._serializer)
    def connect(self):
        '''
        Connect manager object to the server process
        '''
        Listener, Client = listener_client[self._serializer]
        conn = Client(self._address, authkey=self._authkey)
        # 'dummy' is a no-op ping proving the server answers
        dispatch(conn, None, 'dummy')
        self._state.value = State.STARTED
    def start(self, initializer=None, initargs=()):
        '''
        Spawn a server process for this manager object
        '''
        assert self._state.value == State.INITIAL
        if initializer is not None and not hasattr(initializer, '__call__'):
            raise TypeError('initializer must be a callable')
        # pipe over which we will retrieve address of server
        reader, writer = connection.Pipe(duplex=False)
        # spawn process which runs a server
        self._process = Process(
            target=type(self)._run_server,
            args=(self._registry, self._address, self._authkey,
                  self._serializer, writer, initializer, initargs),
            )
        ident = ':'.join(str(i) for i in self._process._identity)
        self._process.name = type(self).__name__ + '-' + ident
        self._process.start()
        # get address of server
        writer.close()
        self._address = reader.recv()
        reader.close()
        # register a finalizer
        self._state.value = State.STARTED
        self.shutdown = util.Finalize(
            self, type(self)._finalize_manager,
            args=(self._process, self._address, self._authkey,
                  self._state, self._Client),
            exitpriority=0
            )
    @classmethod
    def _run_server(cls, registry, address, authkey, serializer, writer,
                    initializer=None, initargs=()):
        '''
        Create a server, report its address and run it
        '''
        if initializer is not None:
            initializer(*initargs)
        # create server
        server = cls._Server(registry, address, authkey, serializer)
        # inform parent process of the server's address
        writer.send(server.address)
        writer.close()
        # run the manager
        util.info('manager serving at %r', server.address)
        server.serve_forever()
    def _create(self, typeid, *args, **kwds):
        '''
        Create a new shared object; return the token and exposed tuple
        '''
        assert self._state.value == State.STARTED, 'server not yet started'
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            id, exposed = dispatch(conn, None, 'create', (typeid,)+args, kwds)
        finally:
            conn.close()
        return Token(typeid, self._address, id), exposed
    def join(self, timeout=None):
        '''
        Join the manager process (if it has been spawned)
        '''
        self._process.join(timeout)
    def _debug_info(self):
        '''
        Return some info about the servers shared objects and connections
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'debug_info')
        finally:
            conn.close()
    def _number_of_objects(self):
        '''
        Return the number of shared objects
        '''
        conn = self._Client(self._address, authkey=self._authkey)
        try:
            return dispatch(conn, None, 'number_of_objects')
        finally:
            conn.close()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()
    @staticmethod
    def _finalize_manager(process, address, authkey, state, _Client):
        '''
        Shutdown the manager process; will be registered as a finalizer
        '''
        if process.is_alive():
            # politely ask the server to shut down, then escalate
            util.info('sending shutdown message to manager')
            try:
                conn = _Client(address, authkey=authkey)
                try:
                    dispatch(conn, None, 'shutdown')
                finally:
                    conn.close()
            except Exception:
                pass
            process.join(timeout=0.2)
            if process.is_alive():
                util.info('manager still alive')
                if hasattr(process, 'terminate'):
                    util.info('trying to `terminate()` manager process')
                    process.terminate()
                    process.join(timeout=0.1)
                    if process.is_alive():
                        util.info('manager still alive after terminate')
        state.value = State.SHUTDOWN
        try:
            del BaseProxy._address_to_local[address]
        except KeyError:
            pass
    address = property(lambda self: self._address)
    @classmethod
    def register(cls, typeid, callable=None, proxytype=None, exposed=None,
                 method_to_typeid=None, create_method=True):
        '''
        Register a typeid with the manager type
        '''
        # give each subclass its own copy of the registry
        if '_registry' not in cls.__dict__:
            cls._registry = cls._registry.copy()
        if proxytype is None:
            proxytype = AutoProxy
        exposed = exposed or getattr(proxytype, '_exposed_', None)
        method_to_typeid = method_to_typeid or \
                           getattr(proxytype, '_method_to_typeid_', None)
        if method_to_typeid:
            for key, value in method_to_typeid.items():
                assert type(key) is str, '%r is not a string' % key
                assert type(value) is str, '%r is not a string' % value
        cls._registry[typeid] = (
            callable, exposed, method_to_typeid, proxytype
            )
        if create_method:
            # install a factory method named after the typeid
            def temp(self, *args, **kwds):
                util.debug('requesting creation of a shared %r object', typeid)
                token, exp = self._create(typeid, *args, **kwds)
                proxy = proxytype(
                    token, self._serializer, manager=self,
                    authkey=self._authkey, exposed=exp
                    )
                # drop the extra reference taken by Server.create()
                conn = self._Client(token.address, authkey=self._authkey)
                dispatch(conn, None, 'decref', (token.id,))
                return proxy
            temp.__name__ = typeid
            setattr(cls, typeid, temp)
#
# Subclass of set which get cleared after a fork
#
class ProcessLocalSet(set):
    '''
    A set that empties itself in the child after a fork, so that proxy
    reference bookkeeping never leaks across process boundaries.
    '''
    def __init__(self):
        def _clear(obj):
            obj.clear()
        util.register_after_fork(self, _clear)

    def __reduce__(self):
        # pickle as a fresh, empty instance
        return type(self), ()
#
# Definition of BaseProxy
#
class BaseProxy(object):
    '''
    A base for proxies of shared objects.

    Each proxy holds a Token naming its referent; method calls are
    forwarded over a per-thread connection to the manager server, and
    server-side reference counts are kept in sync via incref/decref.
    '''
    # maps server address -> (thread-local connection holder, id set)
    _address_to_local = {}
    _mutex = util.ForkAwareThreadLock()
    def __init__(self, token, serializer, manager=None,
                 authkey=None, exposed=None, incref=True):
        BaseProxy._mutex.acquire()
        try:
            tls_idset = BaseProxy._address_to_local.get(token.address, None)
            if tls_idset is None:
                tls_idset = util.ForkAwareLocal(), ProcessLocalSet()
                BaseProxy._address_to_local[token.address] = tls_idset
        finally:
            BaseProxy._mutex.release()
        # self._tls is used to record the connection used by this
        # thread to communicate with the manager at token.address
        self._tls = tls_idset[0]
        # self._idset is used to record the identities of all shared
        # objects for which the current process owns references and
        # which are in the manager at token.address
        self._idset = tls_idset[1]
        self._token = token
        self._id = self._token.id
        self._manager = manager
        self._serializer = serializer
        self._Client = listener_client[serializer][1]
        if authkey is not None:
            self._authkey = AuthenticationString(authkey)
        elif self._manager is not None:
            self._authkey = self._manager._authkey
        else:
            self._authkey = current_process().authkey
        if incref:
            self._incref()
        util.register_after_fork(self, BaseProxy._after_fork)
    def _connect(self):
        util.debug('making connection to manager')
        name = current_process().name
        if threading.current_thread().name != 'MainThread':
            name += '|' + threading.current_thread().name
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'accept_connection', (name,))
        self._tls.connection = conn
    def _callmethod(self, methodname, args=(), kwds={}):
        '''
        Try to call a method of the referrent and return a copy of the result
        '''
        try:
            conn = self._tls.connection
        except AttributeError:
            # first call from this thread: open a dedicated connection
            util.debug('thread %r does not own a connection',
                       threading.current_thread().name)
            self._connect()
            conn = self._tls.connection
        conn.send((self._id, methodname, args, kwds))
        kind, result = conn.recv()
        if kind == '#RETURN':
            return result
        elif kind == '#PROXY':
            # the server created a shared object for the result; wrap it
            exposed, token = result
            proxytype = self._manager._registry[token.typeid][-1]
            proxy = proxytype(
                token, self._serializer, manager=self._manager,
                authkey=self._authkey, exposed=exposed
                )
            # drop the extra reference taken by Server.create()
            conn = self._Client(token.address, authkey=self._authkey)
            dispatch(conn, None, 'decref', (token.id,))
            return proxy
        raise convert_to_error(kind, result)
    def _getvalue(self):
        '''
        Get a copy of the value of the referent
        '''
        return self._callmethod('#GETVALUE')
    def _incref(self):
        conn = self._Client(self._token.address, authkey=self._authkey)
        dispatch(conn, None, 'incref', (self._id,))
        util.debug('INCREF %r', self._token.id)
        self._idset.add(self._id)
        state = self._manager and self._manager._state
        # finalizer guarantees a matching decref when the proxy dies
        self._close = util.Finalize(
            self, BaseProxy._decref,
            args=(self._token, self._authkey, state,
                  self._tls, self._idset, self._Client),
            exitpriority=10
            )
    @staticmethod
    def _decref(token, authkey, state, tls, idset, _Client):
        idset.discard(token.id)
        # check whether manager is still alive
        if state is None or state.value == State.STARTED:
            # tell manager this process no longer cares about referent
            try:
                util.debug('DECREF %r', token.id)
                conn = _Client(token.address, authkey=authkey)
                dispatch(conn, None, 'decref', (token.id,))
            except Exception, e:
                util.debug('... decref failed %s', e)
        else:
            util.debug('DECREF %r -- manager already shutdown', token.id)
        # check whether we can close this thread's connection because
        # the process owns no more references to objects for this manager
        if not idset and hasattr(tls, 'connection'):
            util.debug('thread %r has no more proxies so closing conn',
                       threading.current_thread().name)
            tls.connection.close()
            del tls.connection
    def _after_fork(self):
        # the child did not inherit valid manager state; re-register
        self._manager = None
        try:
            self._incref()
        except Exception, e:
            # the proxy may just be for a manager which has shutdown
            util.info('incref failed: %s' % e)
    def __reduce__(self):
        kwds = {}
        # only ship the authkey when pickling for a spawning child
        if Popen.thread_is_spawning():
            kwds['authkey'] = self._authkey
        if getattr(self, '_isauto', False):
            kwds['exposed'] = self._exposed_
            return (RebuildProxy,
                    (AutoProxy, self._token, self._serializer, kwds))
        else:
            return (RebuildProxy,
                    (type(self), self._token, self._serializer, kwds))
    def __deepcopy__(self, memo):
        # deep copy materialises the referent's value locally
        return self._getvalue()
    def __repr__(self):
        return '<%s object, typeid %r at %s>' % \
               (type(self).__name__, self._token.typeid, '0x%x' % id(self))
    def __str__(self):
        '''
        Return representation of the referent (or a fall-back if that fails)
        '''
        try:
            return self._callmethod('__repr__')
        except Exception:
            return repr(self)[:-1] + "; '__str__()' failed>"
#
# Function used for unpickling
#
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    When unpickling happens inside the manager server itself the referent
    is returned directly; otherwise `func` builds a fresh proxy.
    '''
    server = getattr(current_process(), '_manager_server', None)
    if server and server.address == token.address:
        # we live in the server process: short-circuit to the real object
        return server.id_to_obj[token.id][0]
    should_incref = (
        kwds.pop('incref', True)
        and not getattr(current_process(), '_inheriting', False)
    )
    return func(token, serializer, incref=should_incref, **kwds)
#
# Functions to create proxies and proxy types
#
def MakeProxyType(name, exposed, _cache={}):
    '''
    Return a proxy type whose methods are given by `exposed`.

    Generated types are memoised in `_cache` (a deliberate mutable
    default) so repeated requests for the same (name, exposed) pair
    return the identical class object.
    '''
    exposed = tuple(exposed)
    try:
        return _cache[(name, exposed)]
    except KeyError:
        pass

    dic = {}
    for meth in exposed:
        # exec() call form is valid on both Python 2 and Python 3,
        # unlike the Python-2-only `exec ... in dic` statement
        exec('''def %s(self, *args, **kwds):
        return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)

    ProxyType = type(name, (BaseProxy,), dic)
    ProxyType._exposed_ = exposed
    _cache[(name, exposed)] = ProxyType
    return ProxyType
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True):
    '''
    Return an auto-generated proxy for `token`.

    When `exposed` is not supplied, the manager server is asked for the
    referent's method list and a proxy class is built on the fly.
    '''
    client_type = listener_client[serializer][1]

    if exposed is None:
        # one throw-away connection just to fetch the exposed methods
        conn = client_type(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = current_process().authkey

    proxy_type = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = proxy_type(token, serializer, manager=manager,
                       authkey=authkey, incref=incref)
    proxy._isauto = True
    return proxy
#
# Types/callables which we will register with SyncManager
#
class Namespace(object):
    '''
    Simple attribute bag; repr() shows only the public attributes, sorted.
    '''
    def __init__(self, **kwds):
        self.__dict__.update(kwds)

    def __repr__(self):
        public = sorted(
            '%s=%r' % (attr, value)
            for attr, value in self.__dict__.items()
            if not attr.startswith('_')
        )
        return 'Namespace(%s)' % ', '.join(public)
class Value(object):
    '''
    Mutable holder for a single value of a given typecode.  The `lock`
    argument is accepted for API compatibility with multiprocessing.Value
    but unused here (access is serialised by the manager server).
    '''
    def __init__(self, typecode, value, lock=True):
        self._typecode = typecode
        self._value = value

    def get(self):
        '''Return the stored value.'''
        return self._value

    def set(self, value):
        '''Replace the stored value.'''
        self._value = value

    def __repr__(self):
        return '%s(%r, %r)' % (
            type(self).__name__, self._typecode, self._value)

    value = property(get, set)
def Array(typecode, sequence, lock=True):
    '''
    Build an array.array holding `sequence`; `lock` exists only to mirror
    multiprocessing.Array's signature and is ignored.
    '''
    arr = array.array(typecode, sequence)
    return arr
#
# Proxy types used by SyncManager
#
class IteratorProxy(BaseProxy):
    # Proxy for iterator/generator objects held in the manager server;
    # every call is forwarded to the referent.
    # XXX remove methods for Py3.0 and Py2.6
    _exposed_ = ('__next__', 'next', 'send', 'throw', 'close')
    def __iter__(self):
        # the proxy is its own iterator
        return self
    def __next__(self, *args):
        return self._callmethod('__next__', args)
    def next(self, *args):
        return self._callmethod('next', args)
    def send(self, *args):
        return self._callmethod('send', args)
    def throw(self, *args):
        return self._callmethod('throw', args)
    def close(self, *args):
        return self._callmethod('close', args)
class AcquirerProxy(BaseProxy):
    # Proxy for lock-like referents (Lock, RLock, Semaphore, ...); also
    # usable as a context manager, forwarding acquire/release.
    _exposed_ = ('acquire', 'release')
    def acquire(self, blocking=True):
        return self._callmethod('acquire', (blocking,))
    def release(self):
        return self._callmethod('release')
    def __enter__(self):
        return self._callmethod('acquire')
    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._callmethod('release')
class ConditionProxy(AcquirerProxy):
    # Proxy for threading.Condition referents; inherits lock handling.
    # XXX will Condition.notfyAll() name be available in Py3.0?
    _exposed_ = ('acquire', 'release', 'wait', 'notify', 'notify_all')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
    def notify(self):
        return self._callmethod('notify')
    def notify_all(self):
        return self._callmethod('notify_all')
class EventProxy(BaseProxy):
    # Proxy for threading.Event referents.
    _exposed_ = ('is_set', 'set', 'clear', 'wait')
    def is_set(self):
        return self._callmethod('is_set')
    def set(self):
        return self._callmethod('set')
    def clear(self):
        return self._callmethod('clear')
    def wait(self, timeout=None):
        return self._callmethod('wait', (timeout,))
class NamespaceProxy(BaseProxy):
    '''
    Proxy for Namespace objects: attribute access is forwarded to the
    referent, except for names beginning with '_' which stay local to
    the proxy instance itself.
    '''
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')

    def __getattr__(self, key):
        if key[0] == '_':
            return object.__getattribute__(self, key)
        forward = object.__getattribute__(self, '_callmethod')
        return forward('__getattribute__', (key,))

    def __setattr__(self, key, value):
        if key[0] == '_':
            return object.__setattr__(self, key, value)
        forward = object.__getattribute__(self, '_callmethod')
        return forward('__setattr__', (key, value))

    def __delattr__(self, key):
        if key[0] == '_':
            return object.__delattr__(self, key)
        forward = object.__getattribute__(self, '_callmethod')
        return forward('__delattr__', (key,))
class ValueProxy(BaseProxy):
    # Proxy for Value referents; exposes the same `value` property.
    _exposed_ = ('get', 'set')
    def get(self):
        return self._callmethod('get')
    def set(self, value):
        return self._callmethod('set', (value,))
    value = property(get, set)
# Proxy type for `list` referents; slice dunders kept for Python 2 semantics
BaseListProxy = MakeProxyType('BaseListProxy', (
    '__add__', '__contains__', '__delitem__', '__delslice__',
    '__getitem__', '__getslice__', '__len__', '__mul__',
    '__reversed__', '__rmul__', '__setitem__', '__setslice__',
    'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
    'reverse', 'sort', '__imul__'
    ))                  # XXX __getslice__ and __setslice__ unneeded in Py3.0
class ListProxy(BaseListProxy):
    '''List proxy with in-place `+=` and `*=` forwarded to the referent.'''
    def __iadd__(self, other):
        # extend the remote list, then return self so `lst += x` rebinds
        # the name to the proxy rather than to the remote call's result
        self._callmethod('extend', (other,))
        return self

    def __imul__(self, factor):
        self._callmethod('__imul__', (factor,))
        return self
# Auto-generated proxy types for the remaining built-in referents.
DictProxy = MakeProxyType('DictProxy', (
    '__contains__', '__delitem__', '__getitem__', '__len__',
    '__setitem__', 'clear', 'copy', 'get', 'has_key', 'items',
    'keys', 'pop', 'popitem', 'setdefault', 'update', 'values'
    ))
ArrayProxy = MakeProxyType('ArrayProxy', (
    '__len__', '__getitem__', '__setitem__', '__getslice__', '__setslice__'
    ))                  # XXX __getslice__ and __setslice__ unneeded in Py3.0
PoolProxy = MakeProxyType('PoolProxy', (
    'apply', 'apply_async', 'close', 'imap', 'imap_unordered', 'join',
    'map', 'map_async', 'terminate'
    ))
# these Pool methods return shared objects, so their results come back
# as proxies of the listed typeids rather than as copies
PoolProxy._method_to_typeid_ = {
    'apply_async': 'AsyncResult',
    'map_async': 'AsyncResult',
    'imap': 'Iterator',
    'imap_unordered': 'Iterator'
    }
#
# Definition of SyncManager
#
class SyncManager(BaseManager):
    '''
    Subclass of `BaseManager` which supports a number of shared object types.

    The types registered are those intended for the synchronization
    of threads, plus `dict`, `list` and `Namespace`.

    The `multiprocessing.Manager()` function creates started instances of
    this class.
    '''

# Register the shared types.  Where a proxy class is supplied it restricts
# which methods may be called remotely; create_method=False registers typeids
# that are only produced as return values (see PoolProxy._method_to_typeid_).
SyncManager.register('Queue', Queue.Queue)
SyncManager.register('JoinableQueue', Queue.Queue)
SyncManager.register('Event', threading.Event, EventProxy)
SyncManager.register('Lock', threading.Lock, AcquirerProxy)
SyncManager.register('RLock', threading.RLock, AcquirerProxy)
SyncManager.register('Semaphore', threading.Semaphore, AcquirerProxy)
SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore,
                     AcquirerProxy)
SyncManager.register('Condition', threading.Condition, ConditionProxy)
SyncManager.register('Pool', Pool, PoolProxy)
SyncManager.register('list', list, ListProxy)
SyncManager.register('dict', dict, DictProxy)
SyncManager.register('Value', Value, ValueProxy)
SyncManager.register('Array', Array, ArrayProxy)
SyncManager.register('Namespace', Namespace, NamespaceProxy)

# types returned by methods of PoolProxy
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
| mit |
home-assistant/home-assistant | homeassistant/components/enocean/__init__.py | 18 | 1658 | """Support for EnOcean devices."""
import voluptuous as vol
from homeassistant import config_entries, core
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_DEVICE
import homeassistant.helpers.config_validation as cv
from .const import DATA_ENOCEAN, DOMAIN, ENOCEAN_DONGLE
from .dongle import EnOceanDongle
# Legacy YAML schema: a single required serial-device path under the EnOcean
# domain; extra top-level keys are tolerated so other domains pass through.
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.string})}, extra=vol.ALLOW_EXTRA
)
async def async_setup(hass, config):
    """Set up the EnOcean component."""
    # Nothing to do unless the user still has legacy YAML configuration.
    if DOMAIN not in config:
        return True

    # Only one dongle can exist; when a config entry is already present the
    # YAML settings must not be imported a second time.
    if hass.config_entries.async_entries(DOMAIN):
        return True

    import_flow = hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=config[DOMAIN],
    )
    hass.async_create_task(import_flow)
    return True
async def async_setup_entry(
    hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
    """Set up an EnOcean dongle for the given entry."""
    # Create the shared storage first so it exists even if dongle setup fails.
    enocean_data = hass.data.setdefault(DATA_ENOCEAN, {})

    dongle = EnOceanDongle(hass, config_entry.data[CONF_DEVICE])
    await dongle.async_setup()

    # Stash the dongle so platforms and unload can reach it later.
    enocean_data[ENOCEAN_DONGLE] = dongle
    return True
async def async_unload_entry(hass, config_entry):
    """Unload ENOcean config entry."""
    # Tear down the dongle, then drop the component's shared storage.
    dongle = hass.data[DATA_ENOCEAN][ENOCEAN_DONGLE]
    dongle.unload()
    hass.data.pop(DATA_ENOCEAN)
    return True
| apache-2.0 |
lthurlow/Network-Grapher | proj/external/numpy-1.7.0/numpy/f2py/setup.py | 38 | 4282 | #!/usr/bin/env python
"""
setup.py for installing F2PY
Usage:
python setup.py install
Copyright 2001-2005 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Revision: 1.32 $
$Date: 2005/01/30 17:22:14 $
Pearu Peterson
"""
__version__ = "$Id: setup.py,v 1.32 2005/01/30 17:22:14 pearu Exp $"
import os
import sys
from distutils.dep_util import newer
from numpy.distutils import log
from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration
from __version__ import version
def configuration(parent_package='',top_path=None):
    # numpy.distutils configuration for the f2py subpackage.  Besides data
    # files, this registers a build hook that writes the `f2py` command-line
    # script into the build directory.
    config = Configuration('f2py', parent_package, top_path)

    config.add_data_dir('docs')
    config.add_data_dir('tests')

    config.add_data_files('src/fortranobject.c',
                          'src/fortranobject.h',
                          'f2py.1'
                          )

    config.make_svn_version_py()

    def generate_f2py_py(build_dir):
        # Name the script after the running interpreter, e.g.
        # "python2.5" -> "f2py2.5"; normalise Windows ".exe" suffixes to ".py".
        f2py_exe = 'f2py'+os.path.basename(sys.executable)[6:]
        if f2py_exe[-4:]=='.exe':
            f2py_exe = f2py_exe[:-4] + '.py'
        if 'bdist_wininst' in sys.argv and f2py_exe[-3:] != '.py':
            f2py_exe = f2py_exe + '.py'
        target = os.path.join(build_dir,f2py_exe)
        if newer(__file__,target):
            # Regenerate only when this setup.py is newer than the target.
            log.info('Creating %s', target)
            f = open(target,'w')
            f.write('''\
#!/usr/bin/env %s
# See http://cens.ioc.ee/projects/f2py2e/
import os, sys
for mode in ["g3-numpy", "2e-numeric", "2e-numarray", "2e-numpy"]:
    try:
        i=sys.argv.index("--"+mode)
        del sys.argv[i]
        break
    except ValueError: pass
os.environ["NO_SCIPY_IMPORT"]="f2py"
if mode=="g3-numpy":
    sys.stderr.write("G3 f2py support is not implemented, yet.\\n")
    sys.exit(1)
elif mode=="2e-numeric":
    from f2py2e import main
elif mode=="2e-numarray":
    sys.argv.append("-DNUMARRAY")
    from f2py2e import main
elif mode=="2e-numpy":
    from numpy.f2py import main
else:
    sys.stderr.write("Unknown mode: " + repr(mode) + "\\n")
    sys.exit(1)
main()
'''%(os.path.basename(sys.executable)))
            f.close()
        return target

    config.add_scripts(generate_f2py_py)

    log.info('F2PY Version %s', config.get_version())

    return config
if __name__ == "__main__":
    # Stand-alone build: construct the configuration, then hand the merged
    # metadata dictionary to distutils' setup().
    config = configuration(top_path='')
    version = config.get_version()
    print('F2PY Version',version)
    config = config.todict()

    # NOTE(review): lexicographic compare of the interpreter version string;
    # adequate for the 2.x series this file targets.
    if sys.version[:3]>='2.3':
        config['download_url'] = "http://cens.ioc.ee/projects/f2py2e/2.x"\
                                 "/F2PY-2-latest.tar.gz"
        config['classifiers'] = [
            'Development Status :: 5 - Production/Stable',
            'Intended Audience :: Developers',
            'Intended Audience :: Science/Research',
            'License :: OSI Approved :: NumPy License',
            'Natural Language :: English',
            'Operating System :: OS Independent',
            'Programming Language :: C',
            'Programming Language :: Fortran',
            'Programming Language :: Python',
            'Topic :: Scientific/Engineering',
            'Topic :: Software Development :: Code Generators',
            ]
    # ("Generaton" typo below is preserved: it is published package metadata.)
    setup(version=version,
          description = "F2PY - Fortran to Python Interface Generaton",
          author = "Pearu Peterson",
          author_email = "pearu@cens.ioc.ee",
          maintainer = "Pearu Peterson",
          maintainer_email = "pearu@cens.ioc.ee",
          license = "BSD",
          platforms = "Unix, Windows (mingw|cygwin), Mac OSX",
          long_description = """\
The Fortran to Python Interface Generator, or F2PY for short, is a
command line tool (f2py) for generating Python C/API modules for
wrapping Fortran 77/90/95 subroutines, accessing common blocks from
Python, and calling Python functions from Fortran (call-backs).
Interfacing subroutines/data from Fortran 90/95 modules is supported.""",
          url = "http://cens.ioc.ee/projects/f2py2e/",
          keywords = ['Fortran','f2py'],
          **config)
| mit |
spcui/autotest | database_legacy/migrations/060_add_planner_additional_parameters.py | 18 | 1463 | UP_SQL = """
CREATE TABLE planner_additional_parameters (
id INT PRIMARY KEY AUTO_INCREMENT,
plan_id INT NOT NULL,
hostname_regex VARCHAR(255) NOT NULL,
param_type VARCHAR(32) NOT NULL,
application_order INT NOT NULL
) ENGINE = InnoDB;
ALTER TABLE planner_additional_parameters
ADD CONSTRAINT planner_additional_parameters_plan_ibfk
FOREIGN KEY (plan_id) REFERENCES planner_plans (id);
ALTER TABLE planner_additional_parameters
ADD CONSTRAINT planner_additional_parameters_unique
UNIQUE KEY (plan_id, hostname_regex, param_type);
CREATE TABLE planner_additional_parameter_values (
id INT PRIMARY KEY AUTO_INCREMENT,
additional_parameter_id INT NOT NULL,
`key` VARCHAR(255) NOT NULL,
value VARCHAR(255) NOT NULL
) ENGINE = InnoDB;
ALTER TABLE planner_additional_parameter_values
ADD CONSTRAINT planner_additional_parameter_values_additional_parameter_ibfk
FOREIGN KEY (additional_parameter_id)
REFERENCES planner_additional_parameters (id);
ALTER TABLE planner_additional_parameter_values
ADD CONSTRAINT planner_additional_parameter_values_unique
UNIQUE KEY (additional_parameter_id, `key`);
"""
DOWN_SQL = """
ALTER TABLE planner_additional_parameter_values
DROP FOREIGN KEY planner_additional_parameter_values_additional_parameter_ibfk;
DROP TABLE planner_additional_parameter_values;
ALTER TABLE planner_additional_parameters
DROP FOREIGN KEY planner_additional_parameters_plan_ibfk;
DROP TABLE planner_additional_parameters;
"""
| gpl-2.0 |
toninhofpt/my-first-blog1 | myven/lib/python3.4/site-packages/pip/_vendor/lockfile/symlinklockfile.py | 536 | 2616 | from __future__ import absolute_import
import os
import time
from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class SymlinkLockFile(LockBase):
    """Lock access to a file using symlink(2).

    The lock is held by whichever process manages to create
    ``self.lock_file`` as a symlink; the symlink target encodes the unique
    per-owner name so ``i_am_locking`` can tell owners apart.
    """

    def __init__(self, path, threaded=True, timeout=None):
        # super(SymlinkLockFile).__init(...)
        LockBase.__init__(self, path, threaded, timeout)
        # split it back!
        # Keep only the basename: the unique name is stored as the symlink
        # *target*, so it does not need to be a real path.
        self.unique_name = os.path.split(self.unique_name)[1]

    def acquire(self, timeout=None):
        """Acquire the lock, polling until success or timeout.

        timeout semantics (inherited from lockfile): None blocks forever,
        a positive value raises LockTimeout after that many seconds, and a
        non-positive value raises AlreadyLocked on the first failed attempt
        past the deadline.
        """
        # Hopefully unnecessary for symlink.
        # try:
        #     open(self.unique_name, "wb").close()
        # except IOError:
        #     raise LockFailed("failed to create %s" % self.unique_name)
        timeout = timeout if timeout is not None else self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout
        while True:
            # Try and create a symbolic link to it.
            try:
                os.symlink(self.unique_name, self.lock_file)
            except OSError:
                # Link creation failed.  Maybe we've double-locked?
                if self.i_am_locking():
                    # Linked to out unique name. Proceed.
                    return
                else:
                    # Otherwise the lock creation failed.
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    # Busy-wait: sleep a tenth of the timeout between polls.
                    time.sleep(timeout / 10 if timeout is not None else 0.1)
            else:
                # Link creation succeeded.  We're good to go.
                return

    def release(self):
        # Only the owner may release; removing the symlink frees the lock.
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        elif not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me" % self.path)
        os.unlink(self.lock_file)

    def is_locked(self):
        return os.path.islink(self.lock_file)

    def i_am_locking(self):
        # Locked by us iff the symlink points at our unique name.
        return (os.path.islink(self.lock_file)
                and os.readlink(self.lock_file) == self.unique_name)

    def break_lock(self):
        # Forcibly remove the lock regardless of owner.
        if os.path.islink(self.lock_file):  # exists && link
            os.unlink(self.lock_file)
| gpl-3.0 |
OctopusInfo/ServerTools | server_shutdown_based_on_network_speed.py | 1 | 1167 | ## Download Monitor v0.1 - March 2012
# Set the interface you wish to monitor, eg: eth0, wlan0, usb0
INTERFACE = "eth0"
# Set the minimum download speed in KB/s that must be achieved.
MINIMUM_SPEED = 15
MAXIMUM_SPEED = 2500 #2.5 MB/s or 20Mb/s
# Set the number of retries to test for the average minimum speed. If the average speed is less
# than the minimum speed for x number of retries, then shutdown.
RETRIES = 3
# Set the interval (in seconds), between retries to test for the minimum speed.
INTERVAL = 5
import os, time
from commands import getoutput
def worker ():
RETRIES_COUNT = RETRIES
while True:
SPEED = int(float(getoutput("ifstat -i %s 1 1 | awk '{print $2}' | sed -n '3p'" % INTERFACE)))
if (SPEED > MAXIMUM_SPEED and RETRIES_COUNT <= 0):
print "!!!shutdown now"
os.system("shutdown -h now")
elif SPEED > MAXIMUM_SPEED:
print "!!!event detected, speed is %d" % (SPEED)
RETRIES_COUNT -= 1
time.sleep(INTERVAL)
else:
print "...idle, speed is %d" % (SPEED)
RETRIES_COUNT = RETRIES
time.sleep(INTERVAL)
worker()
| mit |
alobbs/ansible | lib/ansible/plugins/filter/core.py | 10 | 10867 | # (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import sys
import base64
import json
import os.path
import types
import pipes
import glob
import re
import crypt
import hashlib
import string
from functools import partial
import operator as py_operator
from random import SystemRandom, shuffle
import uuid
import yaml
from jinja2.filters import environmentfilter
from distutils.version import LooseVersion, StrictVersion
from ansible import errors
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.utils.hashing import md5s, checksum_s
from ansible.utils.unicode import unicode_wrap, to_unicode
# Optional dependency: passlib provides portable password hashing; when it is
# missing we fall back to the platform crypt module (see password_hash below).
try:
    import passlib.hash
    HAS_PASSLIB = True
except:
    HAS_PASSLIB = False

# Fixed namespace so the |to_uuid filter yields stable UUIDv5 values.
UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E')
def to_yaml(a, *args, **kw):
    '''Make verbose, human readable yaml'''
    transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, **kw)
    return to_unicode(transformed)

def to_nice_yaml(a, *args, **kw):
    '''Make verbose, human readable yaml'''
    # Block style (no inline flow) with a 4-space indent for readability.
    transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=4, allow_unicode=True, default_flow_style=False, **kw)
    return to_unicode(transformed)
def to_json(a, *args, **kw):
    '''Serialize the value to a compact JSON string.'''
    serialized = json.dumps(a, *args, **kw)
    return serialized
def to_nice_json(a, *args, **kw):
    '''Make verbose, human readable JSON'''
    # python-2.6's json encoder is buggy (can't encode hostvars), so prefer
    # simplejson >= 2 on 2.6 when it is importable.
    if sys.version_info < (2, 7):
        try:
            import simplejson
        except ImportError:
            pass
        else:
            try:
                major = int(simplejson.__version__.split('.')[0])
            except:
                pass
            else:
                if major >= 2:
                    return simplejson.dumps(a, indent=4, sort_keys=True, *args, **kw)
        # Fallback to the to_json filter
        return to_json(a, *args, **kw)
    return json.dumps(a, indent=4, sort_keys=True, *args, **kw)
def failed(*a, **kw):
    ''' Test if task result yields failed

    The first positional argument must be a task-result dictionary; a task
    counts as failed when its return code is non-zero or its 'failed' flag
    is truthy.  Raises AnsibleFilterError for non-dict input.
    '''
    item = a[0]
    # isinstance instead of `type(...) != dict`: also accepts dict subclasses.
    if not isinstance(item, dict):
        raise errors.AnsibleFilterError("|failed expects a dictionary")
    rc = item.get('rc', 0)
    failed = item.get('failed', False)
    # Explicit True/False so templates always receive a strict boolean.
    if rc != 0 or failed:
        return True
    return False
def success(*a, **kw):
    ''' Test if task result yields success '''
    # Success is simply the negation of the |failed test.
    did_fail = failed(*a, **kw)
    return not did_fail
def changed(*a, **kw):
    ''' Test if task result yields changed

    Handles both plain task results (a top-level 'changed' key) and looped
    module results, where per-item dictionaries live under 'results'.
    Raises AnsibleFilterError for non-dict input.
    '''
    item = a[0]
    if not isinstance(item, dict):
        raise errors.AnsibleFilterError("|changed expects a dictionary")
    if 'changed' in item:
        return item['changed']
    results = item.get('results')
    # Some modules return a 'results' key (e.g. with_items loops); any
    # changed sub-result counts.  The non-empty guard fixes an IndexError
    # the original raised on an empty results list.
    if isinstance(results, list) and results and isinstance(results[0], dict):
        return any(result.get('changed', False) for result in results)
    return False
def skipped(*a, **kw):
    ''' Test if task result yields skipped '''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|skipped expects a dictionary")
    # Absent key means the task was not skipped.
    return item.get('skipped', False)
def mandatory(a):
    ''' Make a variable mandatory '''
    # NOTE(review): `a` is a bound parameter, so evaluating it here can never
    # raise NameError; the undefined-variable case is presumably intercepted
    # by Jinja2 before this filter runs.  Confirm against the template error
    # handling before relying on this error message.
    try:
        a
    except NameError:
        raise errors.AnsibleFilterError('Mandatory variable not defined.')
    else:
        return a
def bool(a):
    ''' return a bool for the arg '''
    # Intentionally shadows the builtin `bool` inside this module; other
    # filters that need the real builtin fetch it via __builtins__.
    if a is None or type(a) == bool:
        return a
    if type(a) in types.StringTypes:  # Python 2 only: str and unicode
        a = a.lower()
    # Fixed whitelist of truthy spellings; everything else is False.
    if a in ['yes', 'on', '1', 'true', 1]:
        return True
    else:
        return False
def quote(a):
    ''' return its argument quoted for shell usage '''
    # pipes.quote is the pre-shlex.quote spelling, kept for Python 2 support.
    quoted = pipes.quote(a)
    return quoted
def fileglob(pathname):
    ''' return list of matched files for glob '''
    matches = glob.glob(pathname)
    return matches
def regex(value='', pattern='', ignorecase=False, match_type='search'):
    ''' Expose `re` as a boolean filter using the `search` method by default.
    This is likely only useful for `search` and `match` which already
    have their own filters.
    '''
    flags = re.I if ignorecase else 0
    compiled = re.compile(pattern, flags=flags)
    # The builtin bool is shadowed by the |bool filter in this module, so it
    # must be fetched explicitly from the builtins namespace.
    _bool = __builtins__.get('bool')
    return _bool(getattr(compiled, match_type, 'search')(value))

def match(value, pattern='', ignorecase=False):
    ''' Perform a `re.match` returning a boolean '''
    return regex(value, pattern, ignorecase, 'match')

def search(value, pattern='', ignorecase=False):
    ''' Perform a `re.search` returning a boolean '''
    return regex(value, pattern, ignorecase, 'search')
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
    ''' Perform a `re.sub` returning a string '''
    # Coerce non-string input (Python 2: basestring covers str and unicode).
    if not isinstance(value, basestring):
        value = str(value)
    flags = re.I if ignorecase else 0
    compiled = re.compile(pattern, flags=flags)
    return compiled.sub(replacement, value)
def ternary(value, true_val, false_val):
    ''' value ? true_val : false_val '''
    return true_val if value else false_val
def version_compare(value, version, operator='eq', strict=False):
    ''' Perform a version comparison on a value '''
    # Normalise symbolic operators to operator-module method names.
    op_map = {
        '==': 'eq', '=': 'eq', 'eq': 'eq',
        '<': 'lt', 'lt': 'lt',
        '<=': 'le', 'le': 'le',
        '>': 'gt', 'gt': 'gt',
        '>=': 'ge', 'ge': 'ge',
        '!=': 'ne', '<>': 'ne', 'ne': 'ne'
    }
    # strict=True enforces X.Y[.Z] numbering; LooseVersion accepts more forms.
    if strict:
        Version = StrictVersion
    else:
        Version = LooseVersion
    if operator in op_map:
        operator = op_map[operator]
    else:
        raise errors.AnsibleFilterError('Invalid operator type')
    try:
        method = getattr(py_operator, operator)
        return method(Version(str(value)), Version(str(version)))
    except Exception, e:  # Python 2 except syntax; file predates py3 support
        raise errors.AnsibleFilterError('Version comparison: %s' % e)
@environmentfilter
def rand(environment, end, start=None, step=None):
    # Random value filter: randrange for integer `end`, random choice for
    # sequences.  Uses SystemRandom (os.urandom-backed) rather than the
    # default PRNG.
    r = SystemRandom()
    if isinstance(end, (int, long)):  # `long` exists on Python 2 only
        if not start:
            start = 0
        if not step:
            step = 1
        return r.randrange(start, end, step)
    elif hasattr(end, '__iter__'):
        if start or step:
            raise errors.AnsibleFilterError('start and step can only be used with integer values')
        return r.choice(end)
    else:
        raise errors.AnsibleFilterError('random can only be used on sequences and integers')
def randomize_list(mylist):
    """Return a shuffled shallow copy of *mylist*.

    The input is never mutated.  Anything that cannot be turned into a list
    is returned unchanged, preserving the original best-effort behaviour,
    but the bare `except:` is narrowed so SystemExit/KeyboardInterrupt are
    no longer swallowed.
    """
    try:
        shuffled = list(mylist)
        shuffle(shuffled)
    except Exception:
        return mylist
    return shuffled
def get_hash(data, hashtype='sha1'):
    """Return the hex digest of *data* using *hashtype*.

    Returns None when hashlib does not support the requested algorithm
    (the bare `except:` is narrowed to the ValueError hashlib.new raises).
    """
    try:  # see if hash is supported
        h = hashlib.new(hashtype)
    except ValueError:
        return None
    h.update(data)
    return h.hexdigest()
def get_encrypted_password(password, hashtype='sha512', salt=None):
    """Return *password* hashed in crypt(3) ``$<id>$<salt>$...`` format.

    Supported hashtypes are md5/blowfish/sha256/sha512; anything else
    yields None.  Uses passlib when available, otherwise the platform
    crypt module (unsupported on Mac OS X without passlib).
    """
    # TODO: find a way to construct dynamically from system
    cryptmethod = {
        'md5': '1',
        'blowfish': '2a',
        'sha256': '5',
        'sha512': '6',
    }

    # Bug fix: the lowered name was previously assigned to a misspelled
    # variable ("hastype"), so mixed-case hashtype values never matched.
    hashtype = hashtype.lower()
    if hashtype in cryptmethod:
        if salt is None:
            r = SystemRandom()
            salt = ''.join([r.choice(string.ascii_letters + string.digits) for _ in range(16)])
        if not HAS_PASSLIB:
            if sys.platform.startswith('darwin'):
                raise errors.AnsibleFilterError('|password_hash requires the passlib python module to generate password hashes on Mac OS X/Darwin')
            saltstring = "$%s$%s" % (cryptmethod[hashtype], salt)
            encrypted = crypt.crypt(password, saltstring)
        else:
            cls = getattr(passlib.hash, '%s_crypt' % hashtype)
            encrypted = cls.encrypt(password, salt=salt)
        return encrypted
    return None
def to_uuid(string):
    ''' Map an arbitrary value to a deterministic UUIDv5 in the Ansible namespace. '''
    name = str(string)
    return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, name))
class FilterModule(object):
    ''' Ansible core jinja2 filters '''

    def filters(self):
        """Return the filter-name -> callable mapping exposed to Jinja2."""
        return {
            # base 64
            'b64decode': partial(unicode_wrap, base64.b64decode),
            'b64encode': partial(unicode_wrap, base64.b64encode),

            # uuid
            'to_uuid': to_uuid,

            # json
            'to_json': to_json,
            'to_nice_json': to_nice_json,
            'from_json': json.loads,

            # yaml
            'to_yaml': to_yaml,
            'to_nice_yaml': to_nice_yaml,
            'from_yaml': yaml.safe_load,

            # path
            'basename': partial(unicode_wrap, os.path.basename),
            'dirname': partial(unicode_wrap, os.path.dirname),
            'expanduser': partial(unicode_wrap, os.path.expanduser),
            'realpath': partial(unicode_wrap, os.path.realpath),
            'relpath': partial(unicode_wrap, os.path.relpath),

            # failure testing
            'failed' : failed,
            'success' : success,

            # changed testing
            'changed' : changed,

            # skip testing
            'skipped' : skipped,

            # variable existence
            'mandatory': mandatory,

            # value as boolean
            'bool': bool,

            # quote string for shell usage
            'quote': quote,

            # hash filters
            # md5 hex digest of string
            'md5': md5s,
            # sha1 hex digest of string
            'sha1': checksum_s,
            # checksum of string as used by ansible for checksuming files
            'checksum': checksum_s,
            # generic hashing
            'password_hash': get_encrypted_password,
            'hash': get_hash,

            # file glob
            'fileglob': fileglob,

            # regex
            'match': match,
            'search': search,
            'regex': regex,
            'regex_replace': regex_replace,

            # ? : ;
            'ternary': ternary,

            # list
            # version comparison
            'version_compare': version_compare,

            # random stuff
            'random': rand,
            'shuffle': randomize_list,
        }
| gpl-3.0 |
kernsuite-debian/lofar | CEP/Pipeline/recipes/sip/nodes/deprecated/count_timesteps.py | 1 | 1764 | # LOFAR IMAGING PIPELINE
#
# count_timesteps node
# John Swinbank, 2010
# swinbank@transientskp.org
# ------------------------------------------------------------------------------
import os.path
import sys
from pyrap.tables import taql
from lofarpipe.support.lofarnode import LOFARnodeTCP
from lofarpipe.support.utilities import log_time
class count_timesteps(LOFARnodeTCP):
    """
    Return the first and last values in the TIME column.
    """
    def run(self, infile):
        with log_time(self.logger):
            # Guard clause: a missing dataset is an immediate failure.
            if not os.path.exists(infile):
                self.logger.error("Dataset %s does not exist" % (infile))
                return 1
            self.logger.info("Processing %s" % (infile))
            try:
                # TaQL aggregate queries over the TIME column.
                self.outputs['start_time'] = taql(
                    "CALC MIN([SELECT TIME from %s])" % infile
                )[0]
                self.outputs['end_time'] = taql(
                    "CALC MAX([SELECT TIME from %s])" % infile
                )[0]
            except Exception as e:
                self.logger.error(str(e))
                return 1
            return 0
if __name__ == "__main__":
    # If invoked directly, parse command line arguments for logger information
    # and pass the rest to the run() method defined above
    # --------------------------------------------------------------------------
    jobid, jobhost, jobport = sys.argv[1:4]
    # run_with_stored_arguments() returns the run() exit code (0/1).
    sys.exit(count_timesteps(jobid, jobhost, jobport).run_with_stored_arguments())
| gpl-3.0 |
danieljaouen/ansible | test/units/modules/network/f5/test_bigip_ssl_key.py | 27 | 3798 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_ssl_key import ArgumentSpec
from library.modules.bigip_ssl_key import Parameters
from library.modules.bigip_ssl_key import ModuleManager
from library.modules.bigip_ssl_key import HAS_F5SDK
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_ssl_key import ArgumentSpec
from ansible.modules.network.f5.bigip_ssl_key import Parameters
from ansible.modules.network.f5.bigip_ssl_key import ModuleManager
from ansible.modules.network.f5.bigip_ssl_key import HAS_F5SDK
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}  # memoisation cache: fixture path -> parsed content


def load_fixture(name):
    """Load fixture *name* from the fixtures directory, caching the result.

    The file content is JSON-decoded when possible; non-JSON fixtures
    (e.g. PEM keys) are returned as raw text.  The previous bare `except:`
    is narrowed to the ValueError json.loads actually raises.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON; keep the raw string.
        pass
    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    def test_module_parameters_key(self):
        """Parameters should derive filename/checksum/PEM markers from the key content."""
        key_content = load_fixture('create_insecure_key1.key')
        args = dict(
            content=key_content,
            name="cert1",
            partition="Common",
            state="present",
            password='password',
            server='localhost',
            user='admin'
        )
        p = Parameters(params=args)
        assert p.name == 'cert1'
        # The on-device filename is the object name plus a .key suffix.
        assert p.key_filename == 'cert1.key'
        assert '-----BEGIN RSA PRIVATE KEY-----' in p.content
        assert '-----END RSA PRIVATE KEY-----' in p.content
        # SHA1 of the fixture key content.
        assert p.key_checksum == '91bdddcf0077e2bb2a0258aae2ae3117be392e83'
        assert p.state == 'present'
        assert p.user == 'admin'
        assert p.server == 'localhost'
        assert p.password == 'password'
        assert p.partition == 'Common'
class TestModuleManager(unittest.TestCase):
    def setUp(self):
        self.spec = ArgumentSpec()

    def test_import_key_no_key_passphrase(self, *args):
        """Importing a key without a passphrase should report changed=True."""
        set_module_args(dict(
            name='foo',
            content=load_fixture('cert1.key'),
            state='present',
            password='password',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        cm = ModuleManager(module=module)
        # exists() is consulted twice: absent before create, present after.
        cm.exists = Mock(side_effect=[False, True])
        cm.create_on_device = Mock(return_value=True)

        results = cm.exec_module()

        assert results['changed'] is True
| gpl-3.0 |
techdragon/django | tests/validation/test_picklable.py | 576 | 2010 | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):

    def test_validationerror_is_picklable(self):
        """Pickling must preserve messages, codes and error-list structure."""

        def roundtrip(err):
            # Serialize and immediately deserialize the exception.
            return pickle.loads(pickle.dumps(err))

        original = ValidationError('a', code='something')
        unpickled = roundtrip(original)
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)

        original = ValidationError('a', code='something')
        unpickled = roundtrip(ValidationError(original))
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)

        original = ValidationError(['a', 'b'])
        unpickled = roundtrip(original)
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)

        original = ValidationError(['a', 'b'])
        unpickled = roundtrip(ValidationError(original))
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)

        original = ValidationError([ValidationError('a'), ValidationError('b')])
        unpickled = roundtrip(original)
        self.assertIs(unpickled.args[0][0], unpickled.error_list[0])
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)

        message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
        original = ValidationError(message_dict)
        unpickled = roundtrip(original)
        self.assertEqual(unpickled.message_dict, message_dict)
| bsd-3-clause |
Laimiux/mydeatree | ideas/forms.py | 1 | 2215 | from django import forms
from django.forms import ModelForm
from ideas.models import Favorite, Idea
# A simple contact form with four fields.
class ContributorForm(forms.Form):
    # Minimal form collecting a contributor's e-mail address.
    email = forms.EmailField()
class IdeaForm(forms.ModelForm):
    """ModelForm for Idea with custom labels, length limits and validation."""

    def __init__(self, *args, **kwargs):
        # `owner` may be supplied by the view; it is not a form field, so it
        # must be popped before calling the parent constructor.
        owner = kwargs.pop("owner", None)
        super(IdeaForm, self).__init__(*args, **kwargs)
        self.fields['title'].required = True
        self.fields['title'].max_length = 30
        self.fields['text'].error_messages = {'required' : 'Please enter text'}
        self.fields['title'].error_messages = {'required': 'Please enter title'}
        self.fields['text'].max_length = 140
        self.fields['public'].label = 'Would you like to make the idea public?'

    def clean_title(self):
        # Titles must be at least 5 characters long.
        title = self.cleaned_data['title']
        if len(title) < 5:
            raise forms.ValidationError("Title needs to have more than 4 characters")
        return title

    def clean_text(self):
        # NOTE(review): `num_words` actually counts characters, not words;
        # the error message agrees with the character semantics.
        text = self.cleaned_data['text']
        num_words = len(text)
        if num_words < 10:
            raise forms.ValidationError("Text needs to have more than 9 characters!")
        return text

    class Meta:
        model = Idea
        fields = ('title', 'text', 'public')
        widgets = {
            'text': forms.Textarea(attrs={'cols': 20, 'rows': 10, 'maxlength': 140}),
        }
class FavoriteForm(forms.ModelForm):
    """ModelForm linking an owner to a favourited idea."""
    class Meta:
        model = Favorite
        fields = ('favorite_idea', 'owner')
#===============================================================================
# class CategoryForm(forms.ModelForm):
#
# def __init__(self, *args, **kwargs):
# super(CategoryForm, self).__init__(*args, **kwargs)
# self.fields['name'].max_length = 30
#
# def clean_name(self):
# text = self.cleaned_data['name']
# if len(text) < 4:
# raise forms.ValidationError("Category name has to be more than 3 letters")
# return text
#
# class Meta:
# model = Category
# fields = ['name']
#=============================================================================== | bsd-3-clause |
erdc/proteus | proteus/mprans/SW2DCV.py | 1 | 96791 | from __future__ import print_function
from __future__ import division
from builtins import str
from builtins import range
from past.utils import old_div
import proteus
from proteus import FemTools
from proteus import LinearAlgebraTools as LAT
from proteus.mprans.cSW2DCV import *
import numpy as np
from proteus.Transport import OneLevelTransport, TC_base, NonlinearEquation
from proteus.Transport import Quadrature, logEvent, memory, BackwardEuler
from proteus.Transport import FluxBoundaryConditions, Comm, DOFBoundaryConditions
from proteus.Transport import cfemIntegrals, globalMax, SparseMat
from . import cArgumentsDict
class NumericalFlux(proteus.NumericalFlux.ShallowWater_2D):
    # This scheme has no interior skeleton flux terms.
    hasInterior = False

    def __init__(self, vt, getPointwiseBoundaryConditions,
                 getAdvectiveFluxBoundaryConditions,
                 getDiffusiveFluxBoundaryConditions,
                 getPeriodicBoundaryConditions=None,
                 h_eps=1.0e-8,
                 tol_u=1.0e-8):
        # Delegate to the 2-D shallow-water flux, then override the penalty
        # and adjoint-boundary settings used by this discretisation.
        proteus.NumericalFlux.ShallowWater_2D.__init__(self, vt, getPointwiseBoundaryConditions,
                                                       getAdvectiveFluxBoundaryConditions,
                                                       getDiffusiveFluxBoundaryConditions,
                                                       getPeriodicBoundaryConditions,
                                                       h_eps,
                                                       tol_u)
        self.penalty_constant = 2.0
        self.includeBoundaryAdjoint = True
        self.boundaryAdjoint_sigma = 1.0
        self.hasInterior = False
class RKEV(proteus.TimeIntegration.SSP):
from proteus import TimeIntegration
"""
Wrapper for SSPRK time integration using EV
... more to come ...
"""
def __init__(self, transport, timeOrder=1, runCFL=0.1, integrateInterpolationPoints=False):
BackwardEuler.__init__(
self, transport, integrateInterpolationPoints=integrateInterpolationPoints)
self.trasport = transport
self.runCFL = runCFL
self.dtLast = None
self.dtRatioMax = 2.0
self.isAdaptive = True
# About the cfl
assert hasattr(
transport, 'edge_based_cfl'), "No edge based cfl defined"
self.edge_based_cfl = transport.edge_based_cfl
# Stuff particular for SSP
self.timeOrder = timeOrder # order of approximation
self.nStages = timeOrder # number of stages total
self.lstage = 0 # last stage completed
# storage vectors (at old time step)
self.u_dof_last = {}
# per component lstage values
self.u_dof_lstage = {}
for ci in range(self.nc):
self.u_dof_last[ci] = transport.u[ci].dof.copy()
self.u_dof_lstage[ci] = transport.u[ci].dof.copy()
    def choose_dt(self):
        """Pick dt from the edge-based CFL condition, capping growth vs. dtLast."""
        maxCFL = 1.0e-6
        # COMPUTE edge_based_cfl: fetch the CSR data of the consistency
        # matrices C and their transposes needed by the C++ kernel.
        rowptr_cMatrix, colind_cMatrix, Cx = self.transport.cterm_global[0].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, Cy = self.transport.cterm_global[1].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, CTx = self.transport.cterm_global_transpose[0].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, CTy = self.transport.cterm_global_transpose[1].getCSRrepresentation()
        numDOFsPerEqn = self.transport.u[0].dof.size
        # Pack arguments for the C++ kernel that evaluates the per-edge CFL
        # (fills self.transport.edge_based_cfl in place).
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict["g"] = self.transport.coefficients.g
        argsDict["numDOFsPerEqn"] = numDOFsPerEqn
        argsDict["lumped_mass_matrix"] = self.transport.ML
        argsDict["h_dof_old"] = self.transport.u[0].dof
        argsDict["hu_dof_old"] = self.transport.u[1].dof
        argsDict["hv_dof_old"] = self.transport.u[2].dof
        argsDict["b_dof"] = self.transport.coefficients.b.dof
        argsDict["csrRowIndeces_DofLoops"] = rowptr_cMatrix
        argsDict["csrColumnOffsets_DofLoops"] = colind_cMatrix
        argsDict["hEps"] = self.transport.hEps
        argsDict["Cx"] = Cx
        argsDict["Cy"] = Cy
        argsDict["CTx"] = CTx
        argsDict["CTy"] = CTy
        argsDict["dLow"] = self.transport.dLow
        argsDict["run_cfl"] = self.runCFL
        argsDict["edge_based_cfl"] = self.transport.edge_based_cfl
        argsDict["debug"] = 0
        adjusted_maxCFL = self.transport.sw2d.calculateEdgeBasedCFL(argsDict)
        # Global (across MPI ranks) maximum CFL governs the step size.
        maxCFL = max(maxCFL, max(adjusted_maxCFL,
                                 globalMax(self.edge_based_cfl.max())))
        self.dt = old_div(self.runCFL, maxCFL)
        if self.dtLast is None:
            self.dtLast = self.dt
        # Limit step growth to at most dtRatioMax per step.
        if old_div(self.dt, self.dtLast) > self.dtRatioMax:
            self.dt = self.dtLast * self.dtRatioMax
        self.t = self.tLast + self.dt
        # Ignoring dif. time step levels
        self.substeps = [self.t for i in range(self.nStages)]
        assert (self.dt > 1E-8), ("Time step is probably getting too small: ", self.dt, adjusted_maxCFL)
def initialize_dt(self, t0, tOut, q):
"""
Modify self.dt
"""
self.tLast = t0
self.choose_dt()
self.t = t0 + self.dt
def setCoefficients(self):
"""
beta are all 1's here
mwf not used right now
"""
# Not needed for an implementation when alpha and beta are not used
    def updateStage(self):
        """
        Need to switch to use coefficients

        Advance one Runge-Kutta stage.  For SSP33/SSP22 each stage value
        is a convex combination of the current solution
        (``transport.u[ci].dof``) and the start-of-step solution
        (``u_dof_last``); the result is pushed into the transport
        model's ``h_dof_old`` / ``hu_dof_old`` / ``hv_dof_old`` arrays,
        which the next stage's residual evaluation reads.
        """
        # stage counter is incremented first: stage k runs with lstage == k
        self.lstage += 1
        assert self.timeOrder in [1, 2, 3]
        assert self.lstage > 0 and self.lstage <= self.timeOrder
        # print "within update stage...: ", self.lstage
        if self.timeOrder == 3:
            if self.lstage == 1:
                # stage 1: u(1) = u + dt*L(u); just snapshot the update
                for ci in range(self.nc):
                    self.u_dof_lstage[ci][:] = self.transport.u[ci].dof
                # update u_dof_old
                self.transport.h_dof_old[:] = self.u_dof_lstage[0]
                self.transport.hu_dof_old[:] = self.u_dof_lstage[1]
                self.transport.hv_dof_old[:] = self.u_dof_lstage[2]
                logEvent("First stage of SSP33 method finished", level=4)
            elif self.lstage == 2:
                # stage 2: convex combination (1/4)*u(current) + (3/4)*u_n
                for ci in range(self.nc):
                    self.u_dof_lstage[ci][:] = self.transport.u[ci].dof
                    self.u_dof_lstage[ci] *= old_div(1., 4.)
                    self.u_dof_lstage[ci] += 3. / 4. * self.u_dof_last[ci]
                # update u_dof_old
                self.transport.h_dof_old[:] = self.u_dof_lstage[0]
                self.transport.hu_dof_old[:] = self.u_dof_lstage[1]
                self.transport.hv_dof_old[:] = self.u_dof_lstage[2]
                logEvent("Second stage of SSP33 method finished", level=4)
            else:
                # stage 3: (2/3)*u(current) + (1/3)*u_n, written back as
                # the end-of-step solution; *_dof_old is reset to u_n
                for ci in range(self.nc):
                    self.u_dof_lstage[ci][:] = self.transport.u[ci].dof
                    self.u_dof_lstage[ci][:] *= old_div(2.0, 3.0)
                    self.u_dof_lstage[ci][:] += 1.0 / 3.0 * self.u_dof_last[ci]
                    # update solution to u[0].dof
                    self.transport.u[ci].dof[:] = self.u_dof_lstage[ci]
                # update u_dof_old
                self.transport.h_dof_old[:] = self.u_dof_last[0]
                self.transport.hu_dof_old[:] = self.u_dof_last[1]
                self.transport.hv_dof_old[:] = self.u_dof_last[2]
                logEvent("Third stage of SSP33 method finished", level=4)
        elif self.timeOrder == 2:
            if self.lstage == 1:
                logEvent("First stage of SSP22 method finished", level=4)
                for ci in range(self.nc):
                    self.u_dof_lstage[ci][:] = self.transport.u[ci].dof
                # Update u_dof_old
                self.transport.h_dof_old[:] = self.u_dof_lstage[0]
                self.transport.hu_dof_old[:] = self.u_dof_lstage[1]
                self.transport.hv_dof_old[:] = self.u_dof_lstage[2]
            else:
                # stage 2: (1/2)*u(current) + (1/2)*u_n, written back
                for ci in range(self.nc):
                    self.u_dof_lstage[ci][:] = self.transport.u[ci].dof
                    self.u_dof_lstage[ci][:] *= old_div(1., 2.)
                    self.u_dof_lstage[ci][:] += 1. / 2. * self.u_dof_last[ci]
                    # update solution to u[0].dof
                    self.transport.u[ci].dof[:] = self.u_dof_lstage[ci]
                # Update u_dof_old
                self.transport.h_dof_old[:] = self.u_dof_last[0]
                self.transport.hu_dof_old[:] = self.u_dof_last[1]
                self.transport.hv_dof_old[:] = self.u_dof_last[2]
                logEvent("Second stage of SSP22 method finished", level=4)
        else:
            # forward Euler: single stage, nothing to combine
            assert self.timeOrder == 1
            logEvent("FE method finished", level=4)
def initializeTimeHistory(self, resetFromDOF=True):
"""
Push necessary information into time history arrays
"""
for ci in range(self.nc):
self.u_dof_last[ci][:] = self.transport.u[ci].dof[:]
def updateTimeHistory(self, resetFromDOF=False):
"""
assumes successful step has been taken
"""
self.t = self.tLast + self.dt
for ci in range(self.nc):
self.u_dof_last[ci][:] = self.transport.u[ci].dof[:]
self.lstage = 0
self.dtLast = self.dt
self.tLast = self.t
def generateSubsteps(self, tList):
"""
create list of substeps over time values given in tList. These correspond to stages
"""
self.substeps = []
tLast = self.tLast
for t in tList:
dttmp = t - tLast
self.substeps.extend([tLast + dttmp for i in range(self.nStages)])
tLast = t
def resetOrder(self, order):
"""
initialize data structures for stage updges
"""
self.timeOrder = order # order of approximation
self.nStages = order # number of stages total
self.lstage = 0 # last stage completed
# storage vectors
# per component stage values, list with array at each stage
self.u_dof_lstage = {}
for ci in range(self.nc):
self.u_dof_lstage[ci] = self.transport.u[ci].dof.copy()
self.substeps = [self.t for i in range(self.nStages)]
def setFromOptions(self, nOptions):
"""
allow classes to set various numerical parameters
"""
if 'runCFL' in dir(nOptions):
self.runCFL = nOptions.runCFL
flags = ['timeOrder']
for flag in flags:
if flag in dir(nOptions):
val = getattr(nOptions, flag)
setattr(self, flag, val)
if flag == 'timeOrder':
self.resetOrder(self.timeOrder)
class Coefficients(proteus.TransportCoefficients.TC_base):
    """
    The coefficients for the shallow water equations

    Declares the PDE structure (mass/advection/diffusion/potential/
    reaction dictionaries) for the 2D shallow water system with
    components (h, h_u, h_v) and stores the physical and numerical
    parameters consumed by the optimized kernels.
    """
    def __init__(self,
                 bathymetry,
                 g=9.81,
                 nd=2,
                 sd=True,
                 movingDomain=False,
                 useRBLES=0.0,
                 useMetrics=0.0,
                 modelIndex=0,
                 cE=1.0,
                 LUMPED_MASS_MATRIX=1,
                 LINEAR_FRICTION=0,
                 mannings=0.,
                 forceStrongConditions=True,
                 constrainedDOFs=None):
        """
        ``bathymetry`` is either None (the bottom is then taken from the
        mesh z coordinate in :meth:`initializeMesh`) or a sequence whose
        first entry is a callable evaluated at the node coordinates
        ``[x, y]``.  The remaining arguments are stored on ``self``.
        """
        self.forceStrongConditions = forceStrongConditions
        self.constrainedDOFs = constrainedDOFs
        self.bathymetry = bathymetry
        self.useRBLES = useRBLES
        self.useMetrics = useMetrics
        self.sd = sd
        self.g = g
        self.nd = nd
        self.cE = cE
        self.LUMPED_MASS_MATRIX = LUMPED_MASS_MATRIX
        self.LINEAR_FRICTION = LINEAR_FRICTION
        self.mannings = mannings
        self.modelIndex = modelIndex
        mass = {}
        advection = {}
        diffusion = {}
        potential = {}
        reaction = {}
        hamiltonian = {}
        # NOTE(review): the PDE structure is only declared for nd == 2;
        # for any other nd, variableNames/sdInfo below are undefined and
        # the TC_base.__init__ call raises NameError -- confirm that only
        # 2D is intended to be supported.
        if nd == 2:
            variableNames = ['h', 'h_u', 'h_v']
            mass = {0: {0: 'linear'},
                    1: {0: 'linear', 1: 'linear'},
                    2: {0: 'linear', 2: 'linear'}}
            advection = {0: {0: 'nonlinear',
                             1: 'nonlinear',
                             2: 'nonlinear'},
                         1: {0: 'nonlinear',
                             1: 'nonlinear',
                             2: 'nonlinear'},
                         2: {0: 'nonlinear',
                             1: 'nonlinear',
                             2: 'nonlinear'}}
            diffusion = {1: {1: {1: 'constant'}, 2: {2: 'constant'}},
                         2: {2: {2: 'constant'}, 1: {1: 'constant'}}}
            # sparse diffusion tensor layout (CSR-style row/column info)
            sdInfo = {(1, 1): (np.array([0, 1, 2], dtype='i'),
                               np.array([0, 1], dtype='i')),
                      (1, 2): (np.array([0, 0, 1], dtype='i'),
                               np.array([0], dtype='i')),
                      (2, 2): (np.array([0, 1, 2], dtype='i'),
                               np.array([0, 1], dtype='i')),
                      (2, 1): (np.array([0, 1, 1], dtype='i'),
                               np.array([1], dtype='i'))}
            potential = {1: {1: 'u'},
                         2: {2: 'u'}}
            reaction = {1: {0: 'linear'},
                        2: {0: 'linear'}}
            TC_base.__init__(self,
                             3,  # Number of components
                             mass,
                             advection,
                             diffusion,
                             potential,
                             reaction,
                             hamiltonian,
                             variableNames,
                             sparseDiffusionTensors=sdInfo,
                             useSparseDiffusion=sd,
                             movingDomain=movingDomain)
            # components 1 and 2 (h_u, h_v) form the momentum vector
            self.vectorComponents = [1, 2]
    def attachModels(self, modelList):
        # keep a reference to the level model this coefficient drives
        self.model = modelList[self.modelIndex]
        # pass
    def initializeMesh(self, mesh):
        # Fill the bathymetry DOFs: from the mesh z coordinate when no
        # bathymetry callable was given, otherwise by evaluating the
        # callable at the node (x, y) coordinates.
        # NOTE(review): self.b is attached externally (by
        # LevelModel.__init__) before this is called -- verify ordering.
        x = mesh.nodeArray[:, 0]
        y = mesh.nodeArray[:, 1]
        if self.bathymetry is None:
            self.b.dof = mesh.nodeArray[:, 2].copy()
        else:
            self.b.dof = self.bathymetry[0]([x, y])
    def initializeElementQuadrature(self, t, cq):
        """No per-element quadrature data needed."""
        pass
    def initializeElementBoundaryQuadrature(self, t, cebq, cebq_global):
        """No per-element-boundary quadrature data needed."""
        pass
    def initializeGlobalExteriorElementBoundaryQuadrature(self, t, cebqe):
        """No exterior-boundary quadrature data needed."""
        pass
    def updateToMovingDomain(self, t, c):
        """Moving-domain terms are handled elsewhere; nothing to do."""
        pass
    def evaluate(self, t, c):
        """Coefficients are evaluated in the optimized kernels instead."""
        pass
    def preStep(self, t, firstStep=False):
        # On the very first step, build the boundary DOF index lists.
        if firstStep:
            # Init boundaryIndex
            assert self.model.boundaryIndex is None and self.model.normalx is not None, "Check boundaryIndex, normalx and normaly"
            self.model.boundaryIndex = []
            # a DOF is on the boundary iff its stored normal is nonzero
            for i in range(self.model.normalx.size):
                if self.model.normalx[i] != 0 or self.model.normaly[i] != 0:
                    self.model.boundaryIndex.append(i)
            self.model.boundaryIndex = np.array(self.model.boundaryIndex)
            # Init reflectingBoundaryIndex for partial reflecting boundaries
            # (nodes tagged with material type 99)
            self.model.reflectingBoundaryIndex = np.where(np.isin(self.model.mesh.nodeMaterialTypes, 99))[0].tolist()
            self.model.reflectingBoundaryIndex = np.array(self.model.reflectingBoundaryIndex)
        #
        # copy (not alias) the current solution into the old-step arrays
        self.model.h_dof_old[:] = self.model.u[0].dof
        self.model.hu_dof_old[:] = self.model.u[1].dof
        self.model.hv_dof_old[:] = self.model.u[2].dof
    def postStep(self, t, firstStep=False):
        """Nothing to do after a step."""
        pass
class LevelModel(proteus.Transport.OneLevelTransport):
nCalls = 0
    def __init__(self,
                 uDict,
                 phiDict,
                 testSpaceDict,
                 matType,
                 dofBoundaryConditionsDict,
                 dofBoundaryConditionsSetterDict,
                 coefficients,
                 elementQuadrature,
                 elementBoundaryQuadrature,
                 fluxBoundaryConditionsDict=None,
                 advectiveFluxBoundaryConditionsSetterDict=None,
                 diffusiveFluxBoundaryConditionsSetterDictDict=None,
                 stressTraceBoundaryConditionsSetterDictDict=None,
                 stabilization=None,
                 shockCapturing=None,
                 conservativeFluxDict=None,
                 numericalFluxType=None,
                 TimeIntegrationClass=None,
                 massLumping=False,
                 reactionLumping=False,
                 options=None,
                 name='SW2DCV',
                 reuse_trial_and_test_quadrature=True,
                 sd=True,
                 movingDomain=False,
                 bdyNullSpace=False):
        """
        Allocate storage and set up data structures for one level of the
        SW2DCV transport model.

        The signature mirrors proteus' OneLevelTransport constructor;
        most arguments are stored on ``self`` and consumed by the
        optimized kernels (``cSW2DCV_base``) constructed near the end.
        """
        self.bdyNullSpace = bdyNullSpace
        self.inf_norm_hu = []  # To test 1D well balancing
        self.secondCallCalculateResidual = 0
        self.postProcessing = False  # this is a hack to test the effect of post-processing
        #
        # set the objects describing the method and boundary conditions
        #
        self.movingDomain = movingDomain
        self.tLast_mesh = None
        #
        # cek todo clean up these flags in the optimized version
        self.bcsTimeDependent = options.bcsTimeDependent
        self.bcsSet = False
        self.name = name
        self.sd = sd
        self.lowmem = True
        self.timeTerm = True  # allow turning off the time derivative
        self.testIsTrial = True
        self.phiTrialIsTrial = True
        self.u = uDict
        self.Hess = False
        # P2 spaces need Hessian information
        if isinstance(self.u[0].femSpace, FemTools.C0_AffineQuadraticOnSimplexWithNodalBasis):
            self.Hess = True
        self.ua = {}  # analytical solutions
        self.phi = phiDict
        self.dphi = {}
        self.matType = matType
        # mwf try to reuse test and trial information across components if spaces are the same
        self.reuse_test_trial_quadrature = reuse_trial_and_test_quadrature  # True#False
        if self.reuse_test_trial_quadrature:
            for ci in range(1, coefficients.nc):
                assert self.u[ci].femSpace.__class__.__name__ == self.u[0].femSpace.__class__.__name__, "to reuse_test_trial_quad all femSpaces must be the same!"
        # Simplicial Mesh
        # assume the same mesh for all components for now
        self.mesh = self.u[0].femSpace.mesh
        self.testSpace = testSpaceDict
        self.dirichletConditions = dofBoundaryConditionsDict
        # explicit Dirichlet conditions for now, no Dirichlet BC constraints
        self.dirichletNodeSetList = None
        self.coefficients = coefficients
        # cek hack? give coefficients a bathymetry array
        import copy
        self.coefficients.b = self.u[0].copy()
        self.coefficients.b.name = 'b'
        self.coefficients.b.dof.fill(0.0)
        #
        # initializeMesh fills coefficients.b.dof from the mesh/bathymetry
        self.coefficients.initializeMesh(self.mesh)
        self.nc = self.coefficients.nc
        self.stabilization = stabilization
        self.shockCapturing = shockCapturing
        # no velocity post-processing for now
        self.conservativeFlux = conservativeFluxDict
        self.fluxBoundaryConditions = fluxBoundaryConditionsDict
        self.advectiveFluxBoundaryConditionsSetterDict = advectiveFluxBoundaryConditionsSetterDict
        self.diffusiveFluxBoundaryConditionsSetterDictDict = diffusiveFluxBoundaryConditionsSetterDictDict
        # determine if we need element boundary storage
        self.elementBoundaryIntegrals = {}
        for ci in range(self.nc):
            self.elementBoundaryIntegrals[ci] = ((self.conservativeFlux is not None)
                                                 or (numericalFluxType is not None)
                                                 or (self.fluxBoundaryConditions[ci] == 'outFlow')
                                                 or (self.fluxBoundaryConditions[ci] == 'mixedFlow')
                                                 or (self.fluxBoundaryConditions[ci] == 'setFlow'))
        #
        # calculate some dimensions
        #
        # assume same space dim for all variables
        self.nSpace_global = self.u[0].femSpace.nSpace_global
        self.nDOF_trial_element = [u_j.femSpace.max_nDOF_element for u_j in list(self.u.values())]
        self.nDOF_phi_trial_element = [phi_k.femSpace.max_nDOF_element for phi_k in list(self.phi.values())]
        self.n_phi_ip_element = [phi_k.femSpace.referenceFiniteElement.interpolationConditions.nQuadraturePoints for phi_k in list(self.phi.values())]
        self.nDOF_test_element = [femSpace.max_nDOF_element for femSpace in list(self.testSpace.values())]
        self.nFreeDOF_global = [dc.nFreeDOF_global for dc in list(self.dirichletConditions.values())]
        self.nVDOF_element = sum(self.nDOF_trial_element)
        self.nFreeVDOF_global = sum(self.nFreeDOF_global)
        #
        NonlinearEquation.__init__(self, self.nFreeVDOF_global)
        #
        # build the quadrature point dictionaries from the input (this
        # is just for convenience so that the input doesn't have to be
        # complete)
        #
        elementQuadratureDict = {}
        elemQuadIsDict = isinstance(elementQuadrature, dict)
        if elemQuadIsDict:  # set terms manually
            for I in self.coefficients.elementIntegralKeys:
                if I in elementQuadrature:
                    elementQuadratureDict[I] = elementQuadrature[I]
                else:
                    elementQuadratureDict[I] = elementQuadrature['default']
        else:
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[I] = elementQuadrature
        if self.shockCapturing is not None:
            for ci in self.shockCapturing.components:
                if elemQuadIsDict:
                    if ('numDiff', ci, ci) in elementQuadrature:
                        elementQuadratureDict[(
                            'numDiff', ci, ci)] = elementQuadrature[('numDiff', ci, ci)]
                    else:
                        elementQuadratureDict[(
                            'numDiff', ci, ci)] = elementQuadrature['default']
                else:
                    elementQuadratureDict[(
                        'numDiff', ci, ci)] = elementQuadrature
        # lumping replaces the mass/reaction quadrature with a nodal
        # (Lobatto) rule
        if massLumping:
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('m', ci)] = Quadrature.SimplexLobattoQuadrature(
                    self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(
                    'stab',) + I[1:]] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        if reactionLumping:
            for ci in list(self.coefficients.mass.keys()):
                elementQuadratureDict[('r', ci)] = Quadrature.SimplexLobattoQuadrature(
                    self.nSpace_global, 1)
            for I in self.coefficients.elementIntegralKeys:
                elementQuadratureDict[(
                    'stab',) + I[1:]] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        elementBoundaryQuadratureDict = {}
        if isinstance(elementBoundaryQuadrature, dict):  # set terms manually
            for I in self.coefficients.elementBoundaryIntegralKeys:
                if I in elementBoundaryQuadrature:
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature[I]
                else:
                    elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature['default']
        else:
            for I in self.coefficients.elementBoundaryIntegralKeys:
                elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature
        #
        # find the union of all element quadrature points and
        # build a quadrature rule for each integral that has a
        # weight at each point in the union
        # mwf include tag telling me which indices are which quadrature rule?
        (self.elementQuadraturePoints, self.elementQuadratureWeights,
         self.elementQuadratureRuleIndeces) = Quadrature.buildUnion(elementQuadratureDict)
        self.nQuadraturePoints_element = self.elementQuadraturePoints.shape[0]
        self.nQuadraturePoints_global = self.nQuadraturePoints_element * self.mesh.nElements_global
        #
        # Repeat the same thing for the element boundary quadrature
        #
        (self.elementBoundaryQuadraturePoints,
         self.elementBoundaryQuadratureWeights,
         self.elementBoundaryQuadratureRuleIndeces) = Quadrature.buildUnion(elementBoundaryQuadratureDict)
        self.nElementBoundaryQuadraturePoints_elementBoundary = self.elementBoundaryQuadraturePoints.shape[0]
        self.nElementBoundaryQuadraturePoints_global = (self.mesh.nElements_global
                                                        * self.mesh.nElementBoundaries_element
                                                        * self.nElementBoundaryQuadraturePoints_elementBoundary)
        #
        # simplified allocations for test==trial and also check if space is mixed or not
        #
        self.q = {}
        self.ebq = {}
        self.ebq_global = {}
        self.ebqe = {}
        self.phi_ip = {}
        # To compute edge_based_cfl from within choose_dt of RKEV
        self.edge_based_cfl = np.zeros(self.u[0].dof.shape)
        self.dLow = None
        # Old DOFs
        # NOTE (Mql): It is important to link h_dof_old by reference with u[0].dof (and so on).
        # This is because I need the initial condition to be passed to them as well (before calling calculateResidual).
        # During preStep I change this and copy the values instead of keeping the reference.
        self.h_dof_old = None
        self.hu_dof_old = None
        self.hv_dof_old = None
        # Vector for mass matrix
        self.check_positivity_water_height = True
        # mesh
        self.q['x'] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, 3), 'd')
        self.ebqe['x'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, 3), 'd')
        self.ebq_global[('totalFlux', 0)] = np.zeros((self.mesh.nElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebq_global[('velocityAverage', 0)] = np.zeros((self.mesh.nElementBoundaries_global,
                                                            self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.q[('dV_u', 0)] = (old_div(1.0, self.mesh.nElements_global)) * np.ones((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('dV_u', 1)] = (old_div(1.0, self.mesh.nElements_global)) * np.ones((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('dV_u', 2)] = (old_div(1.0, self.mesh.nElements_global)) * np.ones((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q['dV'] = self.q[('dV_u',0)]
        self.q[('u', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('u', 1)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('u', 2)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        # mass terms alias the solution arrays (linear mass in h, hu, hv)
        self.q[('m', 0)] = self.q[('u', 0)]
        self.q[('m', 1)] = self.q[('u', 1)]
        self.q[('m', 2)] = self.q[('u', 2)]
        self.q[('m_last', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_last', 1)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_last', 2)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_tmp', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_tmp', 1)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('m_tmp', 2)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.q[('f', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        self.q[('velocity', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
        self.q[('cfl', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
        self.ebqe[('u', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('u', 1)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('u', 2)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('advectiveFlux_bc_flag', 0)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('advectiveFlux_bc_flag', 1)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('advectiveFlux_bc_flag', 2)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('diffusiveFlux_bc_flag', 1, 1)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('diffusiveFlux_bc_flag', 2, 2)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
        self.ebqe[('advectiveFlux_bc', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('advectiveFlux_bc', 1)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('advectiveFlux_bc', 2)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('diffusiveFlux_bc', 1, 1)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('penalty')] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('diffusiveFlux_bc', 2, 2)] = np.zeros(
            (self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.ebqe[('velocity', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global,
                                               self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.ebqe[('velocity', 1)] = np.zeros((self.mesh.nExteriorElementBoundaries_global,
                                               self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.ebqe[('velocity', 2)] = np.zeros((self.mesh.nExteriorElementBoundaries_global,
                                               self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
        self.points_elementBoundaryQuadrature = set()
        self.scalars_elementBoundaryQuadrature = set([('u', ci) for ci in range(self.nc)])
        self.vectors_elementBoundaryQuadrature = set()
        self.tensors_elementBoundaryQuadrature = set()
        #
        # show quadrature
        #
        logEvent("Dumping quadrature shapes for model %s" % self.name, level=9)
        logEvent("Element quadrature array (q)", level=9)
        for (k, v) in list(self.q.items()):
            logEvent(str((k, v.shape)), level=9)
        logEvent("Element boundary quadrature (ebq)", level=9)
        for (k, v) in list(self.ebq.items()):
            logEvent(str((k, v.shape)), level=9)
        logEvent("Global element boundary quadrature (ebq_global)", level=9)
        for (k, v) in list(self.ebq_global.items()):
            logEvent(str((k, v.shape)), level=9)
        logEvent("Exterior element boundary quadrature (ebqe)", level=9)
        for (k, v) in list(self.ebqe.items()):
            logEvent(str((k, v.shape)), level=9)
        logEvent("Interpolation points for nonlinear diffusion potential (phi_ip)", level=9)
        for (k, v) in list(self.phi_ip.items()):
            logEvent(str((k, v.shape)), level=9)
        #
        # allocate residual and Jacobian storage
        #
        #
        # allocate residual and Jacobian storage
        #
        # NOTE(review): 'ci' below is the leftover loop variable from the
        # elementBoundaryIntegrals loop above (== nc - 1), so this list has a
        # single entry; a per-component list was probably intended -- confirm.
        self.elementResidual = [np.zeros(
            (self.mesh.nElements_global,
             self.nDOF_test_element[ci]),
            'd')]
        self.inflowBoundaryBC = {}
        self.inflowBoundaryBC_values = {}
        self.inflowFlux = {}
        for cj in range(self.nc):
            self.inflowBoundaryBC[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global,), 'i')
            self.inflowBoundaryBC_values[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nDOF_trial_element[cj]), 'd')
            self.inflowFlux[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
        self.internalNodes = set(range(self.mesh.nNodes_global))
        # identify the internal nodes this is ought to be in mesh
        # \todo move this to mesh
        for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
            ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
            eN_global = self.mesh.elementBoundaryElementsArray[ebN, 0]
            ebN_element = self.mesh.elementBoundaryLocalElementBoundariesArray[ebN, 0]
            for i in range(self.mesh.nNodes_element):
                if i != ebN_element:
                    I = self.mesh.elementNodesArray[eN_global, i]
                    self.internalNodes -= set([I])
        self.nNodes_internal = len(self.internalNodes)
        self.internalNodesArray = np.zeros((self.nNodes_internal,), 'i')
        for nI, n in enumerate(self.internalNodes):
            self.internalNodesArray[nI] = n
        #
        del self.internalNodes
        self.internalNodes = None
        logEvent("Updating local to global mappings", 2)
        self.updateLocal2Global()
        logEvent("Building time integration object", 2)
        logEvent(memory("inflowBC, internalNodes,updateLocal2Global", "OneLevelTransport"), level=4)
        self.timeIntegration = TimeIntegrationClass(self)
        if options is not None:
            self.timeIntegration.setFromOptions(options)
        logEvent(memory("TimeIntegration", "OneLevelTransport"), level=4)
        logEvent("Calculating numerical quadrature formulas", 2)
        self.calculateQuadrature()
        self.setupFieldStrides()
        # hEps: this is use to regularize the flux and re-define the dry states
        self.eps = None
        self.hEps = None
        self.hReg = None
        self.ML = None  # lumped mass matrix
        self.MC_global = None  # consistent mass matrix
        ### Global C Matrices (mql)
        self.cterm_global = None
        # NOTE(review): this attribute is named cterm_transpose_global here,
        # while other methods in this class use cterm_global_transpose --
        # verify which name the rest of the code relies on.
        self.cterm_transpose_global = None
        ### For convex limiting
        self.urelax = None
        self.drelax = None
        self.dH_minus_dL = None
        self.muH_minus_muL = None
        self.size_of_domain = None
        #
        self.hLow = None
        self.huLow = None
        self.hvLow = None
        #
        self.h_min = None
        self.h_max = None
        self.kin_max = None
        self.KE_tiny = None
        #
        self.extendedSourceTerm_hu = None
        self.extendedSourceTerm_hv = None
        self.new_SourceTerm_hu = None
        self.new_SourceTerm_hv = None
        ## for EV
        self.dij_small = None
        self.global_entropy_residual = None
        ## NORMALS
        self.COMPUTE_NORMALS = 1
        self.normalx = None
        self.normaly = None
        self.boundaryIndex = None
        self.reflectingBoundaryConditions = False
        self.reflectingBoundaryIndex = None
        if 'reflecting_BCs' in dir(options) and options.reflecting_BCs == True:
            self.reflectingBoundaryConditions = True
        # Aux quantity at DOFs to be filled by optimized code (MQL)
        self.quantDOFs = None
        comm = Comm.get()
        self.comm = comm
        if comm.size() > 1:
            assert numericalFluxType is not None and numericalFluxType.useWeakDirichletConditions, "You must use a numerical flux to apply weak boundary conditions for parallel runs"
        logEvent(memory("stride+offset", "OneLevelTransport"), level=4)
        if numericalFluxType is not None:
            if options is None or options.periodicDirichletConditions is None:
                self.numericalFlux = numericalFluxType(self,
                                                       dofBoundaryConditionsSetterDict,
                                                       advectiveFluxBoundaryConditionsSetterDict,
                                                       diffusiveFluxBoundaryConditionsSetterDictDict)
            else:
                self.numericalFlux = numericalFluxType(self,
                                                       dofBoundaryConditionsSetterDict,
                                                       advectiveFluxBoundaryConditionsSetterDict,
                                                       diffusiveFluxBoundaryConditionsSetterDictDict,
                                                       options.periodicDirichletConditions)
        else:
            self.numericalFlux = None
        # set penalty terms
        # cek todo move into numerical flux initialization
        if 'penalty' in self.ebq_global:
            for ebN in range(self.mesh.nElementBoundaries_global):
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebq_global['penalty'][ebN, k] = old_div(self.numericalFlux.penalty_constant,
                                                                 (self.mesh.elementBoundaryDiametersArray[ebN]**self.numericalFlux.penalty_power))
        # penalty term
        # cek move to Numerical flux initialization
        if 'penalty' in self.ebqe:
            for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
                ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
                for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                    self.ebqe['penalty'][ebNE, k] = old_div(self.numericalFlux.penalty_constant,
                                                            self.mesh.elementBoundaryDiametersArray[ebN]**self.numericalFlux.penalty_power)
        logEvent(memory("numericalFlux", "OneLevelTransport"), level=4)
        self.elementEffectiveDiametersArray = self.mesh.elementInnerDiametersArray
        # use post processing tools to get conservative fluxes, None by default
        # NOTE(review): self.tmpvt is never defined in this constructor, so
        # this branch would raise AttributeError; it is unreachable because
        # postProcessing is hard-coded False at the top of __init__.
        if self.postProcessing:
            self.q[('v', 0)] = self.tmpvt.q[('v', 0)]
            self.ebq[('v', 0)] = self.tmpvt.ebq[('v', 0)]
            self.ebq[('w', 0)] = self.tmpvt.ebq[('w', 0)]
            self.ebq['sqrt(det(g))'] = self.tmpvt.ebq['sqrt(det(g))']
            self.ebq['n'] = self.tmpvt.ebq['n']
            self.ebq[('dS_u', 0)] = self.tmpvt.ebq[('dS_u', 0)]
            self.ebqe['dS'] = self.tmpvt.ebqe['dS']
            self.ebqe['n'] = self.tmpvt.ebqe['n']
            self.ebq_global['n'] = self.tmpvt.ebq_global['n']
            self.ebq_global['x'] = self.tmpvt.ebq_global['x']
        from proteus import PostProcessingTools
        self.velocityPostProcessor = PostProcessingTools.VelocityPostProcessingChooser(self)
        logEvent(memory("velocity postprocessor", "OneLevelTransport"), level=4)
        # helper for writing out data storage
        from proteus import Archiver
        self.elementQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.elementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        self.exteriorElementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
        # NOTE(review): self.fluxBoundaryConditionsObjectsDict is not created
        # anywhere in this constructor -- presumably it is expected from a
        # base class or earlier refactoring; verify before relying on it.
        for ci, fbcObject in list(self.fluxBoundaryConditionsObjectsDict.items()):
            self.ebqe[('advectiveFlux_bc_flag', ci)] = np.zeros(self.ebqe[('advectiveFlux_bc', ci)].shape, 'i')
            for t, g in list(fbcObject.advectiveFluxBoundaryConditionsDict.items()):
                if ci in self.coefficients.advection:
                    self.ebqe[('advectiveFlux_bc', ci)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
                    self.ebqe[('advectiveFlux_bc_flag', ci)][t[0], t[1]] = 1
            for ck, diffusiveFluxBoundaryConditionsDict in list(fbcObject.diffusiveFluxBoundaryConditionsDictDict.items()):
                self.ebqe[('diffusiveFlux_bc_flag', ck, ci)] = np.zeros(self.ebqe[('diffusiveFlux_bc', ck, ci)].shape, 'i')
                for t, g in list(diffusiveFluxBoundaryConditionsDict.items()):
                    self.ebqe[('diffusiveFlux_bc', ck, ci)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
                    self.ebqe[('diffusiveFlux_bc_flag', ck, ci)][t[0], t[1]] = 1
        # self.numericalFlux.setDirichletValues(self.ebqe)
        if self.movingDomain:
            self.MOVING_DOMAIN = 1.0
        else:
            self.MOVING_DOMAIN = 0.0
        # cek hack
        self.movingDomain = False
        self.MOVING_DOMAIN = 0.0
        if self.mesh.nodeVelocityArray is None:
            self.mesh.nodeVelocityArray = np.zeros(self.mesh.nodeArray.shape, 'd')
        # cek/ido todo replace python loops in modules with optimized code if possible/necessary
        self.forceStrongConditions = self.coefficients.forceStrongConditions
        self.dirichletConditionsForceDOF = {}
        if self.forceStrongConditions:
            for cj in range(self.nc):
                self.dirichletConditionsForceDOF[cj] = DOFBoundaryConditions(
                    self.u[cj].femSpace, dofBoundaryConditionsSetterDict[cj], weakDirichletConditions=False)
        compKernelFlag = 0
        self.elementDiameter = self.mesh.elementDiametersArray
        print(self.nSpace_global, " nSpace_global")
        # construct the optimized (C++) kernel object
        self.sw2d = cSW2DCV_base(self.nSpace_global,
                                 self.nQuadraturePoints_element,
                                 self.u[0].femSpace.elementMaps.localFunctionSpace.dim,
                                 self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim,
                                 self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim,
                                 self.nElementBoundaryQuadraturePoints_elementBoundary,
                                 compKernelFlag)
        self.calculateResidual = self.sw2d.calculateResidual
        # define function to compute entropy viscosity residual
        self.calculateEV = self.sw2d.calculateEV
        if (self.coefficients.LUMPED_MASS_MATRIX):
            self.calculateJacobian = self.sw2d.calculateLumpedMassMatrix
        else:
            self.calculateJacobian = self.sw2d.calculateMassMatrix
        #
        self.dofsXCoord = None
        self.dofsYCoord = None
        self.constrainedDOFsIndices = None
        self.dataStructuresInitialized = False
        # PARALLEL VECTORS #
        self.par_normalx = None
        self.par_normaly = None
        self.par_ML = None
        # for source terms
        self.par_extendedSourceTerm_hu = None
        self.par_extendedSourceTerm_hv = None
        self.par_new_SourceTerm_hu = None
        self.par_new_SourceTerm_hv = None
        # for parallel entropy residual
        self.par_global_entropy_residual = None
    def FCTStep(self):
        """
        Apply convex limiting (FCT) to the high-order update.

        Splits the interleaved global solution vector into its h / hu /
        hv strides, hands the high-order update together with the
        low-order quantities to the optimized ``sw2d.convexLimiting``
        kernel (which fills the ``limited_*`` arrays in place), and
        writes the limited values back into the global solution.
        """
        # NOTE: this function is meant to be called within the solver
        comm = Comm.get()
        rowptr, colind, MassMatrix = self.MC_global.getCSRrepresentation()
        # Extract hnp1 from global solution u
        # the global vector interleaves components as (h, hu, hv) per DOF
        index = list(range(0, len(self.timeIntegration.u)))
        hIndex = index[0::3]
        huIndex = index[1::3]
        hvIndex = index[2::3]
        # create limited solution
        limited_hnp1 = np.zeros(self.h_dof_old.shape)
        limited_hunp1 = np.zeros(self.h_dof_old.shape)
        limited_hvnp1 = np.zeros(self.h_dof_old.shape)
        # kinetic-energy floor scaled by the dry-state tolerance (parallel max)
        self.KE_tiny = self.hEps * comm.globalMax(np.amax(self.kin_max))
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict["dt"] = self.timeIntegration.dt
        argsDict["NNZ"] = self.nnz
        argsDict["numDOFs"] = len(rowptr) - 1
        argsDict["lumped_mass_matrix"] = self.ML
        argsDict["h_old"] = self.h_dof_old
        argsDict["hu_old"] = self.hu_dof_old
        argsDict["hv_old"] = self.hv_dof_old
        argsDict["b_dof"] = self.coefficients.b.dof
        argsDict["high_order_hnp1"] = self.timeIntegration.u[hIndex]
        argsDict["high_order_hunp1"] = self.timeIntegration.u[huIndex]
        argsDict["high_order_hvnp1"] = self.timeIntegration.u[hvIndex]
        argsDict["extendedSourceTerm_hu"] = self.extendedSourceTerm_hu
        argsDict["extendedSourceTerm_hv"] = self.extendedSourceTerm_hv
        argsDict["limited_hnp1"] = limited_hnp1
        argsDict["limited_hunp1"] = limited_hunp1
        argsDict["limited_hvnp1"] = limited_hvnp1
        argsDict["csrRowIndeces_DofLoops"] = rowptr
        argsDict["csrColumnOffsets_DofLoops"] = colind
        argsDict["MassMatrix"] = MassMatrix
        argsDict["dH_minus_dL"] = self.dH_minus_dL
        argsDict["muH_minus_muL"] = self.muH_minus_muL
        argsDict["hEps"] = self.hEps
        argsDict["LUMPED_MASS_MATRIX"] = self.coefficients.LUMPED_MASS_MATRIX
        argsDict["dLow"] = self.dLow
        argsDict["new_SourceTerm_hu"] = self.new_SourceTerm_hu
        argsDict["new_SourceTerm_hv"] = self.new_SourceTerm_hv
        argsDict["hLow"] = self.hLow
        argsDict["huLow"] = self.huLow
        argsDict["hvLow"] = self.hvLow
        argsDict["h_min"] = self.h_min
        argsDict["h_max"] = self.h_max
        argsDict["kin_max"] = self.kin_max
        argsDict["KE_tiny"] = self.KE_tiny
        self.sw2d.convexLimiting(argsDict)
        # Pass the post processed hnp1 solution to global solution u
        self.timeIntegration.u[hIndex] = limited_hnp1
        self.timeIntegration.u[huIndex] = limited_hunp1
        self.timeIntegration.u[hvIndex] = limited_hvnp1
    def computeEV(self):
        """Compute the global entropy residual for the old (previous-stage) solution.

        Packs the old DOF vectors, the C-matrices' CSR data and the lumped mass
        matrix into an ``ArgumentsDict`` and invokes the C++ kernel
        ``calculateEV``, which fills ``self.global_entropy_residual`` in place.
        The scalar ``dij_small`` written back by the kernel is reduced across
        processors into ``self.dij_small``.
        """
        # NOTE(review): this local appears unused — the kernel writes into
        # self.global_entropy_residual (passed below); confirm it can be removed.
        entropy_residual = np.zeros(self.u[0].dof.shape)
        small = 0.0  # seed value for dij_small; read back from argsDict after the kernel call
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict["g"] = self.coefficients.g
        argsDict["h_dof_old"] = self.h_dof_old
        argsDict["hu_dof_old"] = self.hu_dof_old
        argsDict["hv_dof_old"] = self.hv_dof_old
        argsDict["b_dof"] = self.coefficients.b.dof
        argsDict["Cx"] = self.Cx
        argsDict["Cy"] = self.Cy
        argsDict["CTx"] = self.CTx
        argsDict["CTy"] = self.CTy
        argsDict["numDOFsPerEqn"] = self.numDOFsPerEqn
        argsDict["csrRowIndeces_DofLoops"] = self.rowptr_cMatrix
        argsDict["csrColumnOffsets_DofLoops"] = self.colind_cMatrix
        argsDict["lumped_mass_matrix"] = self.ML
        argsDict["eps"] = self.eps
        argsDict["hEps"] = self.hEps
        argsDict["global_entropy_residual"] = self.global_entropy_residual
        argsDict["dij_small"] = small
        # compute entropy residual
        self.sw2d.calculateEV(argsDict)
        # save things
        # dscalar holds scalar outputs written by the kernel; take the max over MPI ranks
        self.dij_small = globalMax(argsDict.dscalar["dij_small"])
    #
def getDOFsCoord(self):
# get x,y coordinates of all DOFs #
self.dofsXCoord = np.zeros(self.u[0].dof.shape, 'd')
self.dofsYCoord = np.zeros(self.u[0].dof.shape, 'd')
self.dofsXCoord[:] = self.mesh.nodeArray[:, 0]
self.dofsYCoord[:] = self.mesh.nodeArray[:, 1]
#
    def getCMatrices(self):
        """Assemble the lumped mass matrix and the discrete gradient ("C") matrices.

        For each spatial dimension d this builds the sparse matrix
        ``cterm_global[d]`` with entries int[(e_d . grad(w_j)) * w_i * dV] and its
        transpose ``cterm_global_transpose[d]``, then exposes their CSR value
        arrays as ``self.Cx``, ``self.Cy``, ``self.CTx``, ``self.CTy``.  It also
        computes the lumped mass matrix ``self.ML`` and the regularization
        lengths ``self.hReg``.  The C-matrix sparsity pattern is derived from the
        (3-equation) Jacobian pattern by keeping one of every 3 rows/columns,
        i.e. only the entries associated with the h variable.
        """
        # since we only need cterm_global to persist, we can drop the other self.'s
        self.cterm = {}
        self.cterm_a = {}
        self.cterm_global = {}
        self.cterm_transpose = {}
        self.cterm_a_transpose = {}
        self.cterm_global_transpose = {}
        # Sparsity pattern for Jacobian
        rowptr, colind, nzval = self.jacobian.getCSRrepresentation()
        nnz = nzval.shape[-1]  # number of non-zero entries in sparse matrix
        ###########################################
        ##### SPARSITY PATTERN FOR C MATRICES #####
        ###########################################
        # construct nnz_cMatrix, czval_cMatrix, rowptr_cMatrix, colind_cMatrix C matrix
        nnz_cMatrix = nnz // 3 // 3  # This is always true for the SWEs in 2D
        nzval_cMatrix = np.zeros(nnz_cMatrix)  # This is enough since the values are filled later
        rowptr_cMatrix = np.zeros(self.u[0].dof.size + 1, 'i')  # NOTE that in particular rowptr_cMatrix[0]=0
        colind_cMatrix = np.zeros(nnz_cMatrix, 'i')
        # fill vector rowptr_cMatrix: each h-row has 1/3 of the columns of the
        # corresponding Jacobian row
        for i in range(1, rowptr_cMatrix.size):
            rowptr_cMatrix[i] = rowptr_cMatrix[i - 1] + old_div((rowptr[3 * (i - 1) + 1] - rowptr[3 * (i - 1)]), 3)
            # = rowptr_cMatrix[i-1] + 1/3*(Number of columns of Jacobian's row 3*(i-1)=0, 3, 6, 9, 12, ... 3*(i-1), ..., 3*n-3)
        # fill vector colind_cMatrix
        i_cMatrix = 0  # ith row of cMatrix
        # 0 to total num of DOFs (i.e. num of rows of jacobian)
        for i in range(rowptr.size - 1):
            if (i % 3 == 0):  # Just consider the rows related to the h variable
                for j, offset in enumerate(range(rowptr[i], rowptr[i + 1])):
                    offset_cMatrix = list(range(rowptr_cMatrix[i_cMatrix], rowptr_cMatrix[i_cMatrix + 1]))
                    if (j % 3 == 0):
                        colind_cMatrix[offset_cMatrix[old_div(j, 3)]] = old_div(colind[offset], 3)
                i_cMatrix += 1
        # END OF SPARSITY PATTERN FOR C MATRICES
        di = np.zeros((self.mesh.nElements_global,
                       self.nQuadraturePoints_element,
                       self.nSpace_global),
                      'd')  # direction of derivative
        # JACOBIANS (FOR ELEMENT TRANSFORMATION)
        self.q[('J')] = np.zeros((self.mesh.nElements_global,
                                  self.nQuadraturePoints_element,
                                  self.nSpace_global,
                                  self.nSpace_global),
                                 'd')
        self.q[('inverse(J)')] = np.zeros((self.mesh.nElements_global,
                                           self.nQuadraturePoints_element,
                                           self.nSpace_global,
                                           self.nSpace_global),
                                          'd')
        self.q[('det(J)')] = np.zeros((self.mesh.nElements_global,
                                       self.nQuadraturePoints_element),
                                      'd')
        self.u[0].femSpace.elementMaps.getJacobianValues(self.elementQuadraturePoints,
                                                         self.q['J'],
                                                         self.q['inverse(J)'],
                                                         self.q['det(J)'])
        self.q['abs(det(J))'] = np.abs(self.q['det(J)'])
        # SHAPE FUNCTIONS
        self.q[('w', 0)] = np.zeros((self.mesh.nElements_global,
                                     self.nQuadraturePoints_element,
                                     self.nDOF_test_element[0]),
                                    'd')
        self.q[('w*dV_m', 0)] = self.q[('w', 0)].copy()
        self.u[0].femSpace.getBasisValues(self.elementQuadraturePoints, self.q[('w', 0)])
        cfemIntegrals.calculateWeightedShape(self.elementQuadratureWeights[('u', 0)],
                                             self.q['abs(det(J))'],
                                             self.q[('w', 0)],
                                             self.q[('w*dV_m', 0)])
        # GRADIENT OF TEST FUNCTIONS
        self.q[('grad(w)', 0)] = np.zeros((self.mesh.nElements_global,
                                           self.nQuadraturePoints_element,
                                           self.nDOF_test_element[0],
                                           self.nSpace_global),
                                          'd')
        self.u[0].femSpace.getBasisGradientValues(self.elementQuadraturePoints,
                                                  self.q['inverse(J)'],
                                                  self.q[('grad(w)', 0)])
        self.q[('grad(w)*dV_f', 0)] = np.zeros((self.mesh.nElements_global,
                                                self.nQuadraturePoints_element,
                                                self.nDOF_test_element[0],
                                                self.nSpace_global),
                                               'd')
        cfemIntegrals.calculateWeightedShapeGradients(self.elementQuadratureWeights[('u', 0)],
                                                      self.q['abs(det(J))'],
                                                      self.q[('grad(w)', 0)],
                                                      self.q[('grad(w)*dV_f', 0)])
        #
        # lumped mass matrix
        #
        # assume a linear mass term
        dm = np.ones(self.q[('u', 0)].shape, 'd')
        elementMassMatrix = np.zeros((self.mesh.nElements_global,
                                      self.nDOF_test_element[0],
                                      self.nDOF_trial_element[0]), 'd')
        cfemIntegrals.updateMassJacobian_weak_lowmem(dm,
                                                     self.q[('w', 0)],
                                                     self.q[('w*dV_m', 0)],
                                                     elementMassMatrix)
        self.MC_a = nzval_cMatrix.copy()
        # NOTE(review): bare SparseMat here vs LAT.SparseMat in the loop below —
        # presumably the same class imported two ways; confirm they refer to the same type.
        self.MC_global = SparseMat(self.nFreeDOF_global[0],
                                   self.nFreeDOF_global[0],
                                   nnz_cMatrix,
                                   self.MC_a,
                                   colind_cMatrix,
                                   rowptr_cMatrix)
        cfemIntegrals.zeroJacobian_CSR(nnz_cMatrix, self.MC_global)
        # the // 3 // 3 and // 3 rescale the full 3-equation element-to-CSR index
        # maps onto the h-only C-matrix sparsity pattern
        cfemIntegrals.updateGlobalJacobianFromElementJacobian_CSR(self.l2g[0]['nFreeDOF'],
                                                                  self.l2g[0]['freeLocal'],
                                                                  self.l2g[0]['nFreeDOF'],
                                                                  self.l2g[0]['freeLocal'],
                                                                  self.csrRowIndeces[(0, 0)] // 3 // 3,
                                                                  self.csrColumnOffsets[(0, 0)] // 3,
                                                                  elementMassMatrix,
                                                                  self.MC_global)
        # diamD2: total integration-weight measure, used to normalize hReg below
        diamD2 = np.sum(self.q['abs(det(J))'][:] * self.elementQuadratureWeights[('u', 0)])
        self.ML = np.zeros((self.nFreeDOF_global[0],), 'd')
        self.hReg = np.zeros((self.nFreeDOF_global[0],), 'd')
        for i in range(self.nFreeDOF_global[0]):
            # row-sum lumping of the consistent mass matrix
            self.ML[i] = self.MC_a[rowptr_cMatrix[i]:rowptr_cMatrix[i + 1]].sum()
            self.hReg[i] = self.ML[i] / diamD2 * self.u[0].dof.max()
        # np.testing.assert_almost_equal(self.ML.sum(), self.mesh.volume, err_msg="Trace of lumped mass matrix should be the domain volume",verbose=True)
        # np.testing.assert_almost_equal(self.ML.sum(), diamD2, err_msg="Trace of lumped mass matrix should be the domain volume",verbose=True)
        for d in range(self.nSpace_global):  # spatial dimensions
            # C matrices
            self.cterm[d] = np.zeros((self.mesh.nElements_global,
                                      self.nDOF_test_element[0],
                                      self.nDOF_trial_element[0]), 'd')
            self.cterm_a[d] = nzval_cMatrix.copy()
            self.cterm_global[d] = LAT.SparseMat(self.nFreeDOF_global[0],
                                                 self.nFreeDOF_global[0],
                                                 nnz_cMatrix,
                                                 self.cterm_a[d],
                                                 colind_cMatrix,
                                                 rowptr_cMatrix)
            cfemIntegrals.zeroJacobian_CSR(nnz_cMatrix, self.cterm_global[d])
            # unit direction vector e_d at every quadrature point
            di[:] = 0.0
            di[..., d] = 1.0
            cfemIntegrals.updateHamiltonianJacobian_weak_lowmem(di,
                                                                self.q[('grad(w)*dV_f', 0)],
                                                                self.q[('w', 0)],
                                                                self.cterm[d])  # int[(di*grad(wj))*wi*dV]
            cfemIntegrals.updateGlobalJacobianFromElementJacobian_CSR(self.l2g[0]['nFreeDOF'],
                                                                      self.l2g[0]['freeLocal'],
                                                                      self.l2g[0]['nFreeDOF'],
                                                                      self.l2g[0]['freeLocal'],
                                                                      self.csrRowIndeces[(0, 0)] // 3 // 3,
                                                                      self.csrColumnOffsets[(0, 0)] // 3,
                                                                      self.cterm[d],
                                                                      self.cterm_global[d])
            # C Transpose matrices
            self.cterm_transpose[d] = np.zeros((self.mesh.nElements_global,
                                                self.nDOF_test_element[0],
                                                self.nDOF_trial_element[0]), 'd')
            self.cterm_a_transpose[d] = nzval_cMatrix.copy()
            self.cterm_global_transpose[d] = LAT.SparseMat(self.nFreeDOF_global[0],
                                                           self.nFreeDOF_global[0],
                                                           nnz_cMatrix,
                                                           self.cterm_a_transpose[d],
                                                           colind_cMatrix,
                                                           rowptr_cMatrix)
            cfemIntegrals.zeroJacobian_CSR(nnz_cMatrix, self.cterm_global_transpose[d])
            di[:] = 0.0
            di[..., d] = -1.0
            cfemIntegrals.updateAdvectionJacobian_weak_lowmem(di,
                                                              self.q[('w', 0)],
                                                              self.q[('grad(w)*dV_f', 0)],
                                                              self.cterm_transpose[d])  # -int[(-di*grad(wi))*wj*dV]
            cfemIntegrals.updateGlobalJacobianFromElementJacobian_CSR(self.l2g[0]['nFreeDOF'],
                                                                      self.l2g[0]['freeLocal'],
                                                                      self.l2g[0]['nFreeDOF'],
                                                                      self.l2g[0]['freeLocal'],
                                                                      self.csrRowIndeces[(0, 0)] // 3 // 3,
                                                                      self.csrColumnOffsets[(0, 0)] // 3,
                                                                      self.cterm_transpose[d],
                                                                      self.cterm_global_transpose[d])
        #
        # keep the CSR arrays of the assembled matrices; all four share the same
        # sparsity pattern, so only the first rowptr/colind pair is stored on self
        self.rowptr_cMatrix, self.colind_cMatrix, self.Cx = self.cterm_global[0].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, self.Cy = self.cterm_global[1].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, self.CTx = self.cterm_global_transpose[0].getCSRrepresentation()
        rowptr_cMatrix, colind_cMatrix, self.CTy = self.cterm_global_transpose[1].getCSRrepresentation()
        # (mql): I am assuming all variables live on the same FE space
        self.numDOFsPerEqn = self.u[0].dof.size
        self.numNonZeroEntries = len(self.Cx)
    #
def updateConstrainedDOFs(self):
# get indices for constrained DOFs
if self.constrainedDOFsIndices is None:
self.constrainedDOFsIndices = []
self.constrainedDOFsIndices = self.coefficients.constrainedDOFs[0](self.dofsXCoord,
self.dofsYCoord)
for i in self.constrainedDOFsIndices:
x = self.dofsXCoord[i]
y = self.dofsYCoord[i]
(h, hu, hv) = self.coefficients.constrainedDOFs[1](x,y,self.timeIntegration.t,
self.u[0].dof[i],
self.u[1].dof[i],
self.u[2].dof[i])
if h is not None:
self.u[0].dof[i] = h
if hu is not None:
self.u[1].dof[i] = hu
if hv is not None:
self.u[2].dof[i] = hv
#
def updateAllReflectingBoundaryConditions(self):
self.forceStrongConditions = False
for dummy, index in enumerate(self.boundaryIndex):
vx = self.u[1].dof[index]
vy = self.u[2].dof[index]
vt = vx * self.normaly[index] - vy * self.normalx[index]
self.u[1].dof[index] = vt * self.normaly[index]
self.u[2].dof[index] = -vt * self.normalx[index]
#
def updatePartialReflectingBoundaryConditions(self):
for dummy, index in enumerate(self.reflectingBoundaryIndex):
vx = self.u[1].dof[index]
vy = self.u[2].dof[index]
vt = vx * self.normaly[index] - vy * self.normalx[index]
self.u[1].dof[index] = vt * self.normaly[index]
self.u[2].dof[index] = -vt * self.normalx[index]
#
    def initDataStructures(self):
        """Allocate and initialize the per-DOF work arrays used by the scheme.

        Creates the "old" DOF copies, the tolerance ``hEps``, the normal-vector
        and convex-limiting arrays, and wraps the arrays that must be exchanged
        across processors in petsc4py parallel vectors.  Sets
        ``self.dataStructuresInitialized = True`` when done so callers run this
        only once.
        """
        comm = Comm.get()
        # old vectors
        self.h_dof_old = np.copy(self.u[0].dof)
        self.hu_dof_old = np.copy(self.u[1].dof)
        self.hv_dof_old = np.copy(self.u[2].dof)
        # hEps: dry-state tolerance, relative to the global max water height
        self.eps = 1E-5
        self.hEps = self.eps * comm.globalMax(self.u[0].dof.max())
        # size_of_domain used in relaxation of bounds
        self.size_of_domain = self.mesh.globalMesh.volume
        # normal vectors
        self.normalx = np.zeros(self.u[0].dof.shape, 'd')
        self.normaly = np.zeros(self.u[0].dof.shape, 'd')
        # quantDOFs
        self.quantDOFs = np.zeros(self.u[0].dof.shape, 'd')
        # boundary Index: I do this in preStep since I need normalx and normaly to be initialized first
        # Allocate space for dLow (for the first stage in the SSP method)
        self.dLow = np.zeros(self.Cx.shape, 'd')
        # get coordinates of DOFs
        self.getDOFsCoord()
        # some vectors for convex limiting
        self.urelax = np.zeros(self.u[0].dof.shape, 'd')
        self.drelax = np.zeros(self.u[0].dof.shape, 'd')
        self.dH_minus_dL = np.zeros(self.Cx.shape, 'd')
        self.muH_minus_muL = np.zeros(self.Cx.shape, 'd')
        #
        self.hLow = np.zeros(self.u[0].dof.shape, 'd')
        self.huLow = np.zeros(self.u[0].dof.shape, 'd')
        self.hvLow = np.zeros(self.u[0].dof.shape, 'd')
        #
        self.h_min = np.zeros(self.u[0].dof.shape, 'd')
        self.h_max = np.zeros(self.u[0].dof.shape, 'd')
        self.kin_max = np.zeros(self.u[0].dof.shape, 'd')
        #
        self.extendedSourceTerm_hu = np.zeros(self.u[0].dof.shape, 'd')
        self.extendedSourceTerm_hv = np.zeros(self.u[0].dof.shape, 'd')
        self.new_SourceTerm_hu = np.zeros(self.u[0].dof.shape, 'd')
        self.new_SourceTerm_hv = np.zeros(self.u[0].dof.shape, 'd')
        self.global_entropy_residual = np.zeros(self.u[0].dof.shape, 'd')
        self.dij_small = 0.0
        # PARALLEL VECTORS #
        # local size, global size, ghost count and local-to-global map for this rank
        n=self.u[0].par_dof.dim_proc
        N=self.u[0].femSpace.dofMap.nDOF_all_processes
        nghosts = self.u[0].par_dof.nghosts
        subdomain2global=self.u[0].femSpace.dofMap.subdomain2global
        #
        self.par_urelax = LAT.ParVec_petsc4py(self.urelax,
                                              bs=1,
                                              n=n,N=N,nghosts=nghosts,
                                              subdomain2global=subdomain2global)
        self.par_drelax = LAT.ParVec_petsc4py(self.drelax,
                                              bs=1,
                                              n=n,N=N,nghosts=nghosts,
                                              subdomain2global=subdomain2global)
        self.par_hLow = LAT.ParVec_petsc4py(self.hLow,
                                            bs=1,
                                            n=n,N=N,nghosts=nghosts,
                                            subdomain2global=subdomain2global)
        self.par_huLow = LAT.ParVec_petsc4py(self.huLow,
                                             bs=1,
                                             n=n,N=N,nghosts=nghosts,
                                             subdomain2global=subdomain2global)
        self.par_hvLow = LAT.ParVec_petsc4py(self.hvLow,
                                             bs=1,
                                             n=n,N=N,nghosts=nghosts,
                                             subdomain2global=subdomain2global)
        #
        self.par_h_min = LAT.ParVec_petsc4py(self.h_min,
                                             bs=1,
                                             n=n,N=N,nghosts=nghosts,
                                             subdomain2global=subdomain2global)
        self.par_h_max = LAT.ParVec_petsc4py(self.h_max,
                                             bs=1,
                                             n=n,N=N,nghosts=nghosts,
                                             subdomain2global=subdomain2global)
        self.par_kin_max = LAT.ParVec_petsc4py(self.kin_max,
                                               bs=1,
                                               n=n,N=N,nghosts=nghosts,
                                               subdomain2global=subdomain2global)
        #
        self.par_extendedSourceTerm_hu = LAT.ParVec_petsc4py(self.extendedSourceTerm_hu,
                                                             bs=1,
                                                             n=n,N=N,nghosts=nghosts,
                                                             subdomain2global=subdomain2global)
        self.par_extendedSourceTerm_hv = LAT.ParVec_petsc4py(self.extendedSourceTerm_hv,
                                                             bs=1,
                                                             n=n,N=N,nghosts=nghosts,
                                                             subdomain2global=subdomain2global)
        self.par_new_SourceTerm_hu = LAT.ParVec_petsc4py(self.new_SourceTerm_hu,
                                                         bs=1,
                                                         n=n,N=N,nghosts=nghosts,
                                                         subdomain2global=subdomain2global)
        self.par_new_SourceTerm_hv = LAT.ParVec_petsc4py(self.new_SourceTerm_hv,
                                                         bs=1,
                                                         n=n,N=N,nghosts=nghosts,
                                                         subdomain2global=subdomain2global)
        #
        self.par_global_entropy_residual = LAT.ParVec_petsc4py(self.global_entropy_residual,
                                                               bs=1,
                                                               n=n,N=N,nghosts=nghosts,
                                                               subdomain2global=subdomain2global)
        self.par_normalx = LAT.ParVec_petsc4py(self.normalx,
                                               bs=1,
                                               n=n,N=N,nghosts=nghosts,
                                               subdomain2global=subdomain2global)
        self.par_normaly = LAT.ParVec_petsc4py(self.normaly,
                                               bs=1,
                                               n=n,N=N,nghosts=nghosts,
                                               subdomain2global=subdomain2global)
        self.par_ML = LAT.ParVec_petsc4py(self.ML,
                                          bs=1,
                                          n=n,N=N,nghosts=nghosts,
                                          subdomain2global=subdomain2global)
        self.par_ML.scatter_forward_insert()
        # NOTE(review): the two assignments below REBIND self.urelax / self.drelax
        # to brand-new arrays, while par_urelax / par_drelax (created above) still
        # wrap the original zero-filled buffers — the scatters that follow would
        # then act on the old buffers. Confirm whether ParVec_petsc4py shares
        # storage by reference and whether this rebinding is intended.
        self.urelax = 1.0 + pow(np.sqrt(np.sqrt(self.ML / self.size_of_domain)),3)
        self.drelax = 1.0 - pow(np.sqrt(np.sqrt(self.ML / self.size_of_domain)),3)
        self.par_urelax.scatter_forward_insert()
        self.par_drelax.scatter_forward_insert()
        #
        self.dataStructuresInitialized = True
    #
    def getResidual(self, u, r):
        """
        Calculate the element residuals and add in to the global residual.

        Parameters
        ----------
        u : array
            Global degree-of-freedom vector with the three components (h, hu, hv)
            interleaved.
        r : array
            Global residual vector; zeroed here and filled in place by the C++
            kernel and the boundary-condition post-processing below.
        """
        # COMPUTE C MATRIX #
        if self.cterm_global is None:
            self.getCMatrices()
        # INIT DATA STRUCTURES #
        if self.dataStructuresInitialized == False:
            self.initDataStructures()
        # LOAD THE UNKNOWNS INTO THE FINITE ELEMENT DOFs #
        self.timeIntegration.calculateCoefs()
        self.timeIntegration.calculateU(u)
        self.setUnknowns(self.timeIntegration.u)
        # SET TO ZERO RESIDUAL #
        r.fill(0.0)
        # REFLECTING BOUNDARY CONDITIONS ON ALL BOUNDARIES #
        if self.reflectingBoundaryConditions and self.boundaryIndex is not None:
            self.updateAllReflectingBoundaryConditions()
        # REFLECTING BOUNDARY CONDITIONS ON PARTIAL BOUNDARIES #
        if not self.reflectingBoundaryConditions and self.reflectingBoundaryIndex is not None:
            self.updatePartialReflectingBoundaryConditions()
        #
        # INIT BOUNDARY INDEX #
        # NOTE: this must be done after the first call to getResidual
        # (to have normalx and normaly initialized)
        # I do this in preStep if firstStep=True
        # CONSTRAINT DOFs #
        if self.coefficients.constrainedDOFs is not None:
            self.updateConstrainedDOFs()
        #
        # DIRICHLET BOUNDARY CONDITIONS #
        # overwrite constrained DOF values with the user-supplied boundary functions
        if self.forceStrongConditions:
            for cj in range(len(self.dirichletConditionsForceDOF)):
                for dofN, g in list(self.dirichletConditionsForceDOF[cj].DOFBoundaryConditionsDict.items()):
                    self.u[cj].dof[dofN] = g(self.dirichletConditionsForceDOF[cj].DOFBoundaryPointDict[dofN], self.timeIntegration.t)
        #
        # CHECK POSITIVITY OF WATER HEIGHT
        if (self.check_positivity_water_height == True):
            assert self.u[0].dof.min() >= -self.eps * self.u[0].dof.max(), ("Negative water height: ", self.u[0].dof.min())
        # lets call calculate EV first and distribute
        self.computeEV()
        self.par_global_entropy_residual.scatter_forward_insert()
        # Pack every array / scalar the C++ residual kernel needs; the key names
        # must match the kernel's expected argument names exactly.
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict["mesh_trial_ref"] = self.u[0].femSpace.elementMaps.psi
        argsDict["mesh_grad_trial_ref"] = self.u[0].femSpace.elementMaps.grad_psi
        argsDict["mesh_dof"] = self.mesh.nodeArray
        argsDict["mesh_l2g"] = self.mesh.elementNodesArray
        argsDict["dV_ref"] = self.elementQuadratureWeights[('u', 0)]
        argsDict["h_trial_ref"] = self.u[0].femSpace.psi
        argsDict["h_grad_trial_ref"] = self.u[0].femSpace.grad_psi
        argsDict["h_test_ref"] = self.u[0].femSpace.psi
        argsDict["h_grad_test_ref"] = self.u[0].femSpace.grad_psi
        argsDict["vel_trial_ref"] = self.u[1].femSpace.psi
        argsDict["vel_grad_trial_ref"] = self.u[1].femSpace.grad_psi
        argsDict["vel_test_ref"] = self.u[1].femSpace.psi
        argsDict["vel_grad_test_ref"] = self.u[1].femSpace.grad_psi
        argsDict["mesh_trial_trace_ref"] = self.u[0].femSpace.elementMaps.psi_trace
        argsDict["mesh_grad_trial_trace_ref"] = self.u[0].femSpace.elementMaps.grad_psi_trace
        argsDict["h_trial_trace_ref"] = self.u[0].femSpace.psi_trace
        argsDict["h_grad_trial_trace_ref"] = self.u[0].femSpace.grad_psi_trace
        argsDict["h_test_trace_ref"] = self.u[0].femSpace.psi_trace
        argsDict["h_grad_test_trace_ref"] = self.u[0].femSpace.grad_psi_trace
        argsDict["vel_trial_trace_ref"] = self.u[1].femSpace.psi_trace
        argsDict["vel_grad_trial_trace_ref"] = self.u[1].femSpace.grad_psi_trace
        argsDict["vel_test_trace_ref"] = self.u[1].femSpace.psi_trace
        argsDict["vel_grad_test_trace_ref"] = self.u[1].femSpace.grad_psi_trace
        argsDict["normal_ref"] = self.u[0].femSpace.elementMaps.boundaryNormals
        argsDict["boundaryJac_ref"] = self.u[0].femSpace.elementMaps.boundaryJacobians
        argsDict["elementDiameter"] = self.elementDiameter
        argsDict["nElements_global"] = self.mesh.nElements_global
        argsDict["g"] = self.coefficients.g
        argsDict["h_l2g"] = self.u[0].femSpace.dofMap.l2g
        argsDict["vel_l2g"] = self.u[1].femSpace.dofMap.l2g
        argsDict["h_dof_old"] = self.h_dof_old
        argsDict["hu_dof_old"] = self.hu_dof_old
        argsDict["hv_dof_old"] = self.hv_dof_old
        argsDict["b_dof"] = self.coefficients.b.dof
        argsDict["h_dof"] = self.u[0].dof
        argsDict["hu_dof"] = self.u[1].dof
        argsDict["hv_dof"] = self.u[2].dof
        argsDict["q_cfl"] = self.q[('cfl', 0)]
        argsDict["sdInfo_hu_hu_rowptr"] = self.coefficients.sdInfo[(1, 1)][0]
        argsDict["sdInfo_hu_hu_colind"] = self.coefficients.sdInfo[(1, 1)][1]
        argsDict["sdInfo_hu_hv_rowptr"] = self.coefficients.sdInfo[(1, 2)][0]
        argsDict["sdInfo_hu_hv_colind"] = self.coefficients.sdInfo[(1, 2)][1]
        argsDict["sdInfo_hv_hv_rowptr"] = self.coefficients.sdInfo[(2, 2)][0]
        argsDict["sdInfo_hv_hv_colind"] = self.coefficients.sdInfo[(2, 2)][1]
        argsDict["sdInfo_hv_hu_rowptr"] = self.coefficients.sdInfo[(2, 1)][0]
        argsDict["sdInfo_hv_hu_colind"] = self.coefficients.sdInfo[(2, 1)][1]
        argsDict["offset_h"] = self.offset[0]
        argsDict["offset_hu"] = self.offset[1]
        argsDict["offset_hv"] = self.offset[2]
        argsDict["stride_h"] = self.stride[0]
        argsDict["stride_hu"] = self.stride[1]
        argsDict["stride_hv"] = self.stride[2]
        argsDict["globalResidual"] = r
        argsDict["nExteriorElementBoundaries_global"] = self.mesh.nExteriorElementBoundaries_global
        argsDict["exteriorElementBoundariesArray"] = self.mesh.exteriorElementBoundariesArray
        argsDict["elementBoundaryElementsArray"] = self.mesh.elementBoundaryElementsArray
        argsDict["elementBoundaryLocalElementBoundariesArray"] = self.mesh.elementBoundaryLocalElementBoundariesArray
        argsDict["isDOFBoundary_h"] = self.numericalFlux.isDOFBoundary[0]
        argsDict["isDOFBoundary_hu"] = self.numericalFlux.isDOFBoundary[1]
        argsDict["isDOFBoundary_hv"] = self.numericalFlux.isDOFBoundary[2]
        argsDict["isAdvectiveFluxBoundary_h"] = self.ebqe[('advectiveFlux_bc_flag', 0)]
        argsDict["isAdvectiveFluxBoundary_hu"] = self.ebqe[('advectiveFlux_bc_flag', 1)]
        argsDict["isAdvectiveFluxBoundary_hv"] = self.ebqe[('advectiveFlux_bc_flag', 2)]
        argsDict["isDiffusiveFluxBoundary_hu"] = self.ebqe[('diffusiveFlux_bc_flag', 1, 1)]
        argsDict["isDiffusiveFluxBoundary_hv"] = self.ebqe[('diffusiveFlux_bc_flag', 2, 2)]
        argsDict["ebqe_bc_h_ext"] = self.numericalFlux.ebqe[('u', 0)]
        argsDict["ebqe_bc_flux_mass_ext"] = self.ebqe[('advectiveFlux_bc', 0)]
        argsDict["ebqe_bc_flux_mom_hu_adv_ext"] = self.ebqe[('advectiveFlux_bc', 1)]
        argsDict["ebqe_bc_flux_mom_hv_adv_ext"] = self.ebqe[('advectiveFlux_bc', 2)]
        argsDict["ebqe_bc_hu_ext"] = self.numericalFlux.ebqe[('u', 1)]
        argsDict["ebqe_bc_flux_hu_diff_ext"] = self.ebqe[('diffusiveFlux_bc', 1, 1)]
        argsDict["ebqe_penalty_ext"] = self.ebqe[('penalty')]
        argsDict["ebqe_bc_hv_ext"] = self.numericalFlux.ebqe[('u', 2)]
        argsDict["ebqe_bc_flux_hv_diff_ext"] = self.ebqe[('diffusiveFlux_bc', 2, 2)]
        argsDict["q_velocity"] = self.q[('velocity', 0)]
        argsDict["ebqe_velocity"] = self.ebqe[('velocity', 0)]
        argsDict["flux"] = self.ebq_global[('totalFlux', 0)]
        argsDict["elementResidual_h"] = self.elementResidual[0]
        argsDict["Cx"] = self.Cx
        argsDict["Cy"] = self.Cy
        argsDict["CTx"] = self.CTx
        argsDict["CTy"] = self.CTy
        argsDict["numDOFsPerEqn"] = self.numDOFsPerEqn
        argsDict["NNZ"] = self.numNonZeroEntries
        argsDict["csrRowIndeces_DofLoops"] = self.rowptr_cMatrix
        argsDict["csrColumnOffsets_DofLoops"] = self.colind_cMatrix
        argsDict["lumped_mass_matrix"] = self.ML
        argsDict["cfl_run"] = self.timeIntegration.runCFL
        argsDict["eps"] = self.eps
        argsDict["hEps"] = self.hEps
        argsDict["hnp1_at_quad_point"] = self.q[('u', 0)]
        argsDict["hunp1_at_quad_point"] = self.q[('u', 1)]
        argsDict["hvnp1_at_quad_point"] = self.q[('u', 2)]
        argsDict["extendedSourceTerm_hu"] = self.extendedSourceTerm_hu
        argsDict["extendedSourceTerm_hv"] = self.extendedSourceTerm_hv
        argsDict["dH_minus_dL"] = self.dH_minus_dL
        argsDict["muH_minus_muL"] = self.muH_minus_muL
        argsDict["cE"] = float(self.coefficients.cE)
        argsDict["LUMPED_MASS_MATRIX"] = self.coefficients.LUMPED_MASS_MATRIX
        argsDict["dt"] = self.timeIntegration.dt
        argsDict["LINEAR_FRICTION"] = self.coefficients.LINEAR_FRICTION
        argsDict["mannings"] = float(self.coefficients.mannings)
        argsDict["quantDOFs"] = self.quantDOFs
        argsDict["SECOND_CALL_CALCULATE_RESIDUAL"] = self.secondCallCalculateResidual
        argsDict["COMPUTE_NORMALS"] = self.COMPUTE_NORMALS
        argsDict["normalx"] = self.normalx
        argsDict["normaly"] = self.normaly
        argsDict["dLow"] = self.dLow
        argsDict["lstage"] = self.timeIntegration.lstage
        argsDict["new_SourceTerm_hu"] = self.new_SourceTerm_hu
        argsDict["new_SourceTerm_hv"] = self.new_SourceTerm_hv
        argsDict["global_entropy_residual"] = self.global_entropy_residual
        argsDict["dij_small"] = self.dij_small
        argsDict["hLow"] = self.hLow
        argsDict["huLow"] = self.huLow
        argsDict["hvLow"] = self.hvLow
        argsDict["h_min"] = self.h_min
        argsDict["h_max"] = self.h_max
        argsDict["kin_max"] = self.kin_max
        argsDict["size_of_domain"] = self.size_of_domain
        argsDict["urelax"] = self.urelax
        argsDict["drelax"] = self.drelax
        ## call calculate residual
        self.calculateResidual(argsDict)
        ## distribute local bounds and low order solutions (with bar states)
        self.par_hLow.scatter_forward_insert()
        self.par_huLow.scatter_forward_insert()
        self.par_hvLow.scatter_forward_insert()
        #
        self.par_h_min.scatter_forward_insert()
        self.par_h_max.scatter_forward_insert()
        self.par_kin_max.scatter_forward_insert()
        ## distribute source terms (not sure if needed)
        self.par_extendedSourceTerm_hu.scatter_forward_insert()
        self.par_extendedSourceTerm_hv.scatter_forward_insert()
        ##
        self.par_new_SourceTerm_hu.scatter_forward_insert()
        self.par_new_SourceTerm_hv.scatter_forward_insert()
        # normals are computed by the kernel only on the first call; distribute
        # them once, then disable the flag
        if self.COMPUTE_NORMALS==1:
            self.par_normalx.scatter_forward_insert()
            self.par_normaly.scatter_forward_insert()
            self.COMPUTE_NORMALS = 0
        #
        # for reflecting conditions on all boundaries: zero the momentum residual
        # rows of the boundary DOFs (their values were set strongly above)
        if self.reflectingBoundaryConditions and self.boundaryIndex is not None:
            for dummy, index in enumerate(self.boundaryIndex):
                r[self.offset[1]+self.stride[1]*index]=0.
                r[self.offset[2]+self.stride[2]*index]=0.
            #
        #
        # for reflecting conditions on partial boundaries
        if not self.reflectingBoundaryConditions and self.reflectingBoundaryIndex is not None:
            for dummy, index in enumerate(self.reflectingBoundaryIndex):
                r[self.offset[1]+self.stride[1]*index]=0.
                r[self.offset[2]+self.stride[2]*index]=0.
            #
        #
        # zero residual entries of strongly-enforced Dirichlet DOFs
        if self.forceStrongConditions:
            for cj in range(len(self.dirichletConditionsForceDOF)):
                for dofN, g in list(self.dirichletConditionsForceDOF[cj].DOFBoundaryConditionsDict.items()):
                    r[self.offset[cj] + self.stride[cj] * dofN] = 0.
        #
        # zero residual entries of constrained DOFs (all components)
        if self.constrainedDOFsIndices is not None:
            for index in self.constrainedDOFsIndices:
                for cj in range(self.nc):
                    global_dofN = self.offset[cj] + self.stride[cj] * index
                    r[global_dofN] = 0.
        #
        logEvent("Global residual SWEs: ", level=9, data=r)
        # mwf decide if this is reasonable for keeping solver statistics
        self.nonlinear_function_evaluations += 1
    def getJacobian(self, jacobian):
        """Assemble the global Jacobian for the current solution.

        Zeroes the CSR value array, packs all mesh / FE-space / boundary data
        into an ``ArgumentsDict`` (key names must match the C++ kernel
        ``calculateJacobian`` exactly), and then overwrites the rows of DOFs
        that are handled strongly (reflecting boundaries, Dirichlet DOFs,
        constrained DOFs) with identity rows so the Newton update leaves them
        untouched.  Returns the (modified in place) ``jacobian``.
        """
        cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian,
                                       jacobian)
        argsDict = cArgumentsDict.ArgumentsDict()
        argsDict["mesh_trial_ref"] = self.u[0].femSpace.elementMaps.psi
        argsDict["mesh_grad_trial_ref"] = self.u[0].femSpace.elementMaps.grad_psi
        argsDict["mesh_dof"] = self.mesh.nodeArray
        argsDict["mesh_velocity_dof"] = self.mesh.nodeVelocityArray
        argsDict["MOVING_DOMAIN"] = self.MOVING_DOMAIN
        argsDict["mesh_l2g"] = self.mesh.elementNodesArray
        argsDict["dV_ref"] = self.elementQuadratureWeights[('u', 0)]
        argsDict["h_trial_ref"] = self.u[0].femSpace.psi
        argsDict["h_grad_trial_ref"] = self.u[0].femSpace.grad_psi
        argsDict["h_test_ref"] = self.u[0].femSpace.psi
        argsDict["h_grad_test_ref"] = self.u[0].femSpace.grad_psi
        argsDict["vel_trial_ref"] = self.u[1].femSpace.psi
        argsDict["vel_grad_trial_ref"] = self.u[1].femSpace.grad_psi
        argsDict["vel_test_ref"] = self.u[1].femSpace.psi
        argsDict["vel_grad_test_ref"] = self.u[1].femSpace.grad_psi
        argsDict["mesh_trial_trace_ref"] = self.u[0].femSpace.elementMaps.psi_trace
        argsDict["mesh_grad_trial_trace_ref"] = self.u[0].femSpace.elementMaps.grad_psi_trace
        argsDict["dS_ref"] = self.elementBoundaryQuadratureWeights[('u', 0)]
        argsDict["h_trial_trace_ref"] = self.u[0].femSpace.psi_trace
        argsDict["h_grad_trial_trace_ref"] = self.u[0].femSpace.grad_psi_trace
        argsDict["h_test_trace_ref"] = self.u[0].femSpace.psi_trace
        argsDict["h_grad_test_trace_ref"] = self.u[0].femSpace.grad_psi_trace
        argsDict["vel_trial_trace_ref"] = self.u[1].femSpace.psi_trace
        argsDict["vel_grad_trial_trace_ref"] = self.u[1].femSpace.grad_psi_trace
        argsDict["vel_test_trace_ref"] = self.u[1].femSpace.psi_trace
        argsDict["vel_grad_test_trace_ref"] = self.u[1].femSpace.grad_psi_trace
        argsDict["normal_ref"] = self.u[0].femSpace.elementMaps.boundaryNormals
        argsDict["boundaryJac_ref"] = self.u[0].femSpace.elementMaps.boundaryJacobians
        argsDict["elementDiameter"] = self.elementDiameter
        argsDict["nElements_global"] = self.mesh.nElements_global
        argsDict["g"] = self.coefficients.g
        argsDict["h_l2g"] = self.u[0].femSpace.dofMap.l2g
        argsDict["vel_l2g"] = self.u[1].femSpace.dofMap.l2g
        argsDict["b_dof"] = self.coefficients.b.dof
        argsDict["h_dof"] = self.u[0].dof
        argsDict["hu_dof"] = self.u[1].dof
        argsDict["hv_dof"] = self.u[2].dof
        argsDict["q_cfl"] = self.q[('cfl', 0)]
        argsDict["sdInfo_hu_hu_rowptr"] = self.coefficients.sdInfo[(1, 1)][0]
        argsDict["sdInfo_hu_hu_colind"] = self.coefficients.sdInfo[(1, 1)][1]
        argsDict["sdInfo_hu_hv_rowptr"] = self.coefficients.sdInfo[(1, 2)][0]
        argsDict["sdInfo_hu_hv_colind"] = self.coefficients.sdInfo[(1, 2)][1]
        argsDict["sdInfo_hv_hv_rowptr"] = self.coefficients.sdInfo[(2, 2)][0]
        argsDict["sdInfo_hv_hv_colind"] = self.coefficients.sdInfo[(2, 2)][1]
        argsDict["sdInfo_hv_hu_rowptr"] = self.coefficients.sdInfo[(2, 1)][0]
        argsDict["sdInfo_hv_hu_colind"] = self.coefficients.sdInfo[(2, 1)][1]
        argsDict["csrRowIndeces_h_h"] = self.csrRowIndeces[(0, 0)]
        argsDict["csrColumnOffsets_h_h"] = self.csrColumnOffsets[(0, 0)]
        argsDict["csrRowIndeces_h_hu"] = self.csrRowIndeces[(0, 1)]
        argsDict["csrColumnOffsets_h_hu"] = self.csrColumnOffsets[(0, 1)]
        argsDict["csrRowIndeces_h_hv"] = self.csrRowIndeces[(0, 2)]
        argsDict["csrColumnOffsets_h_hv"] = self.csrColumnOffsets[(0, 2)]
        argsDict["csrRowIndeces_hu_h"] = self.csrRowIndeces[(1, 0)]
        argsDict["csrColumnOffsets_hu_h"] = self.csrColumnOffsets[(1, 0)]
        argsDict["csrRowIndeces_hu_hu"] = self.csrRowIndeces[(1, 1)]
        argsDict["csrColumnOffsets_hu_hu"] = self.csrColumnOffsets[(1, 1)]
        argsDict["csrRowIndeces_hu_hv"] = self.csrRowIndeces[(1, 2)]
        argsDict["csrColumnOffsets_hu_hv"] = self.csrColumnOffsets[(1, 2)]
        argsDict["csrRowIndeces_hv_h"] = self.csrRowIndeces[(2, 0)]
        argsDict["csrColumnOffsets_hv_h"] = self.csrColumnOffsets[(2, 0)]
        argsDict["csrRowIndeces_hv_hu"] = self.csrRowIndeces[(2, 1)]
        argsDict["csrColumnOffsets_hv_hu"] = self.csrColumnOffsets[(2, 1)]
        argsDict["csrRowIndeces_hv_hv"] = self.csrRowIndeces[(2, 2)]
        argsDict["csrColumnOffsets_hv_hv"] = self.csrColumnOffsets[(2, 2)]
        argsDict["globalJacobian"] = jacobian.getCSRrepresentation()[2]
        argsDict["nExteriorElementBoundaries_global"] = self.mesh.nExteriorElementBoundaries_global
        argsDict["exteriorElementBoundariesArray"] = self.mesh.exteriorElementBoundariesArray
        argsDict["elementBoundaryElementsArray"] = self.mesh.elementBoundaryElementsArray
        argsDict["elementBoundaryLocalElementBoundariesArray"] = self.mesh.elementBoundaryLocalElementBoundariesArray
        argsDict["isDOFBoundary_h"] = self.numericalFlux.isDOFBoundary[0]
        argsDict["isDOFBoundary_hu"] = self.numericalFlux.isDOFBoundary[1]
        argsDict["isDOFBoundary_hv"] = self.numericalFlux.isDOFBoundary[2]
        argsDict["isAdvectiveFluxBoundary_h"] = self.ebqe[('advectiveFlux_bc_flag', 0)]
        argsDict["isAdvectiveFluxBoundary_hu"] = self.ebqe[('advectiveFlux_bc_flag', 1)]
        argsDict["isAdvectiveFluxBoundary_hv"] = self.ebqe[('advectiveFlux_bc_flag', 2)]
        argsDict["isDiffusiveFluxBoundary_hu"] = self.ebqe[('diffusiveFlux_bc_flag', 1, 1)]
        argsDict["isDiffusiveFluxBoundary_hv"] = self.ebqe[('diffusiveFlux_bc_flag', 2, 2)]
        argsDict["ebqe_bc_h_ext"] = self.numericalFlux.ebqe[('u', 0)]
        argsDict["ebqe_bc_flux_mass_ext"] = self.ebqe[('advectiveFlux_bc', 0)]
        argsDict["ebqe_bc_flux_mom_hu_adv_ext"] = self.ebqe[('advectiveFlux_bc', 1)]
        argsDict["ebqe_bc_flux_mom_hv_adv_ext"] = self.ebqe[('advectiveFlux_bc', 2)]
        argsDict["ebqe_bc_hu_ext"] = self.numericalFlux.ebqe[('u', 1)]
        argsDict["ebqe_bc_flux_hu_diff_ext"] = self.ebqe[('diffusiveFlux_bc', 1, 1)]
        argsDict["ebqe_penalty_ext"] = self.ebqe[('penalty')]
        argsDict["ebqe_bc_hv_ext"] = self.numericalFlux.ebqe[('u', 2)]
        argsDict["ebqe_bc_flux_hv_diff_ext"] = self.ebqe[('diffusiveFlux_bc', 2, 2)]
        argsDict["csrColumnOffsets_eb_h_h"] = self.csrColumnOffsets_eb[(0, 0)]
        argsDict["csrColumnOffsets_eb_h_hu"] = self.csrColumnOffsets_eb[(0, 1)]
        argsDict["csrColumnOffsets_eb_h_hv"] = self.csrColumnOffsets_eb[(0, 2)]
        argsDict["csrColumnOffsets_eb_hu_h"] = self.csrColumnOffsets_eb[(1, 0)]
        argsDict["csrColumnOffsets_eb_hu_hu"] = self.csrColumnOffsets_eb[(1, 1)]
        argsDict["csrColumnOffsets_eb_hu_hv"] = self.csrColumnOffsets_eb[(1, 2)]
        argsDict["csrColumnOffsets_eb_hv_h"] = self.csrColumnOffsets_eb[(2, 0)]
        argsDict["csrColumnOffsets_eb_hv_hu"] = self.csrColumnOffsets_eb[(2, 1)]
        argsDict["csrColumnOffsets_eb_hv_hv"] = self.csrColumnOffsets_eb[(2, 2)]
        argsDict["dt"] = self.timeIntegration.dt
        self.calculateJacobian(argsDict)
        # for reflecting conditions on ALL boundaries:
        # set the hu and hv rows of each boundary DOF to identity rows
        # (1 on the diagonal, 0 elsewhere) so the strongly-set values are kept
        if self.reflectingBoundaryConditions and self.boundaryIndex is not None:
            for dummy, index in enumerate(self.boundaryIndex):
                global_dofN = self.offset[1] + self.stride[1] * index
                for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                    if (self.colind[i] == global_dofN):
                        self.nzval[i] = 1.0
                    else:
                        self.nzval[i] = 0.0
                    #
                #
                global_dofN = self.offset[2] + self.stride[2] * index
                for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                    if (self.colind[i] == global_dofN):
                        self.nzval[i] = 1.0
                    else:
                        self.nzval[i] = 0.0
                    #
                #
            #
        #
        # for reflecting conditions partial boundaries (same identity-row treatment)
        if not self.reflectingBoundaryConditions and self.reflectingBoundaryIndex is not None:
            for dummy, index in enumerate(self.reflectingBoundaryIndex):
                global_dofN = self.offset[1] + self.stride[1] * index
                for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                    if (self.colind[i] == global_dofN):
                        self.nzval[i] = 1.0
                    else:
                        self.nzval[i] = 0.0
                    #
                #
                global_dofN = self.offset[2] + self.stride[2] * index
                for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                    if (self.colind[i] == global_dofN):
                        self.nzval[i] = 1.0
                    else:
                        self.nzval[i] = 0.0
                    #
                #
            #
        #
        # Load the Dirichlet conditions directly into residual
        if self.forceStrongConditions:
            scaling = 1.0  # probably want to add some scaling to match non-dirichlet diagonals in linear system
            for cj in range(self.nc):
                for dofN in list(self.dirichletConditionsForceDOF[cj].DOFBoundaryConditionsDict.keys()):
                    global_dofN = self.offset[cj] + self.stride[cj] * dofN
                    for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                        if (self.colind[i] == global_dofN):
                            self.nzval[i] = scaling
                        else:
                            self.nzval[i] = 0.0
        #
        # identity rows (all components) for constrained DOFs
        if self.constrainedDOFsIndices is not None:
            for index in self.constrainedDOFsIndices:
                for cj in range(self.nc):
                    global_dofN = self.offset[cj] + self.stride[cj] * index
                    for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
                        if (self.colind[i] == global_dofN):
                            self.nzval[i] = 1.0
                        else:
                            self.nzval[i] = 0.0
        #
        logEvent("Jacobian ", level=10, data=jacobian)
        # mwf decide if this is reasonable for solver statistics
        self.nonlinear_function_jacobian_evaluations += 1
        return jacobian
def calculateElementQuadrature(self):
    """
    Calculate the physical location and weights of the quadrature rules
    and the shape information at the quadrature points.
    This function should be called only when the mesh changes.
    """
    if self.postProcessing:
        # keep the auxiliary transport object used for post-processing in sync
        self.tmpvt.calculateElementQuadrature()
    # physical coordinates of the element quadrature points -> self.q['x']
    self.u[0].femSpace.elementMaps.getValues(self.elementQuadraturePoints, self.q['x'])
    # reference-element basis values/gradients for the element maps
    self.u[0].femSpace.elementMaps.getBasisValuesRef(self.elementQuadraturePoints)
    self.u[0].femSpace.elementMaps.getBasisGradientValuesRef(self.elementQuadraturePoints)
    # trial/test basis values and gradients for components 0 and 1
    # (presumably both components share the same mesh -- see the analogous
    # comment in calculateExteriorElementBoundaryQuadrature)
    self.u[0].femSpace.getBasisValuesRef(self.elementQuadraturePoints)
    self.u[0].femSpace.getBasisGradientValuesRef(self.elementQuadraturePoints)
    self.u[1].femSpace.getBasisValuesRef(self.elementQuadraturePoints)
    self.u[1].femSpace.getBasisGradientValuesRef(self.elementQuadraturePoints)
    # let the coefficients object allocate/initialize its quadrature storage
    self.coefficients.initializeElementQuadrature(self.timeIntegration.t, self.q)
def calculateElementBoundaryQuadrature(self):
    """
    Calculate the physical location and weights of the quadrature rules
    and the shape information at the quadrature points on element boundaries.
    This function should be called only when the mesh changes.

    Interior element-boundary quadrature is not needed by this model, so
    beyond delegating to the post-processing transport object this is a
    no-op.  (Removed a redundant trailing ``pass`` statement.)
    """
    if self.postProcessing:
        self.tmpvt.calculateElementBoundaryQuadrature()
def calculateExteriorElementBoundaryQuadrature(self):
    """
    Calculate the physical location and weights of the quadrature rules
    and the shape information at the quadrature points on global element boundaries.
    This function should be called only when the mesh changes.
    """
    if self.postProcessing:
        # keep the auxiliary transport object used for post-processing in sync
        self.tmpvt.calculateExteriorElementBoundaryQuadrature()
    #
    # get physical locations of element boundary quadrature points
    #
    # assume all components live on the same mesh
    self.u[0].femSpace.elementMaps.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.elementMaps.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[0].femSpace.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[1].femSpace.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
    self.u[1].femSpace.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
    # physical coordinates of the exterior boundary quadrature points -> self.ebqe['x']
    # (was written self.ebqe[('x')] in one place; normalized to 'x' for consistency)
    self.u[0].femSpace.elementMaps.getValuesGlobalExteriorTrace(self.elementBoundaryQuadraturePoints,
                                                                self.ebqe['x'])
    # Rebuild the flux boundary-condition objects at the (possibly new)
    # boundary quadrature points, one per component that has advective BCs.
    self.fluxBoundaryConditionsObjectsDict = {
        cj: FluxBoundaryConditions(self.mesh,
                                   self.nElementBoundaryQuadraturePoints_elementBoundary,
                                   self.ebqe['x'],
                                   self.advectiveFluxBoundaryConditionsSetterDict[cj],
                                   self.diffusiveFluxBoundaryConditionsSetterDictDict[cj])
        for cj in self.advectiveFluxBoundaryConditionsSetterDict}
    self.coefficients.initializeGlobalExteriorElementBoundaryQuadrature(self.timeIntegration.t, self.ebqe)
def estimate_mt(self):
    """Intentional no-op: this model does not compute an m_t estimate."""
    return None
def calculateSolutionAtQuadrature(self):
    """Intentional no-op: solution values at quadrature are not recomputed here."""
    return None
def calculateAuxiliaryQuantitiesAfterStep(self):
    # Defer to the generic OneLevelTransport implementation; no
    # model-specific post-step quantities are computed here.
    OneLevelTransport.calculateAuxiliaryQuantitiesAfterStep(self)
def getForce(self, cg, forceExtractionFaces, force, moment):
    """Intentional no-op: force/moment extraction is not implemented for this model."""
    return None
| mit |
yousafsyed/casperjs | bin/Lib/logging/config.py | 83 | 35727 | # Copyright 2001-2014 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
Copyright (C) 2001-2014 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import errno
import io
import logging
import logging.handlers
import re
import struct
import sys
import traceback
try:
import _thread as thread
import threading
except ImportError: #pragma: no cover
thread = None
from socketserver import ThreadingTCPServer, StreamRequestHandler
# Port that listen() binds to when no port is given.
DEFAULT_LOGGING_CONFIG_PORT = 9030

# errno used to recognize a peer-reset connection in the socket listener.
RESET_ERROR = errno.ECONNRESET

#
#   The following code implements a socket listener for on-the-fly
#   reconfiguration of logging.
#
#   _listener holds the server object doing the listening
_listener = None
def fileConfig(fname, defaults=None, disable_existing_loggers=True):
    """
    Read the logging configuration from a ConfigParser-format file.

    This can be called several times from an application, allowing an end user
    the ability to select from various pre-canned configurations (if the
    developer provides a mechanism to present the choices and load the chosen
    configuration).

    ``fname`` may be a filename, a file-like object (anything with a
    ``readline`` attribute), or an already-populated ``RawConfigParser``.
    """
    import configparser

    if isinstance(fname, configparser.RawConfigParser):
        cp = fname
    else:
        cp = configparser.ConfigParser(defaults)
        if hasattr(fname, 'readline'):
            cp.read_file(fname)
        else:
            cp.read(fname)

    formatters = _create_formatters(cp)

    # critical section: the logging module's internal handler registry is
    # cleared and rebuilt, so serialize against other configuring threads.
    logging._acquireLock()
    try:
        logging._handlers.clear()
        del logging._handlerList[:]
        # Handlers add themselves to logging._handlers
        handlers = _install_handlers(cp, formatters)
        _install_loggers(cp, handlers, disable_existing_loggers)
    finally:
        logging._releaseLock()
def _resolve(name):
    """Resolve a dotted name to a global object."""
    parts = name.split('.')
    used = parts[0]
    found = __import__(used)
    for frag in parts[1:]:
        used = used + '.' + frag
        try:
            found = getattr(found, frag)
        except AttributeError:
            # Missing attribute may be a not-yet-imported submodule:
            # import it, then fetch the attribute again.
            __import__(used)
            found = getattr(found, frag)
    return found
def _strip_spaces(alist):
    # Lazily strip surrounding whitespace from every element.
    return (item.strip() for item in alist)
def _create_formatters(cp):
    """Create and return a dict of formatters keyed by the names listed in
    the [formatters] section's 'keys' entry."""
    flist = cp["formatters"]["keys"]
    if not len(flist):
        return {}
    flist = flist.split(",")
    flist = _strip_spaces(flist)
    formatters = {}
    for form in flist:
        sectname = "formatter_%s" % form
        # raw=True: format strings contain '%' placeholders which must not
        # be interpolated by the config parser.
        fs = cp.get(sectname, "format", raw=True, fallback=None)
        dfs = cp.get(sectname, "datefmt", raw=True, fallback=None)
        c = logging.Formatter
        class_name = cp[sectname].get("class")
        if class_name:
            # allow a custom Formatter subclass, resolved by dotted name
            c = _resolve(class_name)
        f = c(fs, dfs)
        formatters[form] = f
    return formatters
def _install_handlers(cp, formatters):
    """Install and return a dict of handlers keyed by the names listed in
    the [handlers] section's 'keys' entry."""
    hlist = cp["handlers"]["keys"]
    if not len(hlist):
        return {}
    hlist = hlist.split(",")
    hlist = _strip_spaces(hlist)
    handlers = {}
    fixups = [] #for inter-handler references
    for hand in hlist:
        section = cp["handler_%s" % hand]
        klass = section["class"]
        fmt = section.get("formatter", "")
        try:
            # NOTE(security): eval() executes text straight from the config
            # file. fileConfig historically trusts its input; never feed it
            # an untrusted configuration.
            klass = eval(klass, vars(logging))
        except (AttributeError, NameError):
            # not an attribute of the logging namespace; resolve dotted name
            klass = _resolve(klass)
        args = section["args"]
        args = eval(args, vars(logging))
        h = klass(*args)
        if "level" in section:
            level = section["level"]
            h.setLevel(level)
        if len(fmt):
            h.setFormatter(formatters[fmt])
        if issubclass(klass, logging.handlers.MemoryHandler):
            target = section.get("target", "")
            if len(target): #the target handler may not be loaded yet, so keep for later...
                fixups.append((h, target))
        handlers[hand] = h
    #now all handlers are loaded, fixup inter-handler references...
    for h, t in fixups:
        h.setTarget(handlers[t])
    return handlers
def _handle_existing_loggers(existing, child_loggers, disable_existing):
    """
    When (re)configuring logging, handle loggers which were in the previous
    configuration but are not in the new configuration. There's no point
    deleting them as other threads may continue to hold references to them;
    and by disabling them, you stop them doing any logging.

    However, don't disable children of named loggers, as that's probably not
    what was intended by the user. Also, allow existing loggers to NOT be
    disabled if disable_existing is false.
    """
    logger_dict = logging.root.manager.loggerDict
    for name in existing:
        logger = logger_dict[name]
        if name not in child_loggers:
            logger.disabled = disable_existing
        else:
            # child of a named logger: reset rather than disable
            logger.level = logging.NOTSET
            logger.handlers = []
            logger.propagate = True
def _install_loggers(cp, handlers, disable_existing):
    """Create and install loggers listed in the [loggers] section.

    The root logger is configured first from [logger_root]; remaining
    loggers come from [logger_<name>] sections. Loggers present before this
    call but absent from the new configuration are disabled (not deleted),
    except children of newly-named loggers, which are reset instead.
    """

    # configure the root first
    llist = cp["loggers"]["keys"]
    llist = llist.split(",")
    llist = list(map(lambda x: x.strip(), llist))
    llist.remove("root")
    section = cp["logger_root"]
    root = logging.root
    log = root
    if "level" in section:
        level = section["level"]
        log.setLevel(level)
    # detach every handler currently on root (iterate over a copy)
    for h in root.handlers[:]:
        root.removeHandler(h)
    hlist = section["handlers"]
    if len(hlist):
        hlist = hlist.split(",")
        hlist = _strip_spaces(hlist)
        for hand in hlist:
            log.addHandler(handlers[hand])

    #and now the others...
    #we don't want to lose the existing loggers,
    #since other threads may have pointers to them.
    #existing is set to contain all existing loggers,
    #and as we go through the new configuration we
    #remove any which are configured. At the end,
    #what's left in existing is the set of loggers
    #which were in the previous configuration but
    #which are not in the new configuration.
    existing = list(root.manager.loggerDict.keys())
    #The list needs to be sorted so that we can
    #avoid disabling child loggers of explicitly
    #named loggers. With a sorted list it is easier
    #to find the child loggers.
    existing.sort()
    #We'll keep the list of existing loggers
    #which are children of named loggers here...
    child_loggers = []
    #now set up the new ones...
    for log in llist:
        section = cp["logger_%s" % log]
        qn = section["qualname"]
        propagate = section.getint("propagate", fallback=1)
        logger = logging.getLogger(qn)
        if qn in existing:
            i = existing.index(qn) + 1 # start with the entry after qn
            prefixed = qn + "."
            pflen = len(prefixed)
            num_existing = len(existing)
            # collect descendants of qn (they immediately follow qn in the
            # sorted list and share the "qn." prefix)
            while i < num_existing:
                if existing[i][:pflen] == prefixed:
                    child_loggers.append(existing[i])
                i += 1
            existing.remove(qn)
        if "level" in section:
            level = section["level"]
            logger.setLevel(level)
        for h in logger.handlers[:]:
            logger.removeHandler(h)
        logger.propagate = propagate
        logger.disabled = 0
        hlist = section["handlers"]
        if len(hlist):
            hlist = hlist.split(",")
            hlist = _strip_spaces(hlist)
            for hand in hlist:
                logger.addHandler(handlers[hand])

    #Disable any old loggers. There's no point deleting
    #them as other threads may continue to hold references
    #and by disabling them, you stop them doing any logging.
    #However, don't disable children of named loggers, as that's
    #probably not what was intended by the user.
    #for log in existing:
    #    logger = root.manager.loggerDict[log]
    #    if log in child_loggers:
    #        logger.level = logging.NOTSET
    #        logger.handlers = []
    #        logger.propagate = 1
    #    elif disable_existing_loggers:
    #        logger.disabled = 1
    _handle_existing_loggers(existing, child_loggers, disable_existing)
# ASCII identifier check used to vet config keys before they become kwargs.
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if *s* is a valid Python identifier; raise ValueError otherwise."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
class ConvertingMixin(object):
    """For ConvertingXXX's, this mixin class provides common functions"""

    def convert_with_key(self, key, value, replace=True):
        # Delegate the actual conversion to the owning configurator.
        result = self.configurator.convert(value)
        #If the converted value is different, save for next time
        if value is not result:
            if replace:
                self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                # Record where the wrapped value lives so cfg:// style
                # references can navigate back through the structure.
                result.parent = self
                result.key = key
        return result

    def convert(self, value):
        # Same as convert_with_key but for values with no stable key
        # (e.g. items popped from a list); nothing is written back.
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict, ConvertingMixin):
    """A converting dictionary wrapper."""

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        return self.convert_with_key(key, value)

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        return self.convert_with_key(key, value)

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        # replace=False: the key has been removed, so there is nothing to
        # write the converted value back into.
        return self.convert_with_key(key, value, replace=False)
class ConvertingList(list, ConvertingMixin):
    """A converting list wrapper."""

    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        return self.convert_with_key(key, value)

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        # popped item has no stable index, so use the keyless convert()
        return self.convert(value)
class ConvertingTuple(tuple, ConvertingMixin):
    """A converting tuple wrapper."""

    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        # Can't replace a tuple entry.
        return self.convert_with_key(key, value, replace=False)
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.

    Provides dotted-name resolution and the ``ext://`` / ``cfg://``
    string-to-object conversion protocols used by dictConfig().
    """

    # matches "prefix://suffix" conversion strings, e.g. "ext://sys.stdout"
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # tokens for parsing cfg:// paths like "handlers.console[0].level"
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # maps conversion prefix -> name of the converter method on self
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = staticmethod(__import__)

    def __init__(self, config):
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # may be a submodule not imported yet: import and retry
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            #print d, rest
            # walk the remaining ".attr" / "[index]" components
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx) # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # container keyed by string digits, not ints
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        #rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, str): # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
        result = c(**kwargs)
        if props:
            # '.' holds attributes to set on the constructed object
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    Configuration order matters: formatters, then filters, then handlers
    (which reference the first two), then loggers, then the root logger.
    """

    def configure(self):
        """Do the configuration."""

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                # Incremental: only adjust levels/propagation of existing
                # handlers and loggers; nothing is created or torn down.
                handlers = config.get('handlers', EMPTY_DICT)
                for name in handlers:
                    if name not in logging._handlers:
                        raise ValueError('No handler found with '
                                         'name %r' % name)
                    else:
                        try:
                            handler = logging._handlers[name]
                            handler_config = handlers[name]
                            level = handler_config.get('level', None)
                            if level:
                                handler.setLevel(logging._checkLevel(level))
                        except Exception as e:
                            raise ValueError('Unable to configure handler '
                                             '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                                                            formatters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                deferred = []
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        # a MemoryHandler whose target isn't built yet is
                        # retried after the first pass completes
                        if 'target not configured yet' in str(e):
                            deferred.append(name)
                        else:
                            raise ValueError('Unable to configure handler '
                                             '%r: %s' % (name, e))

                # Now do any that were deferred
                for name in deferred:
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))

                # Next, do loggers - they refer to handlers and filters

                #we don't want to lose the existing loggers,
                #since other threads may have pointers to them.
                #existing is set to contain all existing loggers,
                #and as we go through the new configuration we
                #remove any which are configured. At the end,
                #what's left in existing is the set of loggers
                #which were in the previous configuration but
                #which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict.keys())
                #The list needs to be sorted so that we can
                #avoid disabling child loggers of explicitly
                #named loggers. With a sorted list it is easier
                #to find the child loggers.
                existing.sort()
                #We'll keep the list of existing loggers
                #which are children of named loggers here...
                child_loggers = []
                #now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name) + 1 # look after name
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        while i < num_existing:
                            if existing[i][:pflen] == prefixed:
                                child_loggers.append(existing[i])
                            i += 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                #Disable any old loggers. There's no point deleting
                #them as other threads may continue to hold references
                #and by disabling them, you stop them doing any logging.
                #However, don't disable children of named loggers, as that's
                #probably not what was intended by the user.
                #for log in existing:
                #    logger = root.manager.loggerDict[log]
                #    if log in child_loggers:
                #        logger.level = logging.NOTSET
                #        logger.handlers = []
                #        logger.propagate = True
                #    elif disable_existing:
                #        logger.disabled = True
                _handle_existing_loggers(existing, child_loggers,
                                         disable_existing)

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()'] # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                #Name of parameter changed from fmt to format.
                #Retry with old name.
                #This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            style = config.get('style', '%')
            result = logging.Formatter(fmt, dfmt, style)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except Exception as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        config_copy = dict(config)  # for restoring in case of error
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except Exception as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            factory = c
        else:
            cname = config.pop('class')
            klass = self.resolve(cname)
            #Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    th = self.config['handlers'][config['target']]
                    if not isinstance(th, logging.Handler):
                        config.update(config_copy)  # restore for deferred cfg
                        raise TypeError('target not configured yet')
                    config['target'] = th
                except Exception as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        props = config.pop('.', None)
        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            #The argument name changed from strm to stream
            #Retry with old name.
            #This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(logging._checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        if props:
            # '.' holds extra attributes to set on the handler instance
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except Exception as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(logging._checkLevel(level))
        if not incremental:
            #Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# dictConfig() instantiates this class; assign a DictConfigurator subclass
# here to customize dictionary-based configuration behaviour.
dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
    """
    Start up a socket server on the specified port, and listen for new
    configurations.

    These will be sent as a file suitable for processing by fileConfig().
    Returns a Thread object on which you can call start() to start the server,
    and which you can join() when appropriate. To stop the server, call
    stopListening().

    Use the ``verify`` argument to verify any bytes received across the wire
    from a client. If specified, it should be a callable which receives a
    single argument - the bytes of configuration data received across the
    network - and it should return either ``None``, to indicate that the
    passed in bytes could not be verified and should be discarded, or a
    byte string which is then passed to the configuration machinery as
    normal. Note that you can return transformed bytes, e.g. by decrypting
    the bytes passed in.
    """
    if not thread: #pragma: no cover
        raise NotImplementedError("listen() needs threading to work")

    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a logging configuration request.

        It expects a completely new logging configuration and uses fileConfig
        to install it.
        """
        def handle(self):
            """
            Handle a request.

            Each request is expected to be a 4-byte length, packed using
            struct.pack(">L", n), followed by the config file.
            Uses fileConfig() to do the grunt work.
            """
            try:
                conn = self.connection
                chunk = conn.recv(4)
                if len(chunk) == 4:
                    slen = struct.unpack(">L", chunk)[0]
                    chunk = self.connection.recv(slen)
                    # keep reading until the whole payload has arrived
                    while len(chunk) < slen:
                        chunk = chunk + conn.recv(slen - len(chunk))
                    if self.server.verify is not None:
                        chunk = self.server.verify(chunk)
                    if chunk is not None:   # verified, can process
                        chunk = chunk.decode("utf-8")
                        try:
                            # Try JSON (a dict config for dictConfig()) first...
                            import json
                            d = json.loads(chunk)
                            assert isinstance(d, dict)
                            dictConfig(d)
                        except Exception:
                            # ...otherwise treat it as ini-style text.
                            #Apply new configuration.
                            file = io.StringIO(chunk)
                            try:
                                fileConfig(file)
                            except Exception:
                                traceback.print_exc()
                    if self.server.ready:
                        self.server.ready.set()
            except OSError as e:
                # peer reset is expected; anything else propagates
                if e.errno != RESET_ERROR:
                    raise

    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """

        allow_reuse_address = 1

        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
                     handler=None, ready=None, verify=None):
            ThreadingTCPServer.__init__(self, (host, port), handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            self.timeout = 1
            self.ready = ready
            self.verify = verify

        def serve_until_stopped(self):
            import select
            abort = 0
            while not abort:
                # wake up at least every self.timeout seconds to re-check
                # the abort flag set by stopListening()
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.socket.close()

    class Server(threading.Thread):

        def __init__(self, rcvr, hdlr, port, verify):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.port = port
            self.verify = verify
            self.ready = threading.Event()

        def run(self):
            server = self.rcvr(port=self.port, handler=self.hdlr,
                               ready=self.ready,
                               verify=self.verify)
            if self.port == 0:
                # an ephemeral port was requested; record the bound one
                self.port = server.server_address[1]
            self.ready.set()
            global _listener
            logging._acquireLock()
            _listener = server
            logging._releaseLock()
            server.serve_until_stopped()

    return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify)
def stopListening():
    """
    Stop the listening server which was created with a call to listen().
    """
    global _listener
    logging._acquireLock()
    try:
        if _listener:
            # serve_until_stopped() polls this flag (at most every
            # `timeout` seconds) and exits its loop when set.
            _listener.abort = 1
            _listener = None
    finally:
        logging._releaseLock()
| mit |
stavka/pieva2 | svg.path-1.1/src/svg/path/tests/test_paths.py | 2 | 21060 | from __future__ import division
import unittest
from math import sqrt, pi
from ..path import CubicBezier, QuadraticBezier, Line, Arc, Path
# Most of these test points are not calculated separately, as that would
# take too long and be too error prone. Instead the curves have been verified
# to be correct visually, by drawing them with the turtle module, with code
# like this:
#
# import turtle
# t = turtle.Turtle()
# t.penup()
#
# for arc in (path1, path2):
# p = arc.point(0)
# t.goto(p.real-500, -p.imag+300)
# t.dot(3, 'black')
# t.pendown()
# for x in range(1,101):
# p = arc.point(x*0.01)
# t.goto(p.real-500, -p.imag+300)
# t.penup()
# t.dot(3, 'black')
#
# raw_input()
#
# After the paths have been verified to be correct this way, the testing of
# points along the paths has been added as regression tests, to make sure
# nobody changes the way curves are drawn by mistake. Therefore, do not take
# these points religiously. They might be subtly wrong, unless otherwise
# noted.
class LineTest(unittest.TestCase):

    def test_lines(self):
        # These points are calculated, and not just regression tests.
        # Each case: (line, [(t, expected point), ...], expected length).
        cases = [
            (Line(0j, 400+0j),
             [(0, 0j), (0.3, 120+0j), (0.5, 200+0j),
              (0.9, 360+0j), (1, 400+0j)],
             400),
            (Line(400+0j, 400+300j),
             [(0, 400+0j), (0.3, 400+90j), (0.5, 400+150j),
              (0.9, 400+270j), (1, 400+300j)],
             300),
            (Line(400+300j, 0j),
             [(0, 400+300j), (0.3, 280+210j), (0.5, 200+150j),
              (0.9, 40+30j), (1, 0j)],
             500),
        ]
        for line, samples, expected_length in cases:
            for t, expected in samples:
                self.assertAlmostEqual(line.point(t), expected)
            self.assertAlmostEqual(line.length(), expected_length)
class CubicBezierTest(unittest.TestCase):
    def test_approx_circle(self):
        """This is an approximate circle drawn in Inkscape.

        The four cubic segments together trace a full (approximate)
        circle; the asserted points are regression values (see the
        note at the top of this file).
        """
        # First quadrant (top-left quarter of the circle).
        arc1 = CubicBezier(
            complex(0,0),
            complex(0,109.66797),
            complex(-88.90345,198.57142),
            complex(-198.57142,198.57142)
        )
        self.assertAlmostEqual(arc1.point(0), (0j))
        self.assertAlmostEqual(arc1.point(0.1), (-2.59896457+32.20931647j))
        self.assertAlmostEqual(arc1.point(0.2), (-10.12330256+62.76392816j))
        self.assertAlmostEqual(arc1.point(0.3), (-22.16418039+91.25500149j))
        self.assertAlmostEqual(arc1.point(0.4), (-38.31276448+117.27370288j))
        self.assertAlmostEqual(arc1.point(0.5), (-58.16022125+140.41119875j))
        self.assertAlmostEqual(arc1.point(0.6), (-81.29771712+160.25865552j))
        self.assertAlmostEqual(arc1.point(0.7), (-107.31641851+176.40723961j))
        self.assertAlmostEqual(arc1.point(0.8), (-135.80749184+188.44811744j))
        self.assertAlmostEqual(arc1.point(0.9), (-166.36210353+195.97245543j))
        self.assertAlmostEqual(arc1.point(1), (-198.57142+198.57142j))
        # Second quarter, continuing from arc1's end point.
        arc2 = CubicBezier(
            complex(-198.57142,198.57142),
            complex(-109.66797-198.57142,0+198.57142),
            complex(-198.57143-198.57142,-88.90345+198.57142),
            complex(-198.57143-198.57142,0),
        )
        self.assertAlmostEqual(arc2.point(0), (-198.57142+198.57142j))
        self.assertAlmostEqual(arc2.point(0.1), (-230.78073675+195.97245543j))
        self.assertAlmostEqual(arc2.point(0.2), (-261.3353492+188.44811744j))
        self.assertAlmostEqual(arc2.point(0.3), (-289.82642365+176.40723961j))
        self.assertAlmostEqual(arc2.point(0.4), (-315.8451264+160.25865552j))
        self.assertAlmostEqual(arc2.point(0.5), (-338.98262375+140.41119875j))
        self.assertAlmostEqual(arc2.point(0.6), (-358.830082+117.27370288j))
        self.assertAlmostEqual(arc2.point(0.7), (-374.97866745+91.25500149j))
        self.assertAlmostEqual(arc2.point(0.8), (-387.0195464+62.76392816j))
        self.assertAlmostEqual(arc2.point(0.9), (-394.54388515+32.20931647j))
        self.assertAlmostEqual(arc2.point(1), (-397.14285+0j))
        # Third quarter (bottom-left).
        arc3 = CubicBezier(
            complex(-198.57143-198.57142,0),
            complex(0-198.57143-198.57142,-109.66797),
            complex(88.90346-198.57143-198.57142,-198.57143),
            complex(-198.57142,-198.57143)
        )
        self.assertAlmostEqual(arc3.point(0), (-397.14285+0j))
        self.assertAlmostEqual(arc3.point(0.1), (-394.54388515-32.20931675j))
        self.assertAlmostEqual(arc3.point(0.2), (-387.0195464-62.7639292j))
        self.assertAlmostEqual(arc3.point(0.3), (-374.97866745-91.25500365j))
        self.assertAlmostEqual(arc3.point(0.4), (-358.830082-117.2737064j))
        self.assertAlmostEqual(arc3.point(0.5), (-338.98262375-140.41120375j))
        self.assertAlmostEqual(arc3.point(0.6), (-315.8451264-160.258662j))
        self.assertAlmostEqual(arc3.point(0.7), (-289.82642365-176.40724745j))
        self.assertAlmostEqual(arc3.point(0.8), (-261.3353492-188.4481264j))
        self.assertAlmostEqual(arc3.point(0.9), (-230.78073675-195.97246515j))
        self.assertAlmostEqual(arc3.point(1), (-198.57142-198.57143j))
        # Fourth quarter, closing the loop back at the origin.
        arc4 = CubicBezier(
            complex(-198.57142,-198.57143),
            complex(109.66797-198.57142,0-198.57143),
            complex(0,88.90346-198.57143),
            complex(0,0),
        )
        self.assertAlmostEqual(arc4.point(0), (-198.57142-198.57143j))
        self.assertAlmostEqual(arc4.point(0.1), (-166.36210353-195.97246515j))
        self.assertAlmostEqual(arc4.point(0.2), (-135.80749184-188.4481264j))
        self.assertAlmostEqual(arc4.point(0.3), (-107.31641851-176.40724745j))
        self.assertAlmostEqual(arc4.point(0.4), (-81.29771712-160.258662j))
        self.assertAlmostEqual(arc4.point(0.5), (-58.16022125-140.41120375j))
        self.assertAlmostEqual(arc4.point(0.6), (-38.31276448-117.2737064j))
        self.assertAlmostEqual(arc4.point(0.7), (-22.16418039-91.25500365j))
        self.assertAlmostEqual(arc4.point(0.8), (-10.12330256-62.7639292j))
        self.assertAlmostEqual(arc4.point(0.9), (-2.59896457-32.20931675j))
        self.assertAlmostEqual(arc4.point(1), (0j))

    def test_svg_examples(self):
        """Cubic Bezier paths taken from the SVG specification examples."""
        # M100,200 C100,100 250,100 250,200
        path1 = CubicBezier(100+200j, 100+100j, 250+100j, 250+200j)
        self.assertAlmostEqual(path1.point(0), (100+200j))
        self.assertAlmostEqual(path1.point(0.3), (132.4+137j))
        self.assertAlmostEqual(path1.point(0.5), (175+125j))
        self.assertAlmostEqual(path1.point(0.9), (245.8+173j))
        self.assertAlmostEqual(path1.point(1), (250+200j))
        # S400,300 400,200
        path2 = CubicBezier(250+200j, 250+300j, 400+300j, 400+200j)
        self.assertAlmostEqual(path2.point(0), (250+200j))
        self.assertAlmostEqual(path2.point(0.3), (282.4+263j))
        self.assertAlmostEqual(path2.point(0.5), (325+275j))
        self.assertAlmostEqual(path2.point(0.9), (395.8+227j))
        self.assertAlmostEqual(path2.point(1), (400+200j))
        # M100,200 C100,100 400,100 400,200
        path3 = CubicBezier(100+200j, 100+100j, 400+100j, 400+200j)
        self.assertAlmostEqual(path3.point(0), (100+200j))
        self.assertAlmostEqual(path3.point(0.3), (164.8+137j))
        self.assertAlmostEqual(path3.point(0.5), (250+125j))
        self.assertAlmostEqual(path3.point(0.9), (391.6+173j))
        self.assertAlmostEqual(path3.point(1), (400+200j))
        # M100,500 C25,400 475,400 400,500
        path4 = CubicBezier(100+500j, 25+400j, 475+400j, 400+500j)
        self.assertAlmostEqual(path4.point(0), (100+500j))
        self.assertAlmostEqual(path4.point(0.3), (145.9+437j))
        self.assertAlmostEqual(path4.point(0.5), (250+425j))
        self.assertAlmostEqual(path4.point(0.9), (407.8+473j))
        self.assertAlmostEqual(path4.point(1), (400+500j))
        # M100,800 C175,700 325,700 400,800
        path5 = CubicBezier(100+800j, 175+700j, 325+700j, 400+800j)
        self.assertAlmostEqual(path5.point(0), (100+800j))
        self.assertAlmostEqual(path5.point(0.3), (183.7+737j))
        self.assertAlmostEqual(path5.point(0.5), (250+725j))
        self.assertAlmostEqual(path5.point(0.9), (375.4+773j))
        self.assertAlmostEqual(path5.point(1), (400+800j))
        # M600,200 C675,100 975,100 900,200
        path6 = CubicBezier(600+200j, 675+100j, 975+100j, 900+200j)
        self.assertAlmostEqual(path6.point(0), (600+200j))
        self.assertAlmostEqual(path6.point(0.3), (712.05+137j))
        self.assertAlmostEqual(path6.point(0.5), (806.25+125j))
        self.assertAlmostEqual(path6.point(0.9), (911.85+173j))
        self.assertAlmostEqual(path6.point(1), (900+200j))
        # M600,500 C600,350 900,650 900,500
        path7 = CubicBezier(600+500j, 600+350j, 900+650j, 900+500j)
        self.assertAlmostEqual(path7.point(0), (600+500j))
        self.assertAlmostEqual(path7.point(0.3), (664.8+462.2j))
        self.assertAlmostEqual(path7.point(0.5), (750+500j))
        self.assertAlmostEqual(path7.point(0.9), (891.6+532.4j))
        self.assertAlmostEqual(path7.point(1), (900+500j))
        # M600,800 C625,700 725,700 750,800
        path8 = CubicBezier(600+800j, 625+700j, 725+700j, 750+800j)
        self.assertAlmostEqual(path8.point(0), (600+800j))
        self.assertAlmostEqual(path8.point(0.3), (638.7+737j))
        self.assertAlmostEqual(path8.point(0.5), (675+725j))
        self.assertAlmostEqual(path8.point(0.9), (740.4+773j))
        self.assertAlmostEqual(path8.point(1), (750+800j))
        # S875,900 900,800
        # The "S" (smooth) command reflects the previous control point
        # through the previous end point.
        inversion = (750+800j) + (750+800j) - (725+700j)
        path9 = CubicBezier(750+800j, inversion, 875+900j, 900+800j)
        self.assertAlmostEqual(path9.point(0), (750+800j))
        self.assertAlmostEqual(path9.point(0.3), (788.7+863j))
        self.assertAlmostEqual(path9.point(0.5), (825+875j))
        self.assertAlmostEqual(path9.point(0.9), (890.4+827j))
        self.assertAlmostEqual(path9.point(1), (900+800j))

    def test_length(self):
        """Length of degenerate (straight) cubics and a quarter-circle."""
        # A straight line:
        arc = CubicBezier(
            complex(0,0),
            complex(0,0),
            complex(0,100),
            complex(0,100)
        )
        self.assertAlmostEqual(arc.length(), 100)
        # A diagonal line:
        arc = CubicBezier(
            complex(0,0),
            complex(0,0),
            complex(100,100),
            complex(100,100)
        )
        self.assertAlmostEqual(arc.length(), sqrt(2*100*100))
        # A quarter circle arc with radius 100:
        kappa = 4*(sqrt(2)-1)/3  # http://www.whizkidtech.redprince.net/bezier/circle/
        arc = CubicBezier(
            complex(0,0),
            complex(0,kappa*100),
            complex(100-kappa*100,100),
            complex(100,100)
        )
        # We can't compare with pi*50 here, because this is just an
        # approximation of a circle arc. pi*50 is 157.079632679
        # So this is just yet another "warn if this changes" test.
        # This value is not to be seen as verified as correct.
        self.assertAlmostEqual(arc.length(), 157.1016698)
class QuadraticBezierTest(unittest.TestCase):

    def test_svg_examples(self):
        """The quadratic path from the SVG specs:
        M200,300 Q400,50 600,300 T1000,300
        """
        # Q400,50 600,300
        path1 = QuadraticBezier(200+300j, 400+50j, 600+300j)
        expected1 = [200+300j, 336.8+142.5j, 400+112.5j, 545.6+232.5j, 600+300j]
        # T1000,300 -- the control point is the previous control point
        # reflected through the previous end point.
        inversion = (600+300j) + (600+300j) - (400+50j)
        path2 = QuadraticBezier(600+300j, inversion, 1000+300j)
        expected2 = [600+300j, 736.8+457.5j, 800+487.5j, 945.6+367.5j, 1000+300j]
        for path, expected_points in ((path1, expected1), (path2, expected2)):
            for t, expected in zip((0, 0.3, 0.5, 0.9, 1), expected_points):
                self.assertAlmostEqual(path.point(t), expected)
class ArcTest(unittest.TestCase):

    def test_points(self):
        """Check center/theta/delta and sampled points for the four
        large-arc/sweep flag combinations of the same arc endpoints.

        The points are regression values, verified visually (see the
        note at the top of this file).
        """
        # large_arc=0, sweep=0
        arc1 = Arc(0j, 100+50j, 0, 0, 0, 100+50j)
        self.assertAlmostEqual(arc1.center, 100+0j)
        self.assertAlmostEqual(arc1.theta, 180.0)
        self.assertAlmostEqual(arc1.delta, -90.0)
        self.assertAlmostEqual(arc1.point(0.0), (0j))
        self.assertAlmostEqual(arc1.point(0.1), (1.23116594049+7.82172325201j))
        self.assertAlmostEqual(arc1.point(0.2), (4.89434837048+15.4508497187j))
        self.assertAlmostEqual(arc1.point(0.3), (10.8993475812+22.699524987j))
        self.assertAlmostEqual(arc1.point(0.4), (19.0983005625+29.3892626146j))
        self.assertAlmostEqual(arc1.point(0.5), (29.2893218813+35.3553390593j))
        self.assertAlmostEqual(arc1.point(0.6), (41.2214747708+40.4508497187j))
        self.assertAlmostEqual(arc1.point(0.7), (54.6009500260+44.5503262094j))
        self.assertAlmostEqual(arc1.point(0.8), (69.0983005625+47.5528258148j))
        self.assertAlmostEqual(arc1.point(0.9), (84.3565534960+49.3844170298j))
        self.assertAlmostEqual(arc1.point(1.0), (100+50j))
        # large_arc=1, sweep=0
        arc2 = Arc(0j, 100+50j, 0, 1, 0, 100+50j)
        self.assertAlmostEqual(arc2.center, 50j)
        self.assertAlmostEqual(arc2.theta, 270.0)
        self.assertAlmostEqual(arc2.delta, -270.0)
        self.assertAlmostEqual(arc2.point(0.0), (0j))
        self.assertAlmostEqual(arc2.point(0.1), (-45.399049974+5.44967379058j))
        self.assertAlmostEqual(arc2.point(0.2), (-80.9016994375+20.6107373854j))
        self.assertAlmostEqual(arc2.point(0.3), (-98.7688340595+42.178276748j))
        self.assertAlmostEqual(arc2.point(0.4), (-95.1056516295+65.4508497187j))
        self.assertAlmostEqual(arc2.point(0.5), (-70.7106781187+85.3553390593j))
        self.assertAlmostEqual(arc2.point(0.6), (-30.9016994375+97.5528258148j))
        self.assertAlmostEqual(arc2.point(0.7), (15.643446504+99.3844170298j))
        self.assertAlmostEqual(arc2.point(0.8), (58.7785252292+90.4508497187j))
        self.assertAlmostEqual(arc2.point(0.9), (89.1006524188+72.699524987j))
        self.assertAlmostEqual(arc2.point(1.0), (100+50j))
        # large_arc=0, sweep=1
        arc3 = Arc(0j, 100+50j, 0, 0, 1, 100+50j)
        self.assertAlmostEqual(arc3.center, 50j)
        self.assertAlmostEqual(arc3.theta, 270.0)
        self.assertAlmostEqual(arc3.delta, 90.0)
        self.assertAlmostEqual(arc3.point(0.0), (0j))
        self.assertAlmostEqual(arc3.point(0.1), (15.643446504+0.615582970243j))
        self.assertAlmostEqual(arc3.point(0.2), (30.9016994375+2.44717418524j))
        self.assertAlmostEqual(arc3.point(0.3), (45.399049974+5.44967379058j))
        self.assertAlmostEqual(arc3.point(0.4), (58.7785252292+9.54915028125j))
        self.assertAlmostEqual(arc3.point(0.5), (70.7106781187+14.6446609407j))
        self.assertAlmostEqual(arc3.point(0.6), (80.9016994375+20.6107373854j))
        self.assertAlmostEqual(arc3.point(0.7), (89.1006524188+27.300475013j))
        self.assertAlmostEqual(arc3.point(0.8), (95.1056516295+34.5491502813j))
        self.assertAlmostEqual(arc3.point(0.9), (98.7688340595+42.178276748j))
        self.assertAlmostEqual(arc3.point(1.0), (100+50j))
        # large_arc=1, sweep=1
        arc4 = Arc(0j, 100+50j, 0, 1, 1, 100+50j)
        self.assertAlmostEqual(arc4.center, 100+0j)
        self.assertAlmostEqual(arc4.theta, 180.0)
        self.assertAlmostEqual(arc4.delta, 270.0)
        self.assertAlmostEqual(arc4.point(0.0), (0j))
        self.assertAlmostEqual(arc4.point(0.1), (10.8993475812-22.699524987j))
        self.assertAlmostEqual(arc4.point(0.2), (41.2214747708-40.4508497187j))
        self.assertAlmostEqual(arc4.point(0.3), (84.3565534960-49.3844170298j))
        self.assertAlmostEqual(arc4.point(0.4), (130.901699437-47.5528258148j))
        self.assertAlmostEqual(arc4.point(0.5), (170.710678119-35.3553390593j))
        self.assertAlmostEqual(arc4.point(0.6), (195.105651630-15.4508497187j))
        self.assertAlmostEqual(arc4.point(0.7), (198.768834060+7.82172325201j))
        self.assertAlmostEqual(arc4.point(0.8), (180.901699437+29.3892626146j))
        self.assertAlmostEqual(arc4.point(0.9), (145.399049974+44.5503262094j))
        self.assertAlmostEqual(arc4.point(1.0), (100+50j))

    def test_length(self):
        """Two half-circle arcs of radius 100 must each measure pi*100.

        Renamed from the original misspelling ``test_lenght``; unittest
        discovery is unaffected (the name still starts with ``test``).
        """
        # I'll test the length calculations by making a circle, in two parts.
        arc1 = Arc(0j, 100+100j, 0, 0, 0, 200+0j)
        arc2 = Arc(200+0j, 100+100j, 0, 0, 0, 0j)
        self.assertAlmostEqual(arc1.length(), pi*100)
        self.assertAlmostEqual(arc2.length(), pi*100)
class TestPath(unittest.TestCase):
    def test_circle(self):
        # A full circle assembled from two half-circle arcs; its
        # circumference must be 2*pi*r with r = 100.
        arc1 = Arc(0j, 100+100j, 0, 0, 0, 200+0j)
        arc2 = Arc(200+0j, 100+100j, 0, 0, 0, 0j)
        path = Path(arc1, arc2)
        self.assertAlmostEqual(path.point(0.0), (0j))
        self.assertAlmostEqual(path.point(0.25), (100+100j))
        self.assertAlmostEqual(path.point(0.5), (200+0j))
        self.assertAlmostEqual(path.point(0.75), (100-100j))
        self.assertAlmostEqual(path.point(1.0), (0j))
        self.assertAlmostEqual(path.length(), pi*200)

    def test_svg_specs(self):
        """The paths that are in the SVG specs"""
        # Big pie: M300,200 h-150 a150,150 0 1,0 150,-150 z
        path = Path(Line(300+200j, 150+200j), Arc(150+200j, 150+150j, 0, 1, 0, 300+50j), Line(300+50j, 300+200j))
        # The points and length for this path are calculated and not regression tests.
        # (t = 0.14897825542 is where the first line segment ends.)
        self.assertAlmostEqual(path.point(0.0), (300+200j))
        self.assertAlmostEqual(path.point(0.14897825542), (150+200j))
        self.assertAlmostEqual(path.point(0.5), (406.066017177+306.066017177j))
        self.assertAlmostEqual(path.point(1-0.14897825542), (300+50j))
        self.assertAlmostEqual(path.point(1.0), (300+200j))
        # The errors seem to accumulate. Still 6 decimal places is more than good enough.
        self.assertAlmostEqual(path.length(), pi*225+300, places=6)
        # Little pie: M275,175 v-150 a150,150 0 0,0 -150,150 z
        path = Path(Line(275+175j, 275+25j), Arc(275+25j, 150+150j, 0, 0, 0, 125+175j), Line(125+175j, 275+175j))
        # The points and length for this path are calculated and not regression tests.
        self.assertAlmostEqual(path.point(0.0), (275+175j))
        self.assertAlmostEqual(path.point(0.2800495767557787), (275+25j))
        self.assertAlmostEqual(path.point(0.5), (168.93398282201787+68.93398282201787j))
        self.assertAlmostEqual(path.point(1-0.2800495767557787), (125+175j))
        self.assertAlmostEqual(path.point(1.0), (275+175j))
        # The errors seem to accumulate. Still 6 decimal places is more than good enough.
        self.assertAlmostEqual(path.length(), pi*75+300, places=6)
        # Bumpy path: M600,350 l 50,-25
        #             a25,25 -30 0,1 50,-25 l 50,-25
        #             a25,50 -30 0,1 50,-25 l 50,-25
        #             a25,75 -30 0,1 50,-25 l 50,-25
        #             a25,100 -30 0,1 50,-25 l 50,-25
        path = Path(Line(600+350j, 650+325j),
                    Arc(650+325j, 25+25j, -30, 0, 1, 700+300j),
                    Line(700+300j, 750+275j),
                    Arc(750+275j, 25+50j, -30, 0, 1, 800+250j),
                    Line(800+250j, 850+225j),
                    Arc(850+225j, 25+75j, -30, 0, 1, 900+200j),
                    Line(900+200j, 950+175j),
                    Arc(950+175j, 25+100j, -30, 0, 1, 1000+150j),
                    Line(1000+150j, 1050+125j),
                    )
        # These are *not* calculated, but just regression tests. Be skeptical.
        self.assertAlmostEqual(path.point(0.0), (600+350j))
        self.assertAlmostEqual(path.point(0.3), (755.31526434+217.51578768j))
        self.assertAlmostEqual(path.point(0.5), (832.23324151+156.33454892j))
        self.assertAlmostEqual(path.point(0.9), (974.00559321+115.26473532j))
        self.assertAlmostEqual(path.point(1.0), (1050+125j))
        # The errors seem to accumulate. Still 6 decimal places is more than good enough.
        self.assertAlmostEqual(path.length(), 860.6756221710)
if __name__ == "__main__":
    unittest.main()
alfa-addon/addon | plugin.video.alfa/servers/nuvid.py | 1 | 1292 | # -*- coding: utf-8 -*-
from core import httptools
from core import scrapertools
from platformcode import logger
def test_video_exists(page_url):
    """Check that the remote video still exists.

    As a side effect this fills the module-level globals ``server`` and
    ``vid`` (extracted from *page_url*), which get_video_url() uses to
    build the player-config request.

    Returns a (exists, message) tuple.
    """
    logger.info()
    global server, vid
    # Fixed regex: the original class [A-z0-9-] also matched the ASCII
    # characters between 'Z' and 'a' ('[', '\', ']', '^', '_', '`'),
    # and the unescaped dots matched any character.
    server = scrapertools.find_single_match(page_url, r'www\.([a-zA-Z0-9-]+)\.com')
    vid = scrapertools.find_single_match(page_url, '(?:embed|video)/([0-9]+)')
    data = httptools.downloadpage(page_url).data
    if "File was deleted" in data \
            or "not Found" in data:
        # or "small" in data:
        return False, "[%s] El video ha sido borrado o no existe" % server
    return True, ""
def get_video_url(page_url, video_password):
    """Resolve the playable media URLs for a nuvid page.

    Relies on the module globals ``server`` and ``vid`` previously set
    by test_video_exists(). Returns a list of [label, url] pairs.
    """
    logger.info("(page_url='%s')" % page_url)
    config_url = "https://www.%s.com/player_config_json/?vid=%s&aid=0&domain_id=0&embed=0&ref=null&check_speed=0" %(server,vid)
    data = httptools.downloadpage(config_url).data
    data = scrapertools.find_single_match(data, '"files":(.*?)"quality"')
    matches = scrapertools.find_multiple_matches(data, '"([lh])q":"([^"]+)"')
    # The regex captures a single quality letter: 'l' (low) or 'h' (high).
    quality_labels = {"l": "360p", "h": "720p"}
    video_urls = []
    for quality, scrapedurl in matches:
        media_url = scrapedurl.replace("\/", "/")
        label = quality_labels.get(quality, quality)
        video_urls.append(["[%s] %s" %(server,label), media_url])
    return video_urls
| gpl-3.0 |
alphafoobar/intellij-community | python/lib/Lib/site-packages/django/views/csrf.py | 289 | 3834 | from django.http import HttpResponseForbidden
from django.template import Context, Template
from django.conf import settings
# We include the template inline since we need to be able to reliably display
# this error message, especially for the sake of developers, and there isn't any
# other way of making it available independent of what is in the settings file.
# NOTE(review): the constant name carries the historical misspelling
# "FAILRE" (for "FAILURE"). It is referenced by csrf_failure() below and
# may be imported elsewhere, so it is kept as-is.
CSRF_FAILRE_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>403 Forbidden</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
#info { background:#f6f6f6; }
#info ul { margin: 0.5em 4em; }
#info p, #summary p { padding-top:10px; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Forbidden <span>(403)</span></h1>
<p>CSRF verification failed. Request aborted.</p>
{% if no_referer %}
<p>You are seeing this message because this HTTPS site requires a 'Referer
header' to be sent by your Web browser, but none was sent. This header is
required for security reasons, to ensure that your browser is not being
hijacked by third parties.</p>
<p>If you have configured your browser to disable 'Referer' headers, please
re-enable them, at least for this site, or for HTTPS connections, or for
'same-origin' requests.</p>
{% endif %}
</div>
{% if DEBUG %}
<div id="info">
<h2>Help</h2>
{% if reason %}
<p>Reason given for failure:</p>
<pre>
{{ reason }}
</pre>
{% endif %}
<p>In general, this can occur when there is a genuine Cross Site Request Forgery, or when
<a
href='http://docs.djangoproject.com/en/dev/ref/contrib/csrf/#ref-contrib-csrf'>Django's
CSRF mechanism</a> has not been used correctly. For POST forms, you need to
ensure:</p>
<ul>
<li>The view function uses <a
href='http://docs.djangoproject.com/en/dev/ref/templates/api/#subclassing-context-requestcontext'><code>RequestContext</code></a>
for the template, instead of <code>Context</code>.</li>
<li>In the template, there is a <code>{% templatetag openblock %} csrf_token
{% templatetag closeblock %}</code> template tag inside each POST form that
targets an internal URL.</li>
<li>If you are not using <code>CsrfViewMiddleware</code>, then you must use
<code>csrf_protect</code> on any views that use the <code>csrf_token</code>
template tag, as well as those that accept the POST data.</li>
</ul>
<p>You're seeing the help section of this page because you have <code>DEBUG =
True</code> in your Django settings file. Change that to <code>False</code>,
and only the initial error message will be displayed. </p>
<p>You can customize this page using the CSRF_FAILURE_VIEW setting.</p>
</div>
{% else %}
<div id="explanation">
<p><small>More information is available with DEBUG=True.</small></p>
</div>
{% endif %}
</body>
</html>
"""
def csrf_failure(request, reason=""):
    """
    Default view used when request fails CSRF protection
    """
    from django.middleware.csrf import REASON_NO_REFERER
    template = Template(CSRF_FAILRE_TEMPLATE)
    context = Context({
        'DEBUG': settings.DEBUG,
        'reason': reason,
        'no_referer': reason == REASON_NO_REFERER,
    })
    return HttpResponseForbidden(template.render(context), mimetype='text/html')
| apache-2.0 |
AutorestCI/azure-sdk-for-python | azure-mgmt-scheduler/azure/mgmt/scheduler/models/service_bus_queue_message.py | 5 | 2735 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .service_bus_message import ServiceBusMessage
# NOTE: this file is generated by AutoRest (see the header above); code
# changes would be lost on regeneration, so only documentation is touched.
class ServiceBusQueueMessage(ServiceBusMessage):
    """A Scheduler job action message delivered to a Service Bus queue.

    Extends ServiceBusMessage with the name of the destination queue.

    :param authentication: Gets or sets the Service Bus authentication.
    :type authentication: :class:`ServiceBusAuthentication
     <azure.mgmt.scheduler.models.ServiceBusAuthentication>`
    :param brokered_message_properties: Gets or sets the brokered message
     properties.
    :type brokered_message_properties:
     :class:`ServiceBusBrokeredMessageProperties
     <azure.mgmt.scheduler.models.ServiceBusBrokeredMessageProperties>`
    :param custom_message_properties: Gets or sets the custom message
     properties.
    :type custom_message_properties: dict
    :param message: Gets or sets the message.
    :type message: str
    :param namespace: Gets or sets the namespace.
    :type namespace: str
    :param transport_type: Gets or sets the transport type. Possible values
     include: 'NotSpecified', 'NetMessaging', 'AMQP'
    :type transport_type: str or :class:`ServiceBusTransportType
     <azure.mgmt.scheduler.models.ServiceBusTransportType>`
    :param queue_name: Gets or sets the queue name.
    :type queue_name: str
    """

    # Maps constructor attributes to wire-format keys and types
    # (presumably consumed by the msrest serializer -- see the msrest
    # Model base class; confirm before relying on it).
    _attribute_map = {
        'authentication': {'key': 'authentication', 'type': 'ServiceBusAuthentication'},
        'brokered_message_properties': {'key': 'brokeredMessageProperties', 'type': 'ServiceBusBrokeredMessageProperties'},
        'custom_message_properties': {'key': 'customMessageProperties', 'type': '{str}'},
        'message': {'key': 'message', 'type': 'str'},
        'namespace': {'key': 'namespace', 'type': 'str'},
        'transport_type': {'key': 'transportType', 'type': 'ServiceBusTransportType'},
        'queue_name': {'key': 'queueName', 'type': 'str'},
    }

    def __init__(self, authentication=None, brokered_message_properties=None, custom_message_properties=None, message=None, namespace=None, transport_type=None, queue_name=None):
        # All shared fields are forwarded to ServiceBusMessage; only
        # queue_name is specific to this subclass.
        super(ServiceBusQueueMessage, self).__init__(authentication=authentication, brokered_message_properties=brokered_message_properties, custom_message_properties=custom_message_properties, message=message, namespace=namespace, transport_type=transport_type)
        self.queue_name = queue_name
| mit |
javo100/plugin.video.PAQUETEDIVIERTAS2 | servers/filefactory.py | 43 | 3595 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para filefactory
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
    # Always reports the file as available: the filefactory page is not
    # probed here, so any dead-link detection happens later.
    logger.info("[filefactory.py] test_video_exists(page_url='%s')" % page_url)
    return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    # Direct media URL extraction is not implemented for filefactory;
    # the empty list tells the caller no playable URL could be resolved.
    logger.info("[filefactory.py] get_video_url(page_url='%s')" % page_url)
    video_urls = []
    return video_urls
# Finds videos from this server in the given text
def find_videos(data):
    """Scan *data* for filefactory links and return them as a list of
    [title, url, server_id] entries, skipping duplicate URLs.

    The original implementation repeated the same scan loop once per
    pattern; this version drives a single loop from a pattern list,
    preserving the pattern order, logging and dedup behaviour exactly.
    """
    encontrados = set()
    devuelve = []
    patrones = [
        "(www.filefactory.com/file.*?\.mkv)",
        "(www.filefactory.com/file.*?\.mp4)",
        # http://www.filefactory.com/file/35ip193vzp1f/n/HMD-5x19-ESP.avi
        "(www.filefactory.com/file.*?\.avi)",
        "(www.filefactory.com/file.*?\.rar)",
        # http://filefactory.com/file/15437757
        '(filefactory.com/file/[a-z0-9]+)',
    ]
    for patronvideos in patrones:
        logger.info("[filefactory.py] find_videos #"+patronvideos+"#")
        matches = re.compile(patronvideos,re.DOTALL).findall(data)
        for match in matches:
            titulo = "[filefactory]"
            url = "http://"+match
            if url not in encontrados:
                logger.info(" url="+url)
                devuelve.append( [ titulo , url , 'filefactory' ] )
                encontrados.add(url)
            else:
                logger.info(" url duplicada="+url)
    return devuelve
| gpl-2.0 |
tkasp/osmose-backend | modules/OsmState.py | 4 | 2873 | #-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Frédéric Rodrigo 2017 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
import dateutil.parser
###########################################################################
class dummylog:
    """No-op logger used as the default when the caller supplies none."""

    def log(self, text):
        # Silently discard the message.
        pass
###########################################################################
class OsmState:
    """Parses an Osmosis replication state file and exposes its timestamp.

    The ``timestamp=`` line is unescaped (Osmosis escapes ':' with a
    backslash) and parsed into a naive datetime (tzinfo stripped).
    """

    def __init__(self, state_file, logger=None):
        """Read and parse *state_file*.

        :param state_file: path to the Osmosis ``state.txt`` file.
        :param logger: object with a ``log(text)`` method; defaults to a
            fresh ``dummylog``. (The original used ``logger = dummylog()``
            as the default, which created one shared instance at function
            definition time -- the classic mutable-default pitfall.)
        """
        if logger is None:
            logger = dummylog()
        self._state_file = state_file
        self._logger = logger
        self._timestamp = None
        with open(state_file, 'r') as f:
            state_lines = f.readlines()
        for line in state_lines:
            logger.log("state: {0}".format(line))
            if line.startswith("timestamp="):
                # Drop Osmosis' backslash escapes before parsing.
                s = line.replace('\\', '')
                self._timestamp = dateutil.parser.parse(s[len("timestamp="):]).replace(tzinfo=None)

    def timestamp(self):
        """Return the parsed timestamp, or None if the file had none."""
        return self._timestamp
###########################################################################
import unittest
class Test(unittest.TestCase):
    def test_state(self):
        # Each fixture is a sample Osmosis state file; the expected value
        # is its "timestamp=" line parsed as a naive datetime.
        import datetime
        s = OsmState("tests/state1.txt")
        exp = datetime.datetime(2010, 10, 29, 23, 0, 0, 0, None)
        self.assertEqual(s.timestamp(), exp, "got=%s, exp=%s" % (s.timestamp(), exp))
        s = OsmState("tests/state2.txt")
        exp = datetime.datetime(2017, 9, 3, 16, 47, 2, 0, None)
        self.assertEqual(s.timestamp(), exp, "got=%s, exp=%s" % (s.timestamp(), exp))
        # state3 uses backslash-escaped colons; parsing must still work.
        s = OsmState("tests/state3.txt")
        exp = datetime.datetime(2017, 9, 2, 20, 43, 2, 0, None)
        self.assertEqual(s.timestamp(), exp, "got=%s, exp=%s" % (s.timestamp(), exp))
| gpl-3.0 |
tommo/gii | packages/Mock/Animator/AnimatorView.py | 1 | 17792 | import random
import time
##----------------------------------------------------------------##
from gii.core import app, signals
from gii.qt import QtEditorModule
from gii.qt.IconCache import getIcon
from gii.qt.controls.GenericTreeWidget import GenericTreeWidget
from gii.qt.dialogs import alertMessage
from gii.moai.MOAIRuntime import MOAILuaDelegate
from gii.SceneEditor import SceneEditorModule, getSceneSelectionManager
from gii.qt.helpers import addWidgetWithLayout, QColorF, unpackQColor
##----------------------------------------------------------------##
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import Qt
##----------------------------------------------------------------##
from mock import _MOCK, isMockInstance
##----------------------------------------------------------------##
from AnimatorWidget import AnimatorWidget
##----------------------------------------------------------------##
def _getModulePath( path ):
import os.path
return os.path.dirname( __file__ ) + '/' + path
def _fixDuplicatedName( names, name, id = None ):
if id:
testName = name + '_%d' % id
else:
id = 0
testName = name
#find duplicated name
if testName in names:
return _fixDuplicatedName( names, name, id + 1)
else:
return testName
##----------------------------------------------------------------##
# (label, playback-speed multiplier) pairs shown in the preview-speed
# combo box; index 4 ('1x') is selected by default in onLoad().
PREVIEW_SPEED_OPTIONS = [
    ( '1/10', 0.1 ),
    ( '1/5', 0.2 ),
    ( '1/3', 0.33 ),
    ( '1/2', 0.5 ),
    ( '1x', 1.0 ),
    ( '1.5x', 1.5 ),
    ( '2x', 2.0 ),
    ( '4x', 4.0 ),
    ( '10x', 10.0 ),
]
##----------------------------------------------------------------##
class AnimatorView( SceneEditorModule ):
name = 'animator'
dependency = [ 'scene_editor', 'mock' ]
def onLoad( self ):
    """Build the Animator dock window, its toolbars and tools, hook up
    scene signals, load the Lua-side delegate and set up the preview
    timer and playback state."""
    #UI
    self.windowTitle = 'Animator'
    self.window = self.requestDockWindow( 'AnimatorView',
        title = 'Animator',
        size = (120,120),
        minSize = (120,120),
        dock = 'bottom'
    )
    self.widget = AnimatorWidget()
    self.window.addWidget( self.widget )
    # Toolbars are hosted inside the widget's placeholder containers.
    self.toolbarTarget = self.addToolBar( 'animator_target', self.widget.toolbarTarget )
    self.toolbarClips = self.addToolBar( 'animator_clips', self.widget.toolbarClips )
    self.toolbarPlay = self.addToolBar( 'animator_play', self.widget.toolbarPlay )
    self.toolbarTrack = self.addToolBar( 'animator_track', self.widget.toolbarTrack )
    # self.toolbarEdit = self.addToolBar( 'animator_play', self.widget.toolbarEdit )
    signals.connect( 'scene.close', self.onSceneClose )
    signals.connect( 'scene.save', self.preSceneSave )
    signals.connect( 'scene.saved', self.postSceneSave )
    # addWidgetWithLaytut( toolbar,
    # self.widget.containerEditTool )
    self.addTool( 'animator_target/change_context', label = 'Change Context', icon = 'in' )
    self.addTool( 'animator_target/save_data', label = 'Save Data', icon = 'save' )
    self.addTool( 'animator_clips/add_clip_group', label = 'add group', icon = 'add_folder' )
    self.addTool( 'animator_clips/add_clip', label = 'add', icon = 'add' )
    self.addTool( 'animator_clips/remove_clip', label = 'remove', icon = 'remove' )
    self.addTool( 'animator_clips/clone_clip', label = 'clone', icon = 'clone' )
    self.addTool( 'animator_play/goto_start', label = 'to start', icon = 'rewind' )
    # self.addTool( 'animator_play/prev_key', label = 'prev key', icon = 'previous' )
    self.addTool( 'animator_play/stop', label = 'stop', icon = 'stop' )
    self.addTool( 'animator_play/play', label = 'play', icon = 'play', type = 'check' )
    # self.addTool( 'animator_play/next_key', label = 'next key', icon = 'next' )
    self.addTool( 'animator_play/goto_end', label = 'to end', icon = 'fast_forward' )
    self.addTool( 'animator_play/toggle_repeat', label = 'toggle repeat', icon = 'repeat', type = 'check' )
    # Preview-speed selector; index 4 corresponds to '1x' in
    # PREVIEW_SPEED_OPTIONS.
    self.comboPreviewSpeed = QtGui.QComboBox()
    self.comboPreviewSpeed.addItems([ e[0] for e in PREVIEW_SPEED_OPTIONS ] )
    self.comboPreviewSpeed.setCurrentIndex( 4 ) #1x
    self.comboPreviewSpeed.currentIndexChanged.connect( self.onPreviewSpeedChange )
    self.addTool( 'animator_play/preview_speed', widget = self.comboPreviewSpeed )
    #SIGNALS
    self.addTool( 'animator_track/locate_target', label = 'locate', icon = 'find' )
    self.addTool( 'animator_track/----' )
    self.addTool( 'animator_track/add_track_group', label = 'add group', icon = 'add_folder' )
    self.addTool( 'animator_track/add_track', label = 'add', icon = 'add' )
    self.addTool( 'animator_track/remove_track', label = 'remove', icon = 'remove' )
    #
    signals.connect( 'selection.changed', self.onSceneSelectionChanged )
    # Lua-side implementation of the view logic.
    self.delegate = MOAILuaDelegate( self )
    self.delegate.load( _getModulePath( 'AnimatorView.lua' ) )
    self.widget.setOwner( self )
    #playback
    self.previewing = False
    self.setEditing( False )
    self.targetAnimator = None
    self.targetClip = None
    self.targetAnimatorData = None
    self.currentTrack = None
    self.previewing = False
    self.previewLoop = False
    self.previewTime = 0.0
    self.previewStep = 1.0/60.0
    # Timer interval is in milliseconds (~65 Hz), slightly faster than
    # the 1/60 s preview step; started/stopped around preview playback.
    self.previewTimer = QtCore.QTimer( self.widget )
    self.previewTimer.setInterval( 1000.0/65 )
    self.previewTimer.stop()
    self.previewTimer.timeout.connect( self.onPreviewTimer )
	def onStart( self ):
		# Module lifecycle hook; nothing to do at startup.
		pass
def setEditing( self, editing ):
self.widget.timeline.setEnabled( editing )
self.widget.treeTracks.setEnabled( editing )
self.findTool( 'animator_play' ).setEnabled( editing )
self.findTool( 'animator_track' ).setEnabled( editing )
self.findTool( 'animator_clips/add_clip_group').setEnabled( editing )
self.findTool( 'animator_clips/add_clip' ).setEnabled( editing )
self.findTool( 'animator_clips/remove_clip' ).setEnabled( editing )
self.findTool( 'animator_clips/clone_clip' ).setEnabled( editing )
	def setTargetAnimator( self, target ):
		"""Switch the editor to a new target animator (None to clear).

		Saves pending data first, stops any running preview, then rebuilds
		the UI and restores the previously edited clip, if any.
		"""
		self.saveAnimatorData()
		if target == self.targetAnimator: return
		if self.previewing:
			self.stopPreview()
		self.targetAnimator = target
		self.targetClip = None
		self.delegate.callMethod( 'view', 'setTargetAnimator', target )
		self.targetAnimatorData = self.delegate.callMethod( 'view', 'getTargetAnimatorData' )
		self.widget.rebuild()
		if self.targetAnimator:
			self.setEditing( True )
			signals.emit( 'animator.start' )
		else:
			self.setEditing( False )
			signals.emit( 'animator.stop' )
		# Reflect the data file path in the window title.
		path = self.delegate.callMethod( 'view', 'getTargetAnimatorDataPath' )
		if path:
			self.window.setWindowTitle( 'Animator - %s' % path )
		else:
			self.window.setWindowTitle( 'Animator' )
		# NOTE(review): 'getPreviousTargeClip' looks misspelled, but it must match
		# the Lua-side method name -- confirm on the Lua side before renaming.
		clip = self.delegate.callMethod( 'view', 'getPreviousTargeClip', target )
		# Play/track tools stay disabled until a clip gets selected.
		self.enableTool( 'animator_play' , False )
		self.enableTool( 'animator_track', False )
		if clip:
			self.widget.treeClips.selectNode( clip )
		else:
			self.widget.treeClips.selectFirstItem()
		self.applyTime( 0, True )
	def setTargetClip( self, clip ):
		"""Make `clip` the edited clip; preview is paused around the switch."""
		wasPreviewing = self.previewing
		if self.previewing:
			self.stopPreview()
		self.targetClip = clip
		self.delegate.callMethod( 'view', 'setTargetClip', clip )
		self.widget.rebuildTimeline()
		# Play/track tools only make sense with a clip selected.
		self.enableTool( 'animator_play' , bool( clip ) )
		self.enableTool( 'animator_track', bool( clip ) )
		self.applyTime( 0, True )
		if wasPreviewing:
			self.startPreview()
	def setCurrentTrack( self, track ):
		# Remember the active track and mirror the selection into the Lua view.
		self.currentTrack = track
		self.delegate.callMethod( 'view', 'setCurrentTrack', track )
	def getTargetClipLength( self ):
		"""Return the current clip's length as reported by the Lua view."""
		return self.delegate.callMethod( 'view', 'getTargetClipLength' )
def getClipList( self ):
if self.targetAnimatorData:
clipList = self.targetAnimatorData.clips
return [ clip for clip in clipList.values() ]
else:
return []
	def getRootClipGroup( self ):
		# Lua-bridged convention: methods take the owning object explicitly.
		# Implicitly returns None when no animator data is loaded.
		if self.targetAnimatorData:
			return self.targetAnimatorData.getRootGroup( self.targetAnimatorData )
def getTrackList( self ):
if self.targetClip:
trackList = self.targetClip.getTrackList( self.targetClip )
return [ track for track in trackList.values() ]
else:
return []
def getMarkerList( self ):
if self.targetClip:
markerList = self.targetClip.getMarkerList( self.targetClip )
return [ track for track in markerList.values() ]
else:
return []
	def getClipRoot( self ):
		"""Return the root node of the current clip, or None when no clip."""
		if self.targetClip:
			return self.targetClip.getRoot( self.targetClip )
		else:
			return None
def addClip( self ):
if not self.targetAnimatorData: return
targetGroup = self.widget.getCurrentClipGroup()
cmd = self.doCommand( 'scene_editor/animator_add_clip',
animator_data = self.targetAnimatorData,
parent_group = targetGroup
)
clip = cmd.getResult()
if clip:
self.widget.addClip( clip, True )
return clip
def addClipGroup( self ):
if not self.targetAnimatorData: return
targetGroup = self.widget.getCurrentClipGroup()
cmd = self.doCommand( 'scene_editor/animator_add_clip_group',
animator_data = self.targetAnimatorData,
parent_group = targetGroup
)
group = cmd.getResult()
if group:
self.widget.addClip( group, True )
return group
def removeClipNode( self ):
for clip in self.widget.treeClips.getSelection():
if self.doCommand( 'scene_editor/animator_remove_clip_node',
animator_data = self.targetAnimatorData,
target_node = clip
):
self.widget.removeClip( clip )
def cloneClipNode( self ):
if not self.targetClip: return
result = []
for clip in self.widget.treeClips.getSelection():
cmd = self.doCommand( 'scene_editor/animator_clone_clip_node',
animator_data = self.targetAnimatorData,
target_node = clip
)
if cmd:
cloned = cmd.getResult()
self.widget.addClip( cloned )
result.append( cloned )
return result
	def onObjectEdited( self, obj ):
		# A scene object changed while a clip is open: invalidate the cached
		# preview state and mark the clip as needing a re-save.
		if self.targetClip:
			self.delegate.callMethod( 'view', 'clearPreviewState' )
			self.delegate.callMethod( 'view', 'markClipDirty' )
	def onSceneSelectionChanged( self, selection, key ):
		# Only scene-scope selection changes are of interest; auto-retargeting
		# on selection is currently disabled (see commented call below).
		if key != 'scene': return
		#find animator component
		# self.findTargetAnimator()
	def findTargetAnimator( self ):
		"""Locate an animator in the current selection scope and target it."""
		target = self.delegate.callMethod( 'view', 'findTargetAnimator' )
		self.setTargetAnimator( target )
		return target
def checkTargetAnimator( self ):
if not self.targetAnimator:
alertMessage( 'No Animator', 'No Animator Selected', 'question' )
return False
return True
def addMarker( self ):
if not self.targetClip: return
cmd = self.doCommand( 'scene_editor/animator_add_marker' ,
target_clip = self.targetClip,
target_pos = self.widget.getCursorPos()
)
if cmd:
marker = cmd.getResult()
self.widget.addMarker( marker )
	def addKeyForField( self, target, fieldId ):
		"""Add animation key(s) for an object field at the current cursor time."""
		if not self.checkTargetAnimator(): return
		# Auto-create a clip when none is selected yet.
		if not self.targetClip:
			self.addClip()
			# alertMessage( 'No Clip', 'You need to select a Clip first', 'question' )
			# return False
		keys = self.delegate.callMethod( 'view', 'addKeyForField', target, fieldId )
		if keys:
			for key in keys.values():
				self.widget.addKey( key, True )
	def addKeyForEvent( self, target, eventId ):
		# Event keys are not implemented yet; intentional no-op.
		pass
	def addCustomAnimatorTrack( self, target, trackClasId ):
		"""Add a custom track (identified by its class id) for the given target."""
		# NOTE(review): `trackClasId` looks like a typo for `trackClassId`, but
		# renaming the parameter would break keyword-argument callers.
		if not self.checkTargetAnimator(): return
		track = self.delegate.callMethod( 'view', 'addCustomAnimatorTrack', target, trackClasId )
		if track:
			self.widget.addTrack( track )
def addKeyForSelectedTracks( self ):
#TODO: command
selectedTracks = self.widget.getTrackSelection()
for track in selectedTracks:
keys = self.delegate.callMethod( 'view', 'addKeyForSelectedTrack', track )
if keys:
for key in keys.values():
self.widget.addKey( key, True )
def removeSelectedKeys( self ):
#TODO: command
selectedKeys = self.widget.getKeySelection()
for key in selectedKeys:
self.widget.removeKey( key )
def cloneSelectedKeys( self ):
#TODO: command
selectedKeys = self.widget.getKeySelection()
cloned = []
for key in selectedKeys:
clonedKey = self.delegate.callMethod( 'view', 'cloneKey', key )
if clonedKey:
cloned.append( clonedKey )
for clonedKey in cloned:
self.widget.addKey( clonedKey, False )
	def onKeyRemoving( self, key ):
		# Widget veto hook: anything but an explicit False from the Lua view
		# (including None) counts as success; returns True then, None otherwise.
		if self.delegate.callMethod( 'view', 'removeKey', key ) != False:
			return True
	def onMarkerRemoving( self, marker ):
		# Widget veto hook; same convention as onKeyRemoving.
		if self.delegate.callMethod( 'view', 'removeMarker', marker ) != False:
			return True
	def onClipLengthChanging( self, t1 ):
		# Widget veto hook for clip-length drags; same convention as above.
		if self.delegate.callMethod( 'view', 'setTargetClipLength', t1 ) != False:
			return True
	def onTimelineKeyChanged( self, key, pos, length ):
		# Forward key move/resize from the timeline widget to the Lua model.
		self.delegate.callMethod( 'view', 'updateTimelineKey', key, pos, length )
	def onTimelineKeyCurveValueChanged( self, key, value ):
		# Forward curve-value edits from the timeline widget to the Lua model.
		self.delegate.callMethod( 'view', 'updateTimelineKeyCurveValue', key, value )
	def onTimelineKeyTweenModeChanged( self, key, mode ):
		# Forward tween-mode changes from the timeline widget to the Lua model.
		self.delegate.callMethod( 'view', 'updateTimelineKeyTweenMode', key, mode )
	def onTimelineKeyBezierPointChanged( self, key, bpx0, bpy0, bpx1, bpy1 ):
		# Forward bezier handle edits (two control points) to the Lua model.
		self.delegate.callMethod( 'view', 'updateTimelineKeyBezierPoint', key, bpx0, bpy0, bpx1, bpy1 )
	def onTimelineMarkerChanged( self, marker, pos ):
		# Forward marker moves from the timeline widget to the Lua model.
		self.delegate.callMethod( 'view', 'updateTimelineMarker', marker, pos )
	def toggleTrackActive( self, track ):
		"""Toggle a track's active flag (not yet routed through the undo system)."""
		#TODO: command
		# self.module.doCommand( 'scene_editor/toggle_entity_visibility', target = node )
		self.delegate.callMethod( 'view', 'toggleTrackActive', track )
	def renameTrack( self, track, name ):
		# Delegate the rename to the Lua model.
		self.delegate.callMethod( 'view', 'renameTrack', track, name )
	def renameClip( self, clip, name ):
		# Delegate the rename to the Lua model.
		self.delegate.callMethod( 'view', 'renameClip', clip, name )
	def onTool( self, tool ):
		"""Dispatch a toolbar action by its tool name.

		Order of the elif chain is significant only for readability; each
		branch handles exactly one tool id registered in __init__/onLoad.
		"""
		name = tool.name
		# --- context / data tools ---
		if name == 'change_context':
			target0 = self.targetAnimator
			target1 = self.findTargetAnimator()
			if ( not target0 ) and ( not target1 ):
				alertMessage( 'No Animator', 'No Animator found in selected entity scope', 'question' )
		elif name == 'save_data':
			self.saveAnimatorData()
		# --- clip tools (all require a target animator) ---
		elif name == 'add_clip':
			if self.checkTargetAnimator():
				self.addClip()
		elif name == 'add_clip_group':
			if self.checkTargetAnimator():
				self.addClipGroup()
		elif name == 'remove_clip':
			if self.checkTargetAnimator():
				self.removeClipNode()
		elif name == 'clone_clip':
			if self.checkTargetAnimator():
				self.cloneClipNode()
		# --- track tools ---
		elif name == 'add_track_group':
			group = self.delegate.callMethod( 'view', 'addTrackGroup' )
			if group:
				self.widget.addTrack( group, True )
		elif name == 'remove_track':
			for track in self.widget.treeTracks.getSelection():
				self.delegate.callMethod( 'view', 'removeTrack', track )
				self.widget.removeTrack( track )
		elif name == 'locate_target':
			# Select, in the scene graph editor, the entity each track animates.
			for track in self.widget.treeTracks.getSelection():
				sceneGraphEditor = self.getModule( 'scenegraph_editor')
				if sceneGraphEditor:
					targetEntity = self.delegate.callMethod( 'view', 'findTrackEntity', track )
					if targetEntity:
						sceneGraphEditor.selectEntity( targetEntity, focus_tree = True )
				#pass
			return
		#preview
		elif name == 'goto_start':
			self.gotoStart()
		elif name == 'goto_end':
			self.gotoEnd()
		elif name == 'play':
			# 'play' is a check tool: value True = start, False = pause (no rewind).
			if tool.getValue():
				self.startPreview()
			else:
				self.stopPreview( False )
		elif name == 'stop':
			self.stopPreview( True )
		elif name == 'toggle_repeat':
			self.delegate.callMethod( 'view', 'togglePreviewRepeat', tool.getValue() )
	def getActiveSceneView( self ):
		"""Return the scene view module used to refresh the 3D/2D viewport."""
		return self.getModule( 'scene_view' )
#preview
	def startPreview( self ):
		"""Start playback preview from the current preview time."""
		# Persist pending edits first so the preview runs the latest data.
		self.saveAnimatorData()
		if self.delegate.callMethod( 'view', 'startPreview', self.previewTime ):
			# Lock the cursor while the timer drives it.
			self.widget.setCursorMovable( False )
			self.previewing = True
			self.findTool( 'animator_play/play' ).setValue( True )
			self.previewTimer.start()
			self.getApp().setMinimalMainLoopBudget()
	def stopPreview( self, rewind = False ):
		"""Stop a running preview; optionally rewind the cursor to time 0."""
		if self.previewing:
			self.delegate.callMethod( 'view', 'stopPreview' )
			self.getApp().resetMainLoopBudget()
			self.widget.setCursorMovable( True )
			self.previewing = False
			self.findTool( 'animator_play/play' ).setValue( False )
			self.previewTimer.stop()
			# Let the rest of the editor refresh entities touched by the preview.
			signals.emit( 'entity.modified', None , '' )
			if rewind:
				self.gotoStart()
	def onPreviewTimer( self ):
		# Timer tick: advance the Lua-side preview one step, sync the timeline
		# cursor, and stop automatically when playback reaches its end.
		playing, currentTime = self.delegate.callMethod( 'view', 'doPreviewStep' )
		self.previewTime = currentTime
		self.getActiveSceneView().forceUpdate()
		self.widget.setCursorPos( self.previewTime )
		if not playing:
			self.stopPreview()
			# signals.emit( 'entity.modified', None , '' )
	def gotoStart( self ):
		# While previewing, drive the Lua view directly; otherwise just move
		# the cursor (the True flag applies the time as well).
		if self.previewing:
			self.delegate.callMethod( 'view', 'applyTime', 0 )
		else:
			self.widget.setCursorPos( 0, True )
	def gotoEnd( self ):
		# NOTE(review): 10 looks like a placeholder end time -- presumably this
		# should use getTargetClipLength(); confirm before changing.
		if self.previewing:
			self.delegate.callMethod( 'view', 'applyTime', 10 )
		else:
			self.widget.setCursorPos( 10, True )
	def applyTime( self, t, syncCursor = False ):
		"""Apply time `t` to the scene via the Lua view and refresh the editor."""
		self.previewTime = self.delegate.callMethod( 'view', 'applyTime', t )
		self.getActiveSceneView().forceUpdate()
		signals.emit( 'entity.modified', None , '' )
		if syncCursor:
			self.widget.setCursorPos( t )
	def saveAnimatorData( self ):
		"""Persist the current animator data; no-op when nothing is targeted."""
		if not self.targetAnimator:
			return
		self.delegate.callMethod( 'view', 'saveData' )
	def preSceneSave( self ):
		# Before the scene is written to disk, roll entities back to their
		# un-animated state so preview poses are not saved.
		if self.targetAnimator:
			self.delegate.callMethod( 'view', 'restoreEntityState' )
	def postSceneSave( self ):
		# After the save, re-apply the current preview time to the scene.
		if self.targetAnimator:
			self.applyTime( self.previewTime )
	def onSceneClose( self, scene ):
		# Closing the scene drops the target (saves data and resets the UI).
		self.setTargetAnimator( None )
	def onPreviewSpeedChange( self, index ):
		# Map the combo index to its (label, throttle) option and apply it.
		label, throttle = PREVIEW_SPEED_OPTIONS[ index ]
		self.delegate.callMethod( 'view', 'setPreviewThrottle', throttle )
	def refreshTimeline( self ):
		# Rebuild just the timeline panel.
		self.widget.rebuildTimeline()
	def refreshClipList( self ):
		# Rebuild just the clip list panel.
		self.widget.rebuildClipList()
	def refreshAll( self ):
		# Rebuild the whole animator widget (clips, tracks and timeline).
		self.widget.rebuild()
| mit |
AsimmHirani/ISpyPi | tensorflow/contrib/tensorflow-master/tensorflow/python/ops/functional_ops.py | 7 | 23660 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Functional operations. See the @{$python/functional_ops} guide.
@@map_fn
@@foldl
@@foldr
@@scan
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_functional_ops import *
# pylint: enable=wildcard-import
# pylint: disable=unused-import
from tensorflow.python.ops.gen_functional_ops import _symbolic_gradient
# pylint: enable=unused-import
from tensorflow.python.util import nest
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def foldl(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
          swap_memory=False, name=None):
  """foldl on the list of tensors unpacked from `elems` on dimension 0.

  This foldl operator repeatedly applies the callable `fn` to a sequence
  of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn. If `initializer` is None, `elems` must contain
  at least one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  Args:
    fn: The callable to be performed.
    elems: A tensor to be unpacked on dimension 0.
    initializer: (optional) The initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor resulting from applying `fn` consecutively to the list of tensors
    unpacked from `elems`, from first to last.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = foldl(lambda a, x: a + x, elems)
    # sum == 21
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  with ops.name_scope(name, "foldl", [elems]):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True
    try:
      # Convert elems to tensor array.
      elems = ops.convert_to_tensor(elems, name="elems")
      n = array_ops.shape(elems)[0]
      elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
                                              dynamic_size=False,
                                              infer_shape=True)
      elems_ta = elems_ta.unstack(elems)

      # Without an initializer, the first element seeds the accumulator and
      # the loop starts at index 1.
      if initializer is None:
        a = elems_ta.read(0)
        i = constant_op.constant(1)
      else:
        a = ops.convert_to_tensor(initializer)
        i = constant_op.constant(0)

      def compute(i, a):
        a = fn(a, elems_ta.read(i))
        return [i + 1, a]

      _, r_a = control_flow_ops.while_loop(
          lambda i, a: i < n, compute, [i, a],
          parallel_iterations=parallel_iterations,
          back_prop=back_prop,
          swap_memory=swap_memory)
    finally:
      # Restore the caching device even if graph construction fails above, so
      # a raising `fn` does not leave the variable scope permanently mutated.
      if varscope_caching_device_was_none:
        varscope.set_caching_device(None)

    return r_a
def foldr(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
          swap_memory=False, name=None):
  """foldr on the list of tensors unpacked from `elems` on dimension 0.

  This foldr operator repeatedly applies the callable `fn` to a sequence
  of elements from last to first. The elements are made of the tensors
  unpacked from `elems`. The callable fn takes two tensors as arguments.
  The first argument is the accumulated value computed from the preceding
  invocation of fn. If `initializer` is None, `elems` must contain at least
  one element, and its last element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  Args:
    fn: The callable to be performed.
    elems: A tensor that is unpacked into a sequence of tensors to apply `fn`.
    initializer: (optional) The initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor resulting from applying `fn` consecutively to the list of tensors
    unpacked from `elems`, from last to first.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = foldr(lambda a, x: a + x, elems)
    # sum == 21
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  with ops.name_scope(name, "foldr", [elems]):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    # Convert elems to tensor array.
    elems = ops.convert_to_tensor(elems, name="elems")
    n = array_ops.shape(elems)[0]
    elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
                                            dynamic_size=False,
                                            infer_shape=True)
    elems_ta = elems_ta.unstack(elems)

    # Without an initializer, the LAST element (index n-1) seeds the
    # accumulator; `compute` below decrements i before reading, so the loop
    # consumes elements n-2 .. 0 (or n-1 .. 0 when an initializer was given).
    if initializer is None:
      i = n - 1
      a = elems_ta.read(i)
    else:
      i = n
      a = ops.convert_to_tensor(initializer)

    def compute(i, a):
      i -= 1
      a = fn(a, elems_ta.read(i))
      return [i, a]

    _, r_a = control_flow_ops.while_loop(
        lambda i, a: i > 0, compute, [i, a],
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory)

    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return r_a
def map_fn(fn, elems, dtype=None, parallel_iterations=10, back_prop=True,
           swap_memory=False, infer_shape=True, name=None):
  """map on the list of tensors unpacked from `elems` on dimension 0.

  The simplest version of `map` repeatedly applies the callable `fn` to a
  sequence of elements from first to last. The elements are made of the
  tensors unpacked from `elems`. `dtype` is the data type of the return
  value of `fn`. Users must provide `dtype` if it is different from
  the data type of `elems`.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `[values.shape[0]] + fn(values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`. If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension. The signature of `fn` may
  match the structure of `elems`. That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Furthermore, `fn` may emit a different structure than its input. For example,
  `fn` may look like: `fn = lambda t1: return (t1 + 1, t1 - 1)`. In this case,
  the `dtype` parameter is not optional: `dtype` must be a type or (possibly
  nested) tuple of types matching the output of `fn`.

  To apply a functional operation to the nonzero elements of a SparseTensor
  one of the following methods is recommended. First, if the function is
  expressible as TensorFlow ops, use

  ```python
    result = SparseTensor(input.indices, fn(input.values), input.dense_shape)
  ```

  If, however, the function is not expressible as a TensorFlow op, then use

  ```python
  result = SparseTensor(
      input.indices, map_fn(fn, input.values), input.dense_shape)
  ```

  instead.

  Args:
    fn: The callable to be performed. It accepts one argument, which will
      have the same (possibly nested) structure as `elems`. Its output
      must have the same structure as `dtype` if one is provided, otherwise
      it must have the same structure as `elems`.
    elems: A tensor or (possibly nested) sequence of tensors, each of which
      will be unpacked along their first dimension. The nested sequence
      of the resulting slices will be applied to `fn`.
    dtype: (optional) The output type(s) of `fn`. If `fn` returns a structure
      of Tensors differing from the structure of `elems`, then `dtype` is not
      optional and must have the same structure as the output of `fn`.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    infer_shape: (optional) False disables tests for consistent output shapes.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors. Each tensor packs the
    results of applying `fn` to tensors unpacked from `elems` along the first
    dimension, from first to last.

  Raises:
    TypeError: if `fn` is not callable or the structure of the output of
      `fn` and `dtype` do not match, or if elems is a SparseTensor.
    ValueError: if the lengths of the output of `fn` and `dtype` do not match.

  Examples:
    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    squares = map_fn(lambda x: x * x, elems)
    # squares == [1, 4, 9, 16, 25, 36]
    ```

    ```python
    elems = (np.array([1, 2, 3]), np.array([-1, 1, -1]))
    alternate = map_fn(lambda x: x[0] * x[1], elems, dtype=tf.int64)
    # alternate == [-1, 2, -3]
    ```

    ```python
    elems = np.array([1, 2, 3])
    alternates = map_fn(lambda x: (x, -x), elems, dtype=(tf.int64, tf.int64))
    # alternates[0] == [1, 2, 3]
    # alternates[1] == [-1, -2, -3]
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  if isinstance(elems, sparse_tensor.SparseTensor):
    raise TypeError(
        "To perform a map on the values of a sparse tensor use either "
        " SparseTensor(input.indices, fn(input.values), input.dense_shape) or "
        " SparseTensor(input.indices, map_fn(fn, input.values), "
        "input.dense_shape)")

  # flatten/pack helpers let the rest of the implementation work on flat
  # lists of tensors while accepting arbitrarily nested input/output.
  input_is_sequence = nest.is_sequence(elems)
  input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
  def input_pack(x):
    return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]

  if dtype is None:
    # Without `dtype`, `fn` is assumed to preserve the input structure.
    output_is_sequence = input_is_sequence
    output_flatten = input_flatten
    output_pack = input_pack
  else:
    output_is_sequence = nest.is_sequence(dtype)
    output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
    def output_pack(x):
      return (nest.pack_sequence_as(dtype, x)
              if output_is_sequence else x[0])

  elems_flat = input_flatten(elems)

  with ops.name_scope(name, "map", elems_flat):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    elems_flat = [
        ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]

    dtype = dtype or input_pack([elem.dtype for elem in elems_flat])
    dtype_flat = output_flatten(dtype)

    # Convert elems to tensor array.
    n = array_ops.shape(elems_flat[0])[0]

    # TensorArrays are always flat
    elems_ta = [
        tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
                                     dynamic_size=False,
                                     infer_shape=True)
        for elem in elems_flat]
    # Unpack elements
    elems_ta = [
        elem_ta.unstack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]

    i = constant_op.constant(0)

    # One accumulator TensorArray per flat output component.
    accs_ta = [
        tensor_array_ops.TensorArray(dtype=dt, size=n,
                                     dynamic_size=False,
                                     infer_shape=infer_shape)
        for dt in dtype_flat]

    def compute(i, tas):
      """The loop body of map_fn.

      Args:
        i: the loop counter
        tas: the flat TensorArray accumulator list

      Returns:
        (i + 1, tas): the updated counter + updated TensorArrays

      Raises:
        TypeError: if dtype and packed_fn_values structure do not match
        ValueType: if dtype and packed_fn_values lengths do not match
      """
      packed_values = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
      packed_fn_values = fn(packed_values)
      nest.assert_same_structure(dtype or elems, packed_fn_values)
      flat_fn_values = output_flatten(packed_fn_values)
      tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_fn_values)]
      return (i + 1, tas)

    _, r_a = control_flow_ops.while_loop(
        lambda i, _: i < n, compute, (i, accs_ta),
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory)
    results_flat = [r.stack() for r in r_a]

    # Propagate the statically known leading dimension (shared by all inputs)
    # onto every stacked result.
    n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
    for elem in elems_flat[1:]:
      n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
    for r in results_flat:
      r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
          r.get_shape()[1:]))

    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return output_pack(results_flat)
def scan(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
swap_memory=False, infer_shape=True, name=None):
"""scan on the list of tensors unpacked from `elems` on dimension 0.
The simplest version of `scan` repeatedly applies the callable `fn` to a
sequence of elements from first to last. The elements are made of the tensors
unpacked from `elems` on dimension 0. The callable fn takes two tensors as
arguments. The first argument is the accumulated value computed from the
preceding invocation of fn. If `initializer` is None, `elems` must contain
at least one element, and its first element is used as the initializer.
Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
of the result tensor is `[len(values)] + fn(initializer, values[0]).shape`.
This method also allows multi-arity `elems` and accumulator. If `elems`
is a (possibly nested) list or tuple of tensors, then each of these tensors
must have a matching first (unpack) dimension. The second argument of
`fn` must match the structure of `elems`.
If no `initializer` is provided, the output structure and dtypes of `fn`
are assumed to be the same as its input; and in this case, the first
argument of `fn` must match the structure of `elems`.
If an `initializer` is provided, then the output of `fn` must have the same
structure as `initializer`; and the first argument of `fn` must match
this structure.
For example, if `elems` is `(t1, [t2, t3])` and `initializer` is
`[i1, i2]` then an appropriate signature for `fn` in `python2` is:
`fn = lambda (acc_p1, acc_p2), (t1 [t2, t3]):` and `fn` must return a list,
`[acc_n1, acc_n2]`. An alternative correct signature for `fn`, and the
one that works in `python3`, is:
`fn = lambda a, t:`, where `a` and `t` correspond to the input tuples.
Args:
fn: The callable to be performed. It accepts two arguments. The first
will have the same structure as `initializer` if one is provided,
otherwise it will have the same structure as `elems`. The second
will have the same (possibly nested) structure as `elems`. Its output
must have the same structure as `initializer` if one is provided,
otherwise it must have the same structure as `elems`.
elems: A tensor or (possibly nested) sequence of tensors, each of which
will be unpacked along their first dimension. The nested sequence
of the resulting slices will be the first argument to `fn`.
initializer: (optional) A tensor or (possibly nested) sequence of tensors,
initial value for the accumulator, and the expected output type of `fn`.
parallel_iterations: (optional) The number of iterations allowed to run
in parallel.
back_prop: (optional) True enables support for back propagation.
swap_memory: (optional) True enables GPU-CPU memory swapping.
infer_shape: (optional) False disables tests for consistent output shapes.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor or (possibly nested) sequence of tensors. Each tensor packs the
results of applying `fn` to tensors unpacked from `elems` along the first
dimension, and the previous accumulator value(s), from first to last.
Raises:
TypeError: if `fn` is not callable or the structure of the output of
`fn` and `initializer` do not match.
ValueError: if the lengths of the output of `fn` and `initializer`
do not match.
Examples:
```python
elems = np.array([1, 2, 3, 4, 5, 6])
sum = scan(lambda a, x: a + x, elems)
# sum == [1, 3, 6, 10, 15, 21]
```
```python
elems = np.array([1, 2, 3, 4, 5, 6])
initializer = np.array(0)
sum_one = scan(
lambda a, x: x[0] - x[1] + a, (elems + 1, elems), initializer)
# sum_one == [1, 2, 3, 4, 5, 6]
```
```python
elems = np.array([1, 0, 0, 0, 0, 0])
initializer = (np.array(0), np.array(1))
fibonaccis = scan(lambda a, _: (a[1], a[0] + a[1]), elems, initializer)
# fibonaccis == ([1, 1, 2, 3, 5, 8], [1, 2, 3, 5, 8, 13])
```
"""
if not callable(fn):
raise TypeError("fn must be callable.")
input_is_sequence = nest.is_sequence(elems)
input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
def input_pack(x):
return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]
if initializer is None:
output_is_sequence = input_is_sequence
output_flatten = input_flatten
output_pack = input_pack
else:
output_is_sequence = nest.is_sequence(initializer)
output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
def output_pack(x):
return (nest.pack_sequence_as(initializer, x)
if output_is_sequence else x[0])
elems_flat = input_flatten(elems)
with ops.name_scope(name, "scan", elems_flat):
# Any get_variable calls in fn will cache the first call locally
# and not issue repeated network I/O requests for each iteration.
varscope = vs.get_variable_scope()
varscope_caching_device_was_none = False
if varscope.caching_device is None:
# TODO(ebrevdo): Change to using colocate_with here and in other methods.
varscope.set_caching_device(lambda op: op.device)
varscope_caching_device_was_none = True
# Convert elems to tensor array.
elems_flat = [
ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]
n = array_ops.shape(elems_flat[0])[0]
# TensorArrays are always flat
elems_ta = [
tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
dynamic_size=False,
infer_shape=True)
for elem in elems_flat]
# Unpack elements
elems_ta = [
elem_ta.unstack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]
if initializer is None:
a_flat = [elem.read(0) for elem in elems_ta]
i = constant_op.constant(1)
else:
initializer_flat = output_flatten(initializer)
a_flat = [ops.convert_to_tensor(init) for init in initializer_flat]
i = constant_op.constant(0)
# Create a tensor array to store the intermediate values.
accs_ta = [
tensor_array_ops.TensorArray(dtype=init.dtype, size=n,
dynamic_size=False,
infer_shape=infer_shape)
for init in a_flat]
if initializer is None:
accs_ta = [acc_ta.write(0, a) for (acc_ta, a) in zip(accs_ta, a_flat)]
def compute(i, a_flat, tas):
  """The loop body of scan.

  Closes over `fn`, `elems`, `elems_ta`, `initializer`, `input_pack`,
  `output_pack` and `output_flatten` from the enclosing scope.

  Args:
    i: the loop counter.
    a_flat: the accumulator value(s), flattened.
    tas: the output accumulator TensorArray(s), flattened.

  Returns:
    [i + 1, a_flat, tas]: the updated counter + new accumulator values +
      updated TensorArrays

  Raises:
    TypeError: if initializer and fn() output structure do not match
    ValueError: if initializer and fn() output lengths do not match
  """
  # Re-pack the i-th slices of the inputs into the caller's original
  # structure before handing them to `fn`.
  packed_elems = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
  packed_a = output_pack(a_flat)
  a_out = fn(packed_a, packed_elems)
  # The new accumulator must keep the structure established by the
  # initializer (or by `elems` when no initializer was given).
  nest.assert_same_structure(
      elems if initializer is None else initializer, a_out)
  flat_a_out = output_flatten(a_out)
  # Persist this step's accumulator values into the output TensorArrays.
  tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_a_out)]
  return (i + 1, flat_a_out, tas)
_, _, r_a = control_flow_ops.while_loop(
lambda i, _1, _2: i < n, compute, (i, a_flat, accs_ta),
parallel_iterations=parallel_iterations,
back_prop=back_prop, swap_memory=swap_memory)
results_flat = [r.stack() for r in r_a]
n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
for elem in elems_flat[1:]:
n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
for r in results_flat:
r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
r.get_shape()[1:]))
if varscope_caching_device_was_none:
varscope.set_caching_device(None)
return output_pack(results_flat)
| apache-2.0 |
MiczFlor/Booktype | lib/booktype/convert/image_editor_conversion.py | 1 | 11223 | import os
import json
import shutil
import ebooklib
import StringIO
import logging
from django.conf import settings
from booktype.utils import config
from booktype.utils.image_editor import BkImageEditor
try:
import Image
except ImportError:
from PIL import Image
logger = logging.getLogger("booktype.convert")

# Width (in px) of the image-editor canvas in the browser; the editor's
# transform-data coordinates are expressed relative to this width.
EDITOR_WIDTH = config.get_configuration('CONVERT_EDITOR_WIDTH')
class ImageEditorConversion(object):
    """Applies stored image-editor transformations to chapter images.

    Walks converted chapter HTML, finds ``<img>`` elements whose extension is
    supported by :class:`BkImageEditor`, applies the ``transform-data``
    attribute recorded by the in-browser editor (crop/scale/rotate/filters),
    writes the edited image into the converter's image folder and rewrites the
    element's ``src``/``style`` accordingly.
    """

    def __init__(self, original_book, output_document_width, converter):
        """
        :param original_book: ebooklib book holding the original image items.
        :param output_document_width: target document width in px; editor
            coordinates (relative to EDITOR_WIDTH) are rescaled to it.
        :param converter: owning converter; provides ``config`` and
            ``images_path``.
        """
        self._original_book = original_book
        self._output_document_width = output_document_width
        self._converter = converter

        # cache path for edited images
        # example: /data/tmp/bk_image_editor/<project_id>
        self._cache_folder = os.path.abspath(
            os.path.join(settings.MEDIA_ROOT, 'bk_image_editor', self._converter.config.get("project_id"))
        )

    def convert(self, html):
        """
        Parse html, search for image and edit it.

        :param html: lxml tree of a chapter; modified in place.
        :returns: the same (mutated) lxml tree.
        """
        for img_element in html.iter('img'):
            if img_element.get('src'):
                # validate image extension
                # NOTE(review): rsplit('.', 1)[1] raises IndexError for a src
                # without any dot — confirm upstream guarantees an extension.
                extension = img_element.get('src').rsplit('.', 1)[1].lower()
                if extension in BkImageEditor.EXTENSION_MAP:
                    self._edit_image(img_element)
        return html

    def _edit_image(self, elem):
        """
        Edit a single ``<img>`` element in place.

        Expects the editor markup ``div.group_img > div.image > img``;
        validates/creates ``transform-data``, rescales it from EDITOR_WIDTH to
        the output document width, runs BkImageEditor and rewrites src/styles.
        """
        ebooklib_item_image = None
        src = elem.get('src')
        div_image = elem.getparent()
        div_group_img = div_image.getparent()

        ##############################
        # image structure inside <a> #
        ##############################
        # Linked images are not editable: unwrap the editor divs, strip all
        # editor attributes and bail out.
        if div_group_img.getparent() is not None and div_group_img.getparent().tag == 'a':
            div_group_img.drop_tag()
            div_image.drop_tag()

            if elem.get('transform-data'):
                del elem.attrib['transform-data']
            if elem.get('style'):
                del elem.attrib['style']
            if elem.get('width'):
                del elem.attrib['width']
            if elem.get('height'):
                del elem.attrib['height']

            elem.set('style', 'display: inline-block;')
            return

        ###########################
        # find ebook image object #
        ###########################
        # Match by basename only (src and item paths may differ in prefix).
        for item in self._original_book.get_items_of_type(ebooklib.ITEM_IMAGE):
            if item.file_name.rsplit('/')[-1] == src.rsplit('/')[-1]:
                ebooklib_item_image = item
                break
        # we didn't find image object
        else:
            if elem.get('transform-data'):
                del elem.attrib['transform-data']
            return

        ###########################
        # validate transform-data #
        ###########################
        # Clamp implausibly small dimensions to a 50px floor; any parse
        # failure drops the attribute so defaults are created below.
        # NOTE(review): `except (ValueError, Exception)` is effectively
        # `except Exception` — the ValueError is redundant.
        try:
            transform_data = json.loads(elem.get('transform-data'))

            if transform_data['imageWidth'] < 50:
                transform_data['imageWidth'] = 50
            if transform_data['imageHeight'] < 50:
                transform_data['imageHeight'] = 50
            if transform_data['frameWidth'] < 50:
                transform_data['frameWidth'] = 50
            if transform_data['frameHeight'] < 50:
                transform_data['frameHeight'] = 50

            elem.set('transform-data', json.dumps(transform_data))
        except (ValueError, Exception) as e:
            if elem.get('transform-data'):
                del elem.attrib['transform-data']

        #################################
        # create default transform-data #
        #################################
        # No (valid) editor data: synthesize an identity transform sized from
        # the div's inline style or from the image's natural dimensions.
        if not elem.get('transform-data'):
            transform_data = {
                'imageWidth': None,
                'imageHeight': None,
                'imageTranslateX': 0,
                'imageTranslateY': 0,
                'imageScaleX': 1,
                'imageScaleY': 1,
                'imageRotateDegree': 0,
                'imageContrast': 100,
                'imageBrightness': 100,
                'imageBlur': 0,
                'imageSaturate': 100,
                'imageOpacity': 100,
                'frameWidth': None,
                'frameHeight': None,
                'editorWidth': EDITOR_WIDTH
            }

            div_image_style = div_image.get('style', '')

            if 'width: ' in div_image_style and 'height: ' in div_image_style:
                width = div_image_style.rsplit('width: ')[1].split('px', 1)[0]
                height = div_image_style.rsplit('height: ')[1].split('px', 1)[0]
                transform_data['imageWidth'] = transform_data['frameWidth'] = width
                transform_data['imageHeight'] = transform_data['frameHeight'] = height
            else:
                # get natural image width and height
                # NOTE(review): relies on the leaked loop variable `item`;
                # after the successful `break` above it equals
                # ebooklib_item_image, but using the latter would be clearer.
                with Image.open(StringIO.StringIO(item.get_content())) as im:
                    natural_width, natural_height = im.size

                if natural_width <= EDITOR_WIDTH:
                    transform_data['imageWidth'] = transform_data['frameWidth'] = natural_width
                    transform_data['imageHeight'] = transform_data['frameHeight'] = natural_height
                else:
                    # scale down proportionally so the image fits the editor
                    quotient = EDITOR_WIDTH / float(natural_width)
                    transform_data['imageWidth'] = transform_data['frameWidth'] = float(natural_width) * quotient
                    transform_data['imageHeight'] = transform_data['frameHeight'] = float(natural_height) * quotient

            # record transform_data
            elem.set('transform-data', json.dumps(transform_data))

        ##########################
        # delete redundant attrs #
        ##########################
        if elem.get('style'):
            del elem.attrib['style']
        if elem.get('width'):
            del elem.attrib['width']
        if elem.get('height'):
            del elem.attrib['height']
        if div_image.get('style'):
            del div_image.attrib['style']

        ###########################
        # resize && update styles #
        ###########################
        transform_data = json.loads(elem.get('transform-data'))
        del elem.attrib['transform-data']

        # proportionally resize according to self._output_document_width
        quotient = float(EDITOR_WIDTH) / float(self._output_document_width)
        transform_data['imageWidth'] = float(transform_data['imageWidth']) / quotient
        transform_data['frameWidth'] = float(transform_data['frameWidth']) / quotient
        transform_data['imageHeight'] = float(transform_data['imageHeight']) / quotient
        transform_data['frameHeight'] = float(transform_data['frameHeight']) / quotient
        transform_data['imageTranslateX'] = float(transform_data['imageTranslateX']) / quotient
        transform_data['imageTranslateY'] = float(transform_data['imageTranslateY']) / quotient

        # TODO handle full page images

        # set style for image
        image_style = 'display: inline-block;'

        # this solution work with kindle
        # NOTE(review): algebraically this is just
        # 100 * frameWidth / output_document_width.
        width_percent = 100 - (100 - 100 * float(transform_data['frameWidth']) / self._output_document_width)
        width_percent = round(width_percent, 1)
        image_style += ' width: {0}%;'.format(width_percent)

        # TODO only for epub and xhtml
        elem.set('style', image_style)

        # find old captions using p.caption_small and convert them to divs
        for p_caption in div_group_img.xpath('p[contains(@class,"caption_small")]'):
            if p_caption.get('style'):
                del p_caption.attrib['style']
            if p_caption.get('class'):
                del p_caption.attrib['class']
            # set new class and change tag
            p_caption.set('class', 'caption_small')
            p_caption.tag = 'div'

        # set width for caption div, aligning it like the surrounding group
        for div_caption in div_group_img.xpath('div[contains(@class,"caption_small")]'):
            new_style = 'width: {0}%; display: inline-block;'.format(width_percent)

            # # text-align: left -> margin-right: auto
            # # text-align: right -> margin-left: auto
            # # text-align: center -> margin: auto
            # # text-align: justify -> margin-right: auto
            # NOTE(review): `except (KeyError, Exception)` is effectively
            # `except Exception`; the expected failures here are
            # AttributeError (no style attr) and IndexError (no text-align).
            try:
                text_align = div_group_img.get('style').split('text-align: ', 1)[1].split(';', 1)[0]
                if text_align == 'center':
                    new_style += ' margin: auto;'
                elif text_align == 'right':
                    new_style += ' margin-left: auto;'
                else:
                    new_style += ' margin-right: auto;'
            except (KeyError, Exception):
                pass

            div_caption.set('style', new_style)

        #################
        # start editing #
        #################
        # cache path for edited images
        # example: /data/tmp/bk_image_editor/<project_id>
        # NOTE(review): self._cache_folder is already absolute, so
        # os.path.join discards the MEDIA_ROOT prefix and this is just
        # self._cache_folder — confirm that is intended.
        cache_folder = os.path.abspath(
            os.path.join(settings.MEDIA_ROOT, 'bk_image_editor', self._cache_folder)
        )

        output_image_path = None
        input_image_filename = ebooklib_item_image.file_name.rsplit('/')[-1]

        with Image.open(StringIO.StringIO(ebooklib_item_image.get_content())) as img:
            ie = BkImageEditor(input_image_file=img, input_image_filename=input_image_filename,
                               cache_folder=cache_folder)
            output_image_path = ie.process(
                image_width=int(transform_data['imageWidth']),
                image_height=int(transform_data['imageHeight']),
                image_translate_x=int(transform_data['imageTranslateX']),
                image_translate_y=int(transform_data['imageTranslateY']),
                image_flip_x=bool(-1 == int(transform_data['imageScaleX'])),
                image_flip_y=bool(-1 == int(transform_data['imageScaleY'])),
                image_rotate_degree=int(transform_data['imageRotateDegree']) * (-1),
                image_contrast=float(transform_data['imageContrast']) / 100,
                image_brightness=float(transform_data['imageBrightness']) / 100,
                image_blur=float(transform_data['imageBlur']),
                image_saturate=float(transform_data['imageSaturate']) / 100,
                image_opacity=int(transform_data['imageOpacity']),
                frame_width=int(transform_data['frameWidth']),
                frame_height=int(transform_data['frameHeight'])
            )

        # something went wrong
        if not output_image_path or not os.path.isfile(output_image_path):
            return

        # copy edited image into export package,
        # to have everything we need in one place
        dst = os.path.join(
            os.path.dirname(self._converter.images_path),
            os.path.basename(output_image_path)
        )
        if not os.path.exists(self._converter.images_path):
            os.makedirs(self._converter.images_path)
        shutil.copy(output_image_path, self._converter.images_path)

        # change image src in html
        elem.set("src", dst)
| agpl-3.0 |
timokoola/timoechobot | docutils/languages/en.py | 246 | 1848 | # $Id: en.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
English-language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'


# Keys are fixed Docutils node class names; values are the English label
# text rendered in documents (admonition titles, bibliographic headings).
labels = {
    # fixed: language-dependent
    'author': 'Author',
    'authors': 'Authors',
    'organization': 'Organization',
    'address': 'Address',
    'contact': 'Contact',
    'version': 'Version',
    'revision': 'Revision',
    'status': 'Status',
    'date': 'Date',
    'copyright': 'Copyright',
    'dedication': 'Dedication',
    'abstract': 'Abstract',
    'attention': 'Attention!',
    'caution': 'Caution!',
    'danger': '!DANGER!',
    'error': 'Error',
    'hint': 'Hint',
    'important': 'Important',
    'note': 'Note',
    'tip': 'Tip',
    'warning': 'Warning',
    'contents': 'Contents'}
"""Mapping of node class name to label text."""

# Keys are the (lowercased) field names as they appear in source documents;
# values are the canonical Docutils field names.
bibliographic_fields = {
    # language-dependent: fixed
    'author': 'author',
    'authors': 'authors',
    'organization': 'organization',
    'address': 'address',
    'contact': 'contact',
    'version': 'version',
    'revision': 'revision',
    'status': 'status',
    'date': 'date',
    'copyright': 'copyright',
    'dedication': 'dedication',
    'abstract': 'abstract'}
"""English (lowcased) to canonical name mapping for bibliographic fields."""

author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| apache-2.0 |
NervanaSystems/neon | examples/ssd/datasets/ingest_kitti.py | 1 | 13067 | #!/usr/bin/env python
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from __future__ import print_function
import os
import glob
import json
import numpy as np
from PIL import Image
import math
from tqdm import tqdm
from collections import OrderedDict
import ingest_utils as util
from neon.util.persist import get_data_cache_or_nothing
def get_ssd_config(img_reshape, inference=False):
    """Build the SSD dataloader/model configuration for the KITTI dataset.

    Arguments:
        img_reshape (tuple of int): target image size as (height, width).
        inference (bool): if True, use batch size 1 and omit the training
            augmentation section.

    Returns:
        OrderedDict: configuration with dataloader settings ('batch_size',
        'block_size', 'cache_directory', 'etl', optional 'augmentation') and
        the SSD prior-box layer settings under 'ssd_config'.
    """
    ssd_config = OrderedDict()
    ssd_config['batch_size'] = 32
    if inference:
        ssd_config['batch_size'] = 1
    ssd_config['block_size'] = 50
    ssd_config['cache_directory'] = get_data_cache_or_nothing(subdir='kitti_cache')
    # ETL: box targets (localization_ssd) + decoded image, both at the
    # requested (height, width).
    ssd_config["etl"] = [{
        "type": "localization_ssd",
        "height": img_reshape[0],
        "width": img_reshape[1],
        "max_gt_boxes": 500,
        "class_names": ['__background__', 'Car', 'Van', 'Truck', 'Pedestrian',
                        'Person_sitting', 'Cyclist', 'Tram', 'Misc', 'DontCare']
    }, {
        "type": "image",
        "height": img_reshape[0],
        "width": img_reshape[1],
        "channels": 3
    }]
    if not inference:
        # SSD-style patch sampling for training: one pass-through sampler plus
        # samplers targeting increasing minimum jaccard overlap with a GT box.
        ssd_config["augmentation"] = [{
            "type": "image",
            "batch_samplers":
            [
                {
                    "max_sample": 1,
                    "max_trials": 1
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"min_jaccard_overlap": 0.1}
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"min_jaccard_overlap": 0.3}
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"min_jaccard_overlap": 0.5}
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"min_jaccard_overlap": 0.7}
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"min_jaccard_overlap": 0.9}
                },
                {
                    "max_sample": 1,
                    "max_trials": 50,
                    "sampler": {"scale": [0.3, 1.0], "aspect_ratio": [0.5, 2.0]},
                    "sample_constraint": {"max_jaccard_overlap": 1.0, "min_jaccard_overlap": 0.1}
                }
            ]
        }]
    # Prior-box (default box) parameters per feature layer, ordered from
    # fine (conv4_3) to coarse (conv9_2).
    ssd_config['ssd_config'] = OrderedDict(
        [('conv4_3', {'min_sizes': 30.0, 'max_sizes': 60.0,
                      'aspect_ratios': 2.0, 'step': 8, 'normalize': True}),
         ('fc7', {'min_sizes': 60.0, 'max_sizes': 111.0,
                  'aspect_ratios': (2.0, 3.0), 'step': 16}),
         ('conv6_2', {'min_sizes': 111.0, 'max_sizes': 162.0,
                      'aspect_ratios': (2.0, 3.0), 'step': 32}),
         ('conv7_2', {'min_sizes': 162.0, 'max_sizes': 213.0,
                      'aspect_ratios': (2.0, 3.0), 'step': 64}),
         ('conv8_2', {'min_sizes': 213.0, 'max_sizes': 264.0,
                      'aspect_ratios': 2.0, 'step': 100}),
         ('conv9_2', {'min_sizes': 264.0, 'max_sizes': 315.0,
                      'aspect_ratios': 2.0, 'step': {'step_h': 300, 'step_w': 100}})])

    return ssd_config
def convert_annot_to_json(path, im_path, out_path, difficult, img_reshape=None):
    """
    Converts the KITTI annotations to json file.

    Uses the below reference for the KITTI dataset:

    OO representation of label format used in Kitti dataset.

    Description of fields from Kitti dataset dev kit: (link)[]
    The label files contain the following information, which can be read and
    written using the matlab tools (readLabels.m, writeLabels.m) provided within
    this devkit. All values (numerical or strings) are separated via spaces,
    each row corresponds to one object. The 15 columns represent:
    #Values    Name      Description
    ----------------------------------------------------------------------------
       1    type         Describes the type of object: 'Car', 'Van', 'Truck',
                         'Pedestrian', 'Person_sitting', 'Cyclist', 'Tram',
                         'Misc' or 'DontCare'
       1    truncated    Float from 0 (non-truncated) to 1 (truncated), where
                         truncated refers to the object leaving image boundaries
       1    occluded     Integer (0,1,2,3) indicating occlusion state:
                         0 = fully visible, 1 = partly occluded
                         2 = largely occluded, 3 = unknown
       1    alpha        Observation angle of object, ranging [-pi..pi]
       4    bbox         2D bounding box of object in the image (0-based index):
                         contains left, top, right, bottom pixel coordinates
       3    dimensions   3D object dimensions: height, width, length (in meters)
       3    location     3D object location x,y,z in camera coordinates (in meters)
       1    rotation_y   Rotation ry around Y-axis in camera coordinates [-pi..pi]
       1    score        Only for results: Float, indicating confidence in
                         detection, needed for p/r curves, higher is better.

    Arguments:
        path (string): path to KITTI annotation file
        im_path (string): path to image
        out_path (string): path to save the json file
        difficult (bool): include difficult objects
        img_reshape (tuple of int): if a tuple of H,W values is given, boxes
            and the recorded image size are scaled to that shape
    """
    with open(path) as f:
        labels = f.readlines()

    # start empty dictionary
    annot = {'object': []}

    # load image to obtain its dimensions and channel count
    im = np.array(Image.open(im_path))
    # scale factor and target (h, w) for the requested reshape
    # (assumed: util.get_image_scale returns the original size when
    # img_reshape is None — confirm against ingest_utils)
    scale, (h, w) = util.get_image_scale(im.shape[:2], img_reshape)
    c = im.shape[2]

    annot['size'] = {'depth': c, 'height': h, 'width': w}

    for label in labels:
        vals = label.split()

        typeid = vals[0]
        truncated = float(vals[1])
        occluded = int(vals[2])

        # columns 4-7 are the 2D box (left, top, right, bottom); scale to the
        # target image size and floor to integer pixel coordinates
        bbox = [float(x) for x in vals[4:8]]
        bbox = util.scale_boxes(bbox, scale)
        bbox_int = tuple([int(math.floor(x)) for x in bbox])

        # 'DontCare' regions carry sentinel values of -1 for both fields
        if typeid == 'DontCare':
            assert truncated == -1
            assert occluded == -1
        else:
            assert occluded in (0, 1, 2, 3)

        # an object is "difficult" when heavily truncated or largely occluded
        diff = truncated > 0.5 or occluded == 2

        # add object to annotation
        obj = {'bndbox': {'xmin': bbox_int[0], 'ymin': bbox_int[1],
                          'xmax': bbox_int[2], 'ymax': bbox_int[3]},
               'difficult': diff,
               'name': typeid,
               'truncated': truncated > 0.5,
               'occluded': occluded
               }

        # difficult objects are only kept when the caller asks for them
        if not diff or difficult:
            annot['object'].append(obj)

    with open(out_path, 'w') as f:
        json.dump(annot, f, indent=4)
def ingest_kitti(input_dir, out_dir, img_reshape=(300, 994),
                 train_percent=90, overwrite=False, skip_unzip=False):
    """
    Ingests the KITTI dataset. Performs the following ops:
    1. Unzips the files into output directory.
    2. Reshapes images to lower resolution (default reshape of 300x994
       maintains KITTI image aspect ratio)
    3. Converts annotations to json format
    4. Splits the training data into train and validation sets
    5. Writes manifest files
    6. Writes configuration files

    Arguments:
        input_dir (string): path to folder with KITTI zip files.
        out_dir (string): path to unzip KITTI data
        img_reshape (tuple of int): size to reshape image (default = (300, 994))
        train_percent (int): percent of data to use for training.
        overwrite (bool): overwrite existing files
        skip_unzip (bool): skip unzipping the archives (assume already done)
    """
    assert img_reshape is not None, "Target image reshape required."
    hw = '{}x{}'.format(img_reshape[0], img_reshape[1])

    # Command-line callers may hand us the percentage as a string; the split
    # arithmetic below requires an integer.
    train_percent = int(train_percent)

    zip_files = ['data_object_image_2.zip', 'data_object_label_2.zip']

    root_dir = os.path.join(out_dir, 'kitti')
    train_manifest = os.path.join(root_dir, 'train_{}.csv'.format(hw))
    val_manifest = os.path.join(root_dir, 'val_{}.csv'.format(hw))

    if os.path.exists(train_manifest) and os.path.exists(val_manifest) and not overwrite:
        print("Manifest files already found, skipping ingest.")
        print("Use --overwrite flag to force re-ingest.")
        return

    util.make_dir(root_dir)

    if skip_unzip is False:
        util.unzip_files(zip_files, input_dir, root_dir)

    img_folder = os.path.join(root_dir, 'training', 'image_2')
    annot_folder = os.path.join(root_dir, 'training', 'label_2')
    target_img_folder = os.path.join(root_dir, 'training', 'image_2-converted')
    target_annot_folder = os.path.join(root_dir, 'training', 'label_2-json')

    tags = glob.glob(os.path.join(img_folder, '*.png'))
    tags = [os.path.basename(os.path.splitext(tag)[0]) for tag in tags]
    assert len(tags) > 0, "No images found in {}".format(img_folder)

    util.make_dir(target_img_folder)
    util.make_dir(target_annot_folder)

    manifest = []

    for tag in tqdm(tags):
        image = os.path.join(img_folder, tag + '.png')
        annot = os.path.join(annot_folder, tag + '.txt')
        assert os.path.exists(image), "{} not found.".format(image)
        assert os.path.exists(annot), "{} not found.".format(annot)

        target_image = os.path.join(target_img_folder, tag + '.png')
        target_annot = os.path.join(target_annot_folder, tag + '.json')

        # BUG FIX: these calls previously passed img_reshape=None, so neither
        # the images nor the box annotations were actually resized to the
        # target shape the manifests/configs are tagged with.
        convert_annot_to_json(annot, image, target_annot, difficult=True,
                              img_reshape=img_reshape)
        util.resize_image(image, target_image, img_reshape=img_reshape)

        manifest.append((target_image, target_annot))

    # shuffle files and split into training and validation set.
    np.random.seed(0)
    np.random.shuffle(manifest)
    train_count = (len(manifest) * train_percent) // 100
    train = manifest[:train_count]
    val = manifest[train_count:]

    util.create_manifest(train_manifest, train, root_dir)
    util.create_manifest(val_manifest, val, root_dir)

    # write SSD CONFIG
    ssd_config = get_ssd_config(img_reshape)
    ssd_config_path = os.path.join(root_dir, 'kitti_ssd_{}.cfg'.format(hw))
    util.write_ssd_config(ssd_config, ssd_config_path, True)

    # write SSD VAL CONFIG
    ssd_config_val = get_ssd_config(img_reshape, True)
    ssd_config_path_val = os.path.join(root_dir, 'kitti_ssd_{}_val.cfg'.format(hw))
    util.write_ssd_config(ssd_config_val, ssd_config_path_val, True)

    config_path = os.path.join(root_dir, 'kitti_{}.cfg'.format(hw))
    config = {'manifest': '[train:{}, val:{}]'.format(train_manifest, val_manifest),
              'manifest_root': root_dir,
              'epochs': 100,
              'height': img_reshape[0],
              # BUG FIX: width previously used img_reshape[0] (the height).
              'width': img_reshape[1],
              'ssd_config': '[train:{}, val:{}]'.format(ssd_config_path, ssd_config_path_val)
              }
    util.write_config(config, config_path)
if __name__ == '__main__':
    from configargparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument('--input_dir', required=True, help='path to dir with KITTI zip files.')
    parser.add_argument('--output_dir', required=True, help='path to unzip data.')
    parser.add_argument('--overwrite', action='store_true', help='overwrite files')
    # BUG FIX: type=int added -- without it a command-line value arrives as a
    # string and breaks the integer split arithmetic in ingest_kitti. Help
    # text corrected: the value is a percent (0-100), not a fraction.
    parser.add_argument('--training_pct', type=int, default=90,
                        help='percent of data used for training.')
    parser.add_argument('--skip_unzip', action='store_true', help='skip unzip')
    args = parser.parse_args()

    ingest_kitti(args.input_dir, args.output_dir, train_percent=args.training_pct,
                 overwrite=args.overwrite, skip_unzip=args.skip_unzip)
| apache-2.0 |
av8ramit/tensorflow | tensorflow/contrib/boosted_trees/python/kernel_tests/model_ops_test.py | 26 | 13389 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the GTFlow model ops.
The tests cover:
- Loading a model from protobufs.
- Running Predictions using an existing model.
- Serializing the model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.contrib.boosted_trees.proto import learner_pb2
from tensorflow.contrib.boosted_trees.proto import tree_config_pb2
from tensorflow.contrib.boosted_trees.python.ops import model_ops
from tensorflow.contrib.boosted_trees.python.ops import prediction_ops
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import saver
def _append_to_leaf(leaf, c_id, w):
"""Helper method for building tree leaves.
Appends weight contributions for the given class index to a leaf node.
Args:
leaf: leaf node to append to.
c_id: class Id for the weight update.
w: weight contribution value.
"""
leaf.sparse_vector.index.append(c_id)
leaf.sparse_vector.value.append(w)
def _set_float_split(split, feat_col, thresh, l_id, r_id):
"""Helper method for building tree float splits.
Sets split feature column, threshold and children.
Args:
split: split node to update.
feat_col: feature column for the split.
thresh: threshold to split on forming rule x <= thresh.
l_id: left child Id.
r_id: right child Id.
"""
split.feature_column = feat_col
split.threshold = thresh
split.left_id = l_id
split.right_id = r_id
class ModelOpsTest(test_util.TensorFlowTestCase):
  """Tests creating, serializing, deserializing and saving tree ensembles."""

  def setUp(self):
    """Sets up test for model_ops.

    Create a batch of two examples having one dense float, two sparse float and
    one sparse int features.
    The data looks like the following:
    | Instance | Dense0 | SparseF0 | SparseF1 | SparseI0 |
    | 0        |  7     |    -3    |          |          |
    | 1        | -2     |          |    4     |   9,1    |
    """
    super(ModelOpsTest, self).setUp()
    self._dense_float_tensor = np.array([[7.0], [-2.0]])
    # Sparse float feature 0: value -3 for instance 0 only.
    self._sparse_float_indices1 = np.array([[0, 0]])
    self._sparse_float_values1 = np.array([-3.0])
    self._sparse_float_shape1 = np.array([2, 1])
    # Sparse float feature 1: value 4 for instance 1 only.
    self._sparse_float_indices2 = np.array([[1, 0]])
    self._sparse_float_values2 = np.array([4.0])
    self._sparse_float_shape2 = np.array([2, 1])
    # Sparse int feature 0: values 9 and 1 for instance 1.
    self._sparse_int_indices1 = np.array([[1, 0], [1, 1]])
    self._sparse_int_values1 = np.array([9, 1])
    self._sparse_int_shape1 = np.array([2, 2])
    self._seed = 123

  def testCreate(self):
    # A single-leaf bias tree should yield the same score for every example.
    with self.test_session():
      tree_ensemble_config = tree_config_pb2.DecisionTreeEnsembleConfig()
      tree = tree_ensemble_config.trees.add()
      _append_to_leaf(tree.nodes.add().leaf, 0, -0.4)
      tree_ensemble_config.tree_weights.append(1.0)

      # Prepare learner config.
      learner_config = learner_pb2.LearnerConfig()
      learner_config.num_classes = 2

      tree_ensemble_handle = model_ops.tree_ensemble_variable(
          stamp_token=3,
          tree_ensemble_config=tree_ensemble_config.SerializeToString(),
          name="create_tree")
      resources.initialize_resources(resources.shared_resources()).run()

      result, _ = prediction_ops.gradient_trees_prediction(
          tree_ensemble_handle,
          self._seed, [self._dense_float_tensor], [
              self._sparse_float_indices1, self._sparse_float_indices2
          ], [self._sparse_float_values1, self._sparse_float_values2],
          [self._sparse_float_shape1,
           self._sparse_float_shape2], [self._sparse_int_indices1],
          [self._sparse_int_values1], [self._sparse_int_shape1],
          learner_config=learner_config.SerializeToString(),
          apply_dropout=False,
          apply_averaging=False,
          center_bias=False,
          reduce_dim=True)
      self.assertAllClose(result.eval(), [[-0.4], [-0.4]])
      # The stamp token supplied at creation must be readable back.
      stamp_token = model_ops.tree_ensemble_stamp_token(tree_ensemble_handle)
      self.assertEqual(stamp_token.eval(), 3)

  def testSerialization(self):
    # Serialize an ensemble in one graph, deserialize it in a fresh graph,
    # and check both predictions and a round-trip re-serialization.
    with ops.Graph().as_default() as graph:
      with self.test_session(graph):
        tree_ensemble_config = tree_config_pb2.DecisionTreeEnsembleConfig()
        # Bias tree only for second class.
        tree1 = tree_ensemble_config.trees.add()
        _append_to_leaf(tree1.nodes.add().leaf, 1, -0.2)
        tree_ensemble_config.tree_weights.append(1.0)

        # Depth 2 tree.
        tree2 = tree_ensemble_config.trees.add()
        tree_ensemble_config.tree_weights.append(1.0)
        _set_float_split(tree2.nodes.add()
                         .sparse_float_binary_split_default_right.split, 1, 4.0,
                         1, 2)
        _set_float_split(tree2.nodes.add().dense_float_binary_split, 0, 9.0, 3,
                         4)
        _append_to_leaf(tree2.nodes.add().leaf, 0, 0.5)
        _append_to_leaf(tree2.nodes.add().leaf, 1, 1.2)
        _append_to_leaf(tree2.nodes.add().leaf, 0, -0.9)

        tree_ensemble_handle = model_ops.tree_ensemble_variable(
            stamp_token=7,
            tree_ensemble_config=tree_ensemble_config.SerializeToString(),
            name="saver_tree")
        stamp_token, serialized_config = model_ops.tree_ensemble_serialize(
            tree_ensemble_handle)
        resources.initialize_resources(resources.shared_resources()).run()
        self.assertEqual(stamp_token.eval(), 7)
        serialized_config = serialized_config.eval()

    with ops.Graph().as_default() as graph:
      with self.test_session(graph):
        tree_ensemble_handle2 = model_ops.tree_ensemble_variable(
            stamp_token=9,
            tree_ensemble_config=serialized_config,
            name="saver_tree2")
        resources.initialize_resources(resources.shared_resources()).run()

        # Prepare learner config.
        learner_config = learner_pb2.LearnerConfig()
        learner_config.num_classes = 3

        result, _ = prediction_ops.gradient_trees_prediction(
            tree_ensemble_handle2,
            self._seed, [self._dense_float_tensor], [
                self._sparse_float_indices1, self._sparse_float_indices2
            ], [self._sparse_float_values1, self._sparse_float_values2],
            [self._sparse_float_shape1,
             self._sparse_float_shape2], [self._sparse_int_indices1],
            [self._sparse_int_values1], [self._sparse_int_shape1],
            learner_config=learner_config.SerializeToString(),
            apply_dropout=False,
            apply_averaging=False,
            center_bias=False,
            reduce_dim=True)

        # Re-serialize tree.
        stamp_token2, serialized_config2 = model_ops.tree_ensemble_serialize(
            tree_ensemble_handle2)

        # The first example will get bias class 1 -0.2 from first tree and
        # leaf 2 payload (sparse feature missing) of 0.5 hence [0.5, -0.2],
        # the second example will get the same bias class 1 -0.2 and leaf 3
        # payload of class 1 1.2 hence [0.0, 1.0].
        self.assertEqual(stamp_token2.eval(), 9)
        # Class 2 does have scores in the leaf => it gets score 0.
        self.assertEqual(serialized_config2.eval(), serialized_config)
        self.assertAllClose(result.eval(), [[0.5, -0.2], [0, 1.0]])

  def testRestore(self):
    # Calling self.test_session() without a graph specified results in
    # TensorFlowTestCase caching the session and returning the same one
    # every time. In this test, we need to create two different sessions
    # which is why we also create a graph and pass it to self.test_session()
    # to ensure no caching occurs under the hood.
    save_path = os.path.join(self.get_temp_dir(), "restore-test")
    with ops.Graph().as_default() as graph:
      with self.test_session(graph) as sess:
        # Prepare learner config.
        learner_config = learner_pb2.LearnerConfig()
        learner_config.num_classes = 2

        # Add the first tree and save.
        tree_ensemble_config = tree_config_pb2.DecisionTreeEnsembleConfig()
        tree = tree_ensemble_config.trees.add()
        tree_ensemble_config.tree_metadata.add().is_finalized = True
        tree_ensemble_config.tree_weights.append(1.0)
        _append_to_leaf(tree.nodes.add().leaf, 0, -0.1)
        tree_ensemble_handle = model_ops.tree_ensemble_variable(
            stamp_token=3,
            tree_ensemble_config=tree_ensemble_config.SerializeToString(),
            name="restore_tree")
        resources.initialize_resources(resources.shared_resources()).run()
        variables.initialize_all_variables().run()
        my_saver = saver.Saver()

        # Add the second tree and replace the ensemble of the handle.
        tree2 = tree_ensemble_config.trees.add()
        tree_ensemble_config.tree_weights.append(1.0)
        _append_to_leaf(tree2.nodes.add().leaf, 0, -1.0)
        # Predict to confirm.
        with ops.control_dependencies([
            model_ops.tree_ensemble_deserialize(
                tree_ensemble_handle,
                stamp_token=3,
                tree_ensemble_config=tree_ensemble_config.SerializeToString())
        ]):
          result, _ = prediction_ops.gradient_trees_prediction(
              tree_ensemble_handle,
              self._seed, [self._dense_float_tensor], [
                  self._sparse_float_indices1, self._sparse_float_indices2
              ], [self._sparse_float_values1, self._sparse_float_values2],
              [self._sparse_float_shape1,
               self._sparse_float_shape2], [self._sparse_int_indices1],
              [self._sparse_int_values1], [self._sparse_int_shape1],
              learner_config=learner_config.SerializeToString(),
              apply_dropout=False,
              apply_averaging=False,
              center_bias=False,
              reduce_dim=True)
        self.assertAllClose([[-1.1], [-1.1]], result.eval())
        # Save before adding other trees.
        val = my_saver.save(sess, save_path)
        self.assertEqual(save_path, val)

        # Add more trees after saving.
        tree3 = tree_ensemble_config.trees.add()
        tree_ensemble_config.tree_weights.append(1.0)
        _append_to_leaf(tree3.nodes.add().leaf, 0, -10.0)
        # Predict to confirm.
        with ops.control_dependencies([
            model_ops.tree_ensemble_deserialize(
                tree_ensemble_handle,
                stamp_token=3,
                tree_ensemble_config=tree_ensemble_config.SerializeToString())
        ]):
          result, _ = prediction_ops.gradient_trees_prediction(
              tree_ensemble_handle,
              self._seed, [self._dense_float_tensor], [
                  self._sparse_float_indices1, self._sparse_float_indices2
              ], [self._sparse_float_values1, self._sparse_float_values2],
              [self._sparse_float_shape1,
               self._sparse_float_shape2], [self._sparse_int_indices1],
              [self._sparse_int_values1], [self._sparse_int_shape1],
              learner_config=learner_config.SerializeToString(),
              apply_dropout=False,
              apply_averaging=False,
              center_bias=False,
              reduce_dim=True)
        self.assertAllClose(result.eval(), [[-11.1], [-11.1]])

    # Start a second session. In that session the parameter nodes
    # have not been initialized either.
    with ops.Graph().as_default() as graph:
      with self.test_session(graph) as sess:
        tree_ensemble_handle = model_ops.tree_ensemble_variable(
            stamp_token=0, tree_ensemble_config="", name="restore_tree")
        my_saver = saver.Saver()
        my_saver.restore(sess, save_path)
        result, _ = prediction_ops.gradient_trees_prediction(
            tree_ensemble_handle,
            self._seed, [self._dense_float_tensor], [
                self._sparse_float_indices1, self._sparse_float_indices2
            ], [self._sparse_float_values1, self._sparse_float_values2],
            [self._sparse_float_shape1,
             self._sparse_float_shape2], [self._sparse_int_indices1],
            [self._sparse_int_values1], [self._sparse_int_shape1],
            learner_config=learner_config.SerializeToString(),
            apply_dropout=False,
            apply_averaging=False,
            center_bias=False,
            reduce_dim=True)
        # Make sure we only have the first and second tree.
        # The third tree was added after the save.
        self.assertAllClose(result.eval(), [[-1.1], [-1.1]])
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
nzlosh/st2 | contrib/runners/winrm_runner/tests/unit/test_winrm_command_runner.py | 3 | 1685 | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import mock
from st2common.runners.base import ActionRunner
from st2tests.base import RunnerTestCase
from winrm_runner import winrm_command_runner
from winrm_runner.winrm_base import WinRmBaseRunner
class WinRmCommandRunnerTestCase(RunnerTestCase):
    """Unit tests for the WinRM command runner factory and run path."""

    def setUp(self):
        # NOTE(review): this mirrors the original code, which calls the
        # parent's setUpClass() from setUp(); confirm whether setUp() was
        # intended instead.
        super(WinRmCommandRunnerTestCase, self).setUpClass()
        self._runner = winrm_command_runner.get_runner()

    def test_init(self):
        instance = winrm_command_runner.WinRmCommandRunner("abcdef")
        self.assertEqual(instance.runner_id, "abcdef")
        self.assertIsInstance(instance, WinRmBaseRunner)
        self.assertIsInstance(instance, ActionRunner)

    @mock.patch("winrm_runner.winrm_command_runner.WinRmCommandRunner.run_cmd")
    def test_run(self, mock_run_cmd):
        mock_run_cmd.return_value = "expected"
        self._runner.runner_parameters = {"cmd": "ipconfig /all"}
        self.assertEqual(self._runner.run({}), "expected")
        mock_run_cmd.assert_called_with("ipconfig /all")
| apache-2.0 |
carljm/django | django/contrib/gis/geos/prototypes/threadsafe.py | 529 | 2859 | import threading
from django.contrib.gis.geos.libgeos import (
CONTEXT_PTR, error_h, lgeos, notice_h,
)
class GEOSContextHandle(object):
    """
    Python object representing a GEOS context handle.
    """
    def __init__(self):
        # Initializing the context handler for this thread with
        # the notice and error handler.
        self.ptr = lgeos.initGEOS_r(notice_h, error_h)
    def __del__(self):
        # `lgeos` may already have been torn down during interpreter
        # shutdown, so guard both the pointer and the library reference
        # before releasing the context.
        if self.ptr and lgeos:
            lgeos.finishGEOS_r(self.ptr)
# Defining a thread-local object and creating an instance
# to hold a reference to GEOSContextHandle for this thread.
class GEOSContext(threading.local):
    # Lazily replaced with a GEOSContextHandle the first time a threaded
    # GEOS function is invoked on a given thread (see GEOSFunc.__call__).
    handle = None
thread_context = GEOSContext()
class GEOSFunc(object):
    """
    Class that serves as a wrapper for GEOS C Functions, and will
    use thread-safe function variants when available.
    """
    def __init__(self, func_name):
        try:
            # GEOS thread-safe function signatures end with '_r', and
            # take an additional context handle parameter.
            self.cfunc = getattr(lgeos, func_name + '_r')
            self.threaded = True
            # Create a reference here to thread_context so it's not
            # garbage-collected before an attempt to call this object.
            self.thread_context = thread_context
        except AttributeError:
            # Otherwise, use usual function.
            self.cfunc = getattr(lgeos, func_name)
            self.threaded = False
    def __call__(self, *args):
        """Invoke the wrapped GEOS C function with *args*."""
        if self.threaded:
            # If a context handle does not exist for this thread, initialize one.
            if not self.thread_context.handle:
                self.thread_context.handle = GEOSContextHandle()
            # Call the threaded GEOS routine with pointer of the context handle
            # as the first argument.
            return self.cfunc(self.thread_context.handle.ptr, *args)
        else:
            return self.cfunc(*args)
    def __str__(self):
        return self.cfunc.__name__
    # argtypes property
    def _get_argtypes(self):
        return self.cfunc.argtypes
    def _set_argtypes(self, argtypes):
        # The '_r' variants take the GEOS context handle as their first C
        # argument, so it is prepended to the caller-supplied signature.
        if self.threaded:
            new_argtypes = [CONTEXT_PTR]
            new_argtypes.extend(argtypes)
            self.cfunc.argtypes = new_argtypes
        else:
            self.cfunc.argtypes = argtypes
    argtypes = property(_get_argtypes, _set_argtypes)
    # restype property
    def _get_restype(self):
        return self.cfunc.restype
    def _set_restype(self, restype):
        self.cfunc.restype = restype
    restype = property(_get_restype, _set_restype)
    # errcheck property
    def _get_errcheck(self):
        return self.cfunc.errcheck
    def _set_errcheck(self, errcheck):
        self.cfunc.errcheck = errcheck
    errcheck = property(_get_errcheck, _set_errcheck)
| bsd-3-clause |
nicolargo/intellij-community | python/helpers/docutils/urischemes.py | 72 | 6277 | # $Id: urischemes.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
`schemes` is a dictionary with lowercase URI addressing schemes as
keys and descriptions as values. It was compiled from the index at
http://www.iana.org/assignments/uri-schemes (revised 2005-11-28)
and an older list at http://www.w3.org/Addressing/schemes.html.
"""
# Many values are blank and should be filled in with useful descriptions.
schemes = {
    'about': 'provides information on Navigator',
    'acap': 'Application Configuration Access Protocol; RFC 2244',
    'addbook': "To add vCard entries to Communicator's Address Book",
    'afp': 'Apple Filing Protocol',
    'afs': 'Andrew File System global file names',
    'aim': 'AOL Instant Messenger',
    'callto': 'for NetMeeting links',
    'castanet': 'Castanet Tuner URLs for Netcaster',
    'chttp': 'cached HTTP supported by RealPlayer',
    'cid': 'content identifier; RFC 2392',
    'crid': 'TV-Anytime Content Reference Identifier; RFC 4078',
    'data': ('allows inclusion of small data items as "immediate" data; '
             'RFC 2397'),
    'dav': 'Distributed Authoring and Versioning Protocol; RFC 2518',
    'dict': 'dictionary service protocol; RFC 2229',
    'dns': 'Domain Name System resources',
    'eid': ('External ID; non-URL data; general escape mechanism to allow '
            'access to information for applications that are too '
            'specialized to justify their own schemes'),
    'fax': ('a connection to a terminal that can handle telefaxes '
            '(facsimiles); RFC 2806'),
    'feed' : 'NetNewsWire feed',
    'file': 'Host-specific file names; RFC 1738',
    'finger': '',
    'freenet': '',
    'ftp': 'File Transfer Protocol; RFC 1738',
    'go': 'go; RFC 3368',
    'gopher': 'The Gopher Protocol',
    'gsm-sms': ('Global System for Mobile Communications Short Message '
                'Service'),
    'h323': ('video (audiovisual) communication on local area networks; '
             'RFC 3508'),
    'h324': ('video and audio communications over low bitrate connections '
             'such as POTS modem connections'),
    'hdl': 'CNRI handle system',
    'hnews': 'an HTTP-tunneling variant of the NNTP news protocol',
    'http': 'Hypertext Transfer Protocol; RFC 2616',
    'https': 'HTTP over SSL; RFC 2818',
    'hydra': 'SubEthaEdit URI. See http://www.codingmonkeys.de/subethaedit.',
    'iioploc': 'Internet Inter-ORB Protocol Location?',
    'ilu': 'Inter-Language Unification',
    'im': 'Instant Messaging; RFC 3860',
    'imap': 'Internet Message Access Protocol; RFC 2192',
    'info': 'Information Assets with Identifiers in Public Namespaces',
    'ior': 'CORBA interoperable object reference',
    'ipp': 'Internet Printing Protocol; RFC 3510',
    'irc': 'Internet Relay Chat',
    'iris.beep': 'iris.beep; RFC 3983',
    'iseek' : 'See www.ambrosiasw.com; a little util for OS X.',
    'jar': 'Java archive',
    'javascript': ('JavaScript code; evaluates the expression after the '
                   'colon'),
    'jdbc': 'JDBC connection URI.',
    'ldap': 'Lightweight Directory Access Protocol',
    'lifn': '',
    'livescript': '',
    'lrq': '',
    'mailbox': 'Mail folder access',
    'mailserver': 'Access to data available from mail servers',
    'mailto': 'Electronic mail address; RFC 2368',
    'md5': '',
    'mid': 'message identifier; RFC 2392',
    'mocha': '',
    'modem': ('a connection to a terminal that can handle incoming data '
              'calls; RFC 2806'),
    'mtqp': 'Message Tracking Query Protocol; RFC 3887',
    'mupdate': 'Mailbox Update (MUPDATE) Protocol; RFC 3656',
    'news': 'USENET news; RFC 1738',
    'nfs': 'Network File System protocol; RFC 2224',
    'nntp': 'USENET news using NNTP access; RFC 1738',
    'opaquelocktoken': 'RFC 2518',
    'phone': '',
    'pop': 'Post Office Protocol; RFC 2384',
    'pop3': 'Post Office Protocol v3',
    'pres': 'Presence; RFC 3859',
    'printer': '',
    'prospero': 'Prospero Directory Service; RFC 4157',
    'rdar' : ('URLs found in Darwin source '
              '(http://www.opensource.apple.com/darwinsource/).'),
    'res': '',
    'rtsp': 'real time streaming protocol; RFC 2326',
    'rvp': '',
    'rwhois': '',
    'rx': 'Remote Execution',
    'sdp': '',
    'service': 'service location; RFC 2609',
    'shttp': 'secure hypertext transfer protocol',
    'sip': 'Session Initiation Protocol; RFC 3261',
    # Fixed typo: "intitiaion" -> "initiation" (RFC 3261, SIP over TLS).
    'sips': 'secure session initiation protocol; RFC 3261',
    'smb': 'SAMBA filesystems.',
    'snews': 'For NNTP postings via SSL',
    'snmp': 'Simple Network Management Protocol; RFC 4088',
    'soap.beep': 'RFC 3288',
    'soap.beeps': 'RFC 3288',
    'ssh': 'Reference to interactive sessions via ssh.',
    't120': 'real time data conferencing (audiographics)',
    'tag': 'RFC 4151',
    'tcp': '',
    'tel': ('a connection to a terminal that handles normal voice '
            'telephone calls, a voice mailbox or another voice messaging '
            'system or a service that can be operated using DTMF tones; '
            'RFC 2806.'),
    'telephone': 'telephone',
    'telnet': 'Reference to interactive sessions; RFC 4248',
    'tftp': 'Trivial File Transfer Protocol; RFC 3617',
    'tip': 'Transaction Internet Protocol; RFC 2371',
    'tn3270': 'Interactive 3270 emulation sessions',
    'tv': '',
    'urn': 'Uniform Resource Name; RFC 2141',
    'uuid': '',
    'vemmi': 'versatile multimedia interface; RFC 2122',
    'videotex': '',
    'view-source': 'displays HTML code that was generated with JavaScript',
    'wais': 'Wide Area Information Servers; RFC 4156',
    'whodp': '',
    'whois++': 'Distributed directory service.',
    'x-man-page': ('Opens man page in Terminal.app on OS X '
                   '(see macosxhints.com)'),
    'xmlrpc.beep': 'RFC 3529',
    'xmlrpc.beeps': 'RFC 3529',
    'z39.50r': 'Z39.50 Retrieval; RFC 2056',
    'z39.50s': 'Z39.50 Session; RFC 2056',}
| apache-2.0 |
aschwaighofer/swift | utils/update_checkout/update_checkout/update_checkout.py | 5 | 23330 | # utils/update_checkout.py - Utility to update local checkouts --*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import print_function
import argparse
import json
import os
import platform
import re
import sys
import traceback
from functools import reduce
from multiprocessing import Lock, Pool, cpu_count, freeze_support
from build_swift.build_swift.constants import SWIFT_SOURCE_ROOT
from swift_build_support.swift_build_support import shell
SCRIPT_FILE = os.path.abspath(__file__)
SCRIPT_DIR = os.path.dirname(SCRIPT_FILE)
def run_parallel(fn, pool_args, n_processes=0):
    """Run *fn* over *pool_args* in a multiprocessing pool and return results.

    NOTE: This function was originally located in the shell module of
    swift_build_support and should eventually be replaced with a better
    parallel implementation.
    """
    def _install_lock(shared):
        # Runs once per worker process; publishes the lock as a global so
        # the mapped function can serialize its console output.
        global lock
        lock = shared

    workers = n_processes if n_processes != 0 else cpu_count() * 2
    print("Running ``%s`` with up to %d processes." %
          (fn.__name__, workers))
    shared_lock = Lock()
    pool = Pool(processes=workers, initializer=_install_lock,
                initargs=(shared_lock,))
    # The explicit (huge) timeout keeps the wait interruptible by Ctrl-C.
    results = pool.map_async(func=fn, iterable=pool_args).get(999999)
    pool.close()
    pool.join()
    return results
def check_parallel_results(results, op):
    """Count and report the failures returned by run_parallel.

    NOTE: This function was originally located in the shell module of
    swift_build_support and should eventually be replaced with a better
    parallel implementation.
    """
    if results is None:
        return 0
    failures = 0
    for outcome in results:
        if outcome is None:
            continue
        # Print the banner once, before the first failure is reported.
        if failures == 0:
            print("======%s FAILURES======" % op)
        print("%s failed (ret=%d): %s" % (outcome.repo_path,
                                          outcome.ret, outcome))
        failures += 1
        if outcome.stderr:
            print(outcome.stderr)
    return failures
def confirm_tag_in_repo(tag, repo_name):
    """Return *tag* if it exists on origin for this repo, else None."""
    remote_tags = shell.capture(['git', 'ls-remote', '--tags',
                                 'origin', tag], echo=False)
    if remote_tags:
        return tag
    print("Tag '" + tag + "' does not exist for '" +
          repo_name + "', just updating regularly")
    return None
def find_rev_by_timestamp(timestamp, repo_name, refspec):
    """Return the newest first-parent commit on *refspec* at or before
    *timestamp*, raising RuntimeError when none exists."""
    cmd = ["git", "log", "-1", "--format=%H", "--first-parent",
           '--before=' + timestamp, refspec]
    revision = shell.capture(cmd).strip()
    if not revision:
        raise RuntimeError('No rev in %s before timestamp %s' %
                           (repo_name, timestamp))
    return revision
def get_branch_for_repo(config, repo_name, scheme_name, scheme_map,
                        cross_repos_pr):
    """Resolve the branch to check out for *repo_name*.

    Returns a (branch_name, cross_repo) tuple. When the repo's remote id
    appears in *cross_repos_pr*, a local ``ci_pr_<N>`` branch is (re)created
    from the pull request's merge ref and ``cross_repo`` is True.
    """
    cross_repo = False
    # Default: the scheme name itself doubles as the branch name.
    repo_branch = scheme_name
    if scheme_map:
        scheme_branch = scheme_map[repo_name]
        repo_branch = scheme_branch
        remote_repo_id = config['repos'][repo_name]['remote']['id']
        if remote_repo_id in cross_repos_pr:
            cross_repo = True
            pr_id = cross_repos_pr[remote_repo_id]
            repo_branch = "ci_pr_{0}".format(pr_id)
            # Move off the PR branch before deleting/recreating it, then
            # fetch the PR merge commit into the fresh local branch.
            shell.run(["git", "checkout", scheme_branch],
                      echo=True)
            shell.capture(["git", "branch", "-D", repo_branch],
                          echo=True, allow_non_zero_exit=True)
            shell.run(["git", "fetch", "origin",
                       "pull/{0}/merge:{1}"
                       .format(pr_id, repo_branch), "--tags"], echo=True)
    return repo_branch, cross_repo
def update_single_repository(pool_args):
    """Update one repository checkout (worker function for run_parallel).

    *pool_args* is the positional bundle built by update_all_repositories.
    Returns None on success; on failure returns the caught exception value
    (run_parallel workers must not raise).
    """
    source_root, config, repo_name, scheme_name, scheme_map, tag, timestamp, \
        reset_to_remote, should_clean, cross_repos_pr = pool_args
    repo_path = os.path.join(source_root, repo_name)
    # Only operate on real checkouts; symlinked dirs are user-managed.
    if not os.path.isdir(repo_path) or os.path.islink(repo_path):
        return
    try:
        print("Updating '" + repo_path + "'")
        with shell.pushd(repo_path, dry_run=False, echo=False):
            cross_repo = False
            checkout_target = None
            if tag:
                checkout_target = confirm_tag_in_repo(tag, repo_name)
            elif scheme_name:
                checkout_target, cross_repo = get_branch_for_repo(
                    config, repo_name, scheme_name, scheme_map, cross_repos_pr)
                if timestamp:
                    checkout_target = find_rev_by_timestamp(timestamp,
                                                            repo_name,
                                                            checkout_target)
            # The clean option restores a repository to pristine condition.
            if should_clean:
                shell.run(['git', 'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive', 'git',
                           'clean', '-fdx'], echo=True)
                shell.run(['git', 'submodule', 'foreach', '--recursive', 'git',
                           'reset', '--hard', 'HEAD'], echo=True)
                shell.run(['git', 'reset', '--hard', 'HEAD'], echo=True)
                # It is possible to reset --hard and still be mid-rebase.
                try:
                    shell.run(['git', 'rebase', '--abort'], echo=True)
                except Exception:
                    pass
            if checkout_target:
                shell.run(['git', 'status', '--porcelain', '-uno'],
                          echo=False)
                shell.run(['git', 'checkout', checkout_target], echo=True)
            # It's important that we checkout, fetch, and rebase, in order.
            # .git/FETCH_HEAD updates the not-for-merge attributes based on
            # which branch was checked out during the fetch.
            shell.run(["git", "fetch", "--recurse-submodules=yes", "--tags"],
                      echo=True)
            # If we were asked to reset to the specified branch, do the hard
            # reset and return.
            if checkout_target and reset_to_remote and not cross_repo:
                full_target = full_target_name('origin', checkout_target)
                shell.run(['git', 'reset', '--hard', full_target], echo=True)
                return
            # Query whether we have a "detached HEAD", which will mean that
            # we previously checked out a tag rather than a branch.
            detached_head = False
            try:
                # This git command returns error code 1 if HEAD is detached.
                # Otherwise there was some other error, and we need to handle
                # it like other command errors.
                shell.run(["git", "symbolic-ref", "-q", "HEAD"], echo=False)
            except Exception as e:
                # NOTE(review): assumes shell errors expose a `.ret`
                # attribute; anything without it would raise AttributeError
                # here -- confirm against the shell module.
                if e.ret == 1:
                    detached_head = True
                else:
                    raise  # Pass this error up the chain.
            # If we have a detached HEAD in this repository, we don't want
            # to rebase. With a detached HEAD, the fetch will have marked
            # all the branches in FETCH_HEAD as not-for-merge, and the
            # "git rebase FETCH_HEAD" will try to rebase the tree from the
            # default branch's current head, making a mess.
            # Prior to Git 2.6, this is the way to do a "git pull
            # --rebase" that respects rebase.autostash. See
            # http://stackoverflow.com/a/30209750/125349
            if not cross_repo and not detached_head:
                shell.run(["git", "rebase", "FETCH_HEAD"], echo=True)
            elif detached_head:
                print(repo_path,
                      "\nDetached HEAD; probably checked out a tag. No need "
                      "to rebase.\n")
            shell.run(["git", "submodule", "update", "--recursive"], echo=True)
    except Exception:
        # `type` shadows the builtin here; it is unused below, so harmless.
        (type, value, tb) = sys.exc_info()
        print('Error on repo "%s": %s' % (repo_path, traceback.format_exc()))
        return value
def get_timestamp_to_match(args):
    """Return the committer timestamp (ISO 8601) of the current swift
    checkout, or None unless --match-timestamp was requested."""
    if not args.match_timestamp:
        return None
    swift_dir = os.path.join(args.source_root, "swift")
    with shell.pushd(swift_dir, dry_run=False, echo=False):
        raw = shell.capture(["git", "log", "-1", "--format=%cI"],
                            echo=False)
    return raw.strip()
def update_all_repositories(args, config, scheme_name, cross_repos_pr):
    """Update every configured repository in parallel and return the
    per-repository results from run_parallel."""
    scheme_map = None
    if scheme_name:
        # Alias sets are unique across schemes (enforced by
        # validate_config), so the first scheme listing scheme_name among
        # its aliases is the only possible match.
        for scheme in config['branch-schemes'].values():
            if scheme_name in scheme['aliases']:
                scheme_map = scheme['repos']
                break
    pool_args = []
    timestamp = get_timestamp_to_match(args)
    for repo_name in config['repos'].keys():
        if repo_name in args.skip_repository_list:
            print("Skipping update of '" + repo_name + "', requested by user")
            continue
        pool_args.append([args.source_root, config, repo_name, scheme_name,
                          scheme_map, args.tag, timestamp,
                          args.reset_to_remote, args.clean, cross_repos_pr])
    return run_parallel(update_single_repository, pool_args, args.n_processes)
def obtain_additional_swift_sources(pool_args):
    """Clone a single additional repository (worker for run_parallel).

    *pool_args* bundles the parsed arguments, repo metadata, the branch to
    check out, and the clone options assembled by
    obtain_all_additional_swift_sources.
    """
    (args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
     skip_history, skip_repository_list) = pool_args
    env = dict(os.environ)
    # Disable git's interactive credential prompt. Environment values must
    # be strings -- the original passed the int 0 here, which makes
    # subprocess raise TypeError when the env mapping is applied.
    env.update({'GIT_TERMINAL_PROMPT': '0'})
    with shell.pushd(args.source_root, dry_run=False, echo=False):
        print("Cloning '" + repo_name + "'")
        if skip_history:
            # Shallow clone of just the requested branch.
            shell.run(['git', 'clone',
                       '--recursive', '--depth', '1', '--branch',
                       repo_branch, remote, repo_name],
                      env=env,
                      echo=True)
        else:
            shell.run(['git', 'clone',
                       '--recursive', remote, repo_name],
                      env=env,
                      echo=True)
        if scheme_name:
            src_path = os.path.join(args.source_root, repo_name, ".git")
            shell.run(['git', '--git-dir',
                       src_path, '--work-tree',
                       os.path.join(args.source_root, repo_name),
                       'checkout', repo_branch],
                      env=env,
                      echo=False)
        with shell.pushd(os.path.join(args.source_root, repo_name),
                         dry_run=False, echo=False):
            shell.run(["git", "submodule",
                       "update", "--recursive"],
                      env=env,
                      echo=False)
def obtain_all_additional_swift_sources(args, config, with_ssh, scheme_name,
                                        skip_history, skip_repository_list):
    """Clone every configured repository that is not already present.

    Skips repos listed in *skip_repository_list*, repos that already have a
    ``.git`` directory, and repos excluded from the selected branch scheme.
    Returns the run_parallel results, or None if nothing needed cloning.
    """
    pool_args = []
    with shell.pushd(args.source_root, dry_run=False, echo=False):
        for repo_name, repo_info in config['repos'].items():
            if repo_name in skip_repository_list:
                print("Skipping clone of '" + repo_name + "', requested by "
                      "user")
                continue
            if os.path.isdir(os.path.join(repo_name, ".git")):
                print("Skipping clone of '" + repo_name + "', directory "
                      "already exists")
                continue
            # If we have a url override, use that url instead of
            # interpolating.
            remote_repo_info = repo_info['remote']
            if 'url' in remote_repo_info:
                remote = remote_repo_info['url']
            else:
                remote_repo_id = remote_repo_info['id']
                if with_ssh is True or 'https-clone-pattern' not in config:
                    remote = config['ssh-clone-pattern'] % remote_repo_id
                else:
                    remote = config['https-clone-pattern'] % remote_repo_id
            repo_branch = None
            repo_not_in_scheme = False
            if scheme_name:
                for v in config['branch-schemes'].values():
                    if scheme_name not in v['aliases']:
                        continue
                    # If repo is not specified in the scheme, skip cloning it.
                    if repo_name not in v['repos']:
                        repo_not_in_scheme = True
                        continue
                    repo_branch = v['repos'][repo_name]
                    break
            else:
                repo_branch = scheme_name
            if repo_not_in_scheme:
                continue
            pool_args.append([args, repo_name, repo_info, repo_branch, remote,
                              with_ssh, scheme_name, skip_history,
                              skip_repository_list])
    if not pool_args:
        print("Not cloning any repositories.")
        return
    return run_parallel(
        obtain_additional_swift_sources, pool_args, args.n_processes)
def dump_repo_hashes(args, config, branch_scheme_name='repro'):
    """Dump the current checkout state to stdout as a new JSON config.

    The emitted configuration reuses the clone patterns and repo list from
    *config* and adds a single branch scheme named *branch_scheme_name*
    whose repos map to the currently checked-out git hashes.
    """
    config_copy_keys = ['ssh-clone-pattern', 'https-clone-pattern', 'repos']
    new_config = {key: config[key] for key in config_copy_keys}
    # (The original assigned `repos = {}` and immediately overwrote it --
    # the dead assignment has been removed.)
    repos = repo_hashes(args, config)
    new_config['branch-schemes'] = {
        branch_scheme_name: {'aliases': [branch_scheme_name], 'repos': repos}
    }
    json.dump(new_config, sys.stdout, indent=4)
def repo_hashes(args, config):
    """Return {repo_name: HEAD hash}, using 'skip' for missing checkouts,
    sorted by repository name."""
    hashes = {}
    for repo_name in sorted(config['repos']):
        repo_path = os.path.join(args.source_root, repo_name)
        if not os.path.exists(repo_path):
            hashes[repo_name] = 'skip'
            continue
        with shell.pushd(repo_path, dry_run=False, echo=False):
            head = shell.capture(["git", "rev-parse", "HEAD"],
                                 echo=False).strip()
        hashes[repo_name] = str(head)
    return hashes
def print_repo_hashes(args, config):
    """Print the current git hash of every configured repository."""
    for repo_name, repo_hash in sorted(repo_hashes(args, config).items()):
        print("{:<35}: {:<35}".format(repo_name, repo_hash))
def validate_config(config):
    """Sanity-check the branch-scheme section of the configuration.

    Raises RuntimeError when scheme names are duplicated, when a scheme's
    name is not listed among its own aliases, or when two schemes (or one
    scheme, twice) declare the same alias.
    """
    schemes = config['branch-schemes']
    # Make sure that our branch-scheme names are unique. Dict keys always
    # are, but keep the guard for configs built from other sources.
    names = schemes.keys()
    if len(names) != len(set(names)):
        raise RuntimeError('Configuration file has duplicate schemes?!')
    # Each branch-scheme name must also be one of its own aliases. This
    # guarantees sensible behavior of update_repository_to_scheme when the
    # branch-scheme name itself is passed as the scheme name.
    for name, scheme in schemes.items():
        if name not in scheme['aliases']:
            raise RuntimeError('branch-scheme name: "{0}" must be an alias '
                               'too.'.format(name))
    # All aliases must be globally unique: the total alias count must equal
    # the size of the union of all alias sets.
    total_aliases = sum(len(scheme['aliases']) for scheme in schemes.values())
    alias_union = set()
    for scheme in schemes.values():
        alias_union |= set(scheme['aliases'])
    if total_aliases != len(alias_union):
        raise RuntimeError('Configuration file has schemes with duplicate '
                           'aliases?!')
def full_target_name(repository, target):
    """Resolve *target* to a tag name, or to '<repository>/<target>' when
    it names a branch; raise RuntimeError if it is neither."""
    tag = shell.capture(["git", "tag", "-l", target], echo=True).strip()
    if tag == target:
        return tag
    branch_listing = shell.capture(["git", "branch", "--list", target],
                                   echo=True)
    branch = branch_listing.strip().replace("* ", "")
    if branch == target:
        return "%s/%s" % (repository, target)
    raise RuntimeError('Cannot determine if %s is a branch or a tag' % target)
def skip_list_for_platform(config):
    """Return the repos to skip on the current host platform.

    A repo that declares a 'platforms' list is skipped unless the host
    platform appears in it; repos without the key are always included
    (and nothing is printed for them).
    """
    platform_name = platform.system()
    skip_list = []
    for repo_name, repo_info in config['repos'].items():
        if 'platforms' not in repo_info:
            continue
        if platform_name in repo_info['platforms']:
            print("Including", repo_name, "on", platform_name)
        else:
            print("Skipping", repo_name, "on", platform_name)
            skip_list.append(repo_name)
    return skip_list
def main():
    """Entry point for update-checkout.

    Parses command-line options, optionally clones missing repositories,
    updates every checkout in parallel, and exits with the number of
    failed clone/update operations as the process status.
    """
    freeze_support()
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="""
By default, updates your checkouts of Swift, SourceKit, LLDB, and SwiftPM
repositories.
""")
    parser.add_argument(
        "--clone",
        help="Obtain Sources for Swift and Related Projects",
        action="store_true")
    parser.add_argument(
        "--clone-with-ssh",
        help="Obtain Sources for Swift and Related Projects via SSH",
        action="store_true")
    parser.add_argument(
        "--skip-history",
        help="Skip histories when obtaining sources",
        action="store_true")
    parser.add_argument(
        "--skip-repository",
        metavar="DIRECTORY",
        default=[],
        help="Skip the specified repository",
        dest='skip_repository_list',
        action="append")
    parser.add_argument(
        "--scheme",
        help='Use branches from the specified branch-scheme. A "branch-scheme"'
        ' is a list of (repo, branch) pairs.',
        metavar='BRANCH-SCHEME',
        dest='scheme')
    parser.add_argument(
        '--reset-to-remote',
        help='Reset each branch to the remote state.',
        action='store_true')
    parser.add_argument(
        '--clean',
        help='Clean unrelated files from each repository.',
        action='store_true')
    parser.add_argument(
        "--config",
        default=os.path.join(SCRIPT_DIR, os.pardir,
                             "update-checkout-config.json"),
        help="Configuration file to use")
    parser.add_argument(
        "--github-comment",
        help="""Check out related pull requests referenced in the given
        free-form GitHub-style comment.""",
        metavar='GITHUB-COMMENT',
        dest='github_comment')
    parser.add_argument(
        '--dump-hashes',
        action='store_true',
        help='Dump the git hashes of all repositories being tracked')
    parser.add_argument(
        '--dump-hashes-config',
        help='Dump the git hashes of all repositories packaged into '
        'update-checkout-config.json',
        metavar='BRANCH-SCHEME-NAME')
    parser.add_argument(
        "--tag",
        help="""Check out each repository to the specified tag.""",
        metavar='TAG-NAME')
    parser.add_argument(
        "--match-timestamp",
        help='Check out adjacent repositories to match timestamp of '
        ' current swift checkout.',
        action='store_true')
    parser.add_argument(
        "-j", "--jobs",
        type=int,
        help="Number of threads to run at once",
        default=0,
        dest="n_processes")
    parser.add_argument(
        "--source-root",
        help="The root directory to checkout repositories",
        default=SWIFT_SOURCE_ROOT,
        dest='source_root')
    args = parser.parse_args()

    # --reset-to-remote and --match-timestamp are only meaningful relative
    # to a branch scheme; reject them without one.
    if not args.scheme:
        if args.reset_to_remote:
            print("update-checkout usage error: --reset-to-remote must "
                  "specify --scheme=foo")
            sys.exit(1)
        if args.match_timestamp:
            # without a scheme, we won't be able match timestamps forward in
            # time, which is an annoying footgun for bisection etc.
            print("update-checkout usage error: --match-timestamp must "
                  "specify --scheme=foo")
            sys.exit(1)

    clone = args.clone
    clone_with_ssh = args.clone_with_ssh
    skip_history = args.skip_history
    scheme = args.scheme
    github_comment = args.github_comment

    with open(args.config) as f:
        config = json.load(f)
    validate_config(config)
    # The two --dump-hashes modes print and return without updating.
    if args.dump_hashes:
        dump_repo_hashes(args, config)
        return (None, None)
    if args.dump_hashes_config:
        dump_repo_hashes(args, config, args.dump_hashes_config)
        return (None, None)
    cross_repos_pr = {}
    if github_comment:
        # Extract "apple/<repo>/pull/<N>" and "apple/<repo>#<N>" references
        # and normalize them into a {remote_repo_id: pr_id} map.
        regex_pr = r'(apple/[-a-zA-Z0-9_]+/pull/\d+|apple/[-a-zA-Z0-9_]+#\d+)'
        repos_with_pr = re.findall(regex_pr, github_comment)
        print("Found related pull requests:", str(repos_with_pr))
        repos_with_pr = [pr.replace('/pull/', '#') for pr in repos_with_pr]
        cross_repos_pr = dict(pr.split('#') for pr in repos_with_pr)

    clone_results = None
    if clone or clone_with_ssh:
        # If branch is None, default to using the default branch alias
        # specified by our configuration file.
        if scheme is None:
            scheme = config['default-branch-scheme']
        skip_repo_list = skip_list_for_platform(config)
        skip_repo_list.extend(args.skip_repository_list)
        clone_results = obtain_all_additional_swift_sources(args, config,
                                                            clone_with_ssh,
                                                            scheme,
                                                            skip_history,
                                                            skip_repo_list)

    # Quick check whether somebody is calling update in an empty directory
    directory_contents = os.listdir(args.source_root)
    if not ('cmark' in directory_contents or
            'llvm' in directory_contents or
            'clang' in directory_contents):
        print("You don't have all swift sources. "
              "Call this script with --clone to get them.")

    update_results = update_all_repositories(args, config, scheme,
                                             cross_repos_pr)
    fail_count = 0
    fail_count += check_parallel_results(clone_results, "CLONE")
    fail_count += check_parallel_results(update_results, "UPDATE")
    if fail_count > 0:
        print("update-checkout failed, fix errors and try again")
    else:
        print("update-checkout succeeded")
        print_repo_hashes(args, config)
    sys.exit(fail_count)
| apache-2.0 |
wjlei1990/pycmt3d | src/pycmt3d/log_util.py | 1 | 4264 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
log util
:copyright:
Wenjie Lei (lei@princeton.edu), 2016
:license:
GNU Lesser General Public License, version 3 (LGPLv3)
(http://www.gnu.org/licenses/lgpl-3.0.en.html)
"""
from __future__ import (print_function, division, absolute_import)
import numpy as np
from . import logger
from .util import get_cmt_par
def inversion_result_table(npar, cmtsource, new_cmtsource,
                           bootstrap_flag=False, bootstrap_mean=None,
                           bootstrap_std=None,
                           bootstrap_std_over_mean=None):
    """Log a table comparing old and new CMT source parameters.

    :param npar: number of inverted parameters; must be within [6, 11]
    :param cmtsource: original CMT source
    :param new_cmtsource: inverted CMT source
    :param bootstrap_flag: if True, also log bootstrap mean/std columns
    :raises ValueError: if npar is outside [6, 11]
    """
    if npar < 6 or npar > 11:
        # Fixed: the original omitted "% npar", so the message printed a
        # literal "%d" instead of the offending value.
        raise ValueError("npar(%d) should be within [6, 11]" % npar)
    title = "*" * 20 + " Inversion Result Table(%d npar) " % \
        npar + "*" * 20
    logger.info(title)
    mattrs = ["m_rr", "m_tt", "m_pp", "m_rt", "m_rp", "m_tp"]
    lattrs = ["depth_in_m", "longitude", "latitude", "time_shift",
              "half_duration"]
    lattr_names = {"depth_in_m": "dep", "longitude": "lon",
                   "latitude": "lat", "time_shift": "tshift",
                   "half_duration": "hdr"}
    if not bootstrap_flag:
        logger.info("PAR         Old_CMT          New_CMT")
        for attr in mattrs:
            logger.info("%s:  %15.6e  %15.6e" % (
                attr, getattr(cmtsource, attr), getattr(new_cmtsource, attr)))
        # Location/time rows only exist for npar > 6.
        for idx in range(npar - 6):
            attr = lattrs[idx]
            logger.info("%s:  %15.3f  %15.3f" % (
                lattr_names[attr], getattr(cmtsource, attr),
                getattr(new_cmtsource, attr)))
    else:
        logger.info("PAR         Old_CMT          New_CMT     "
                    "Bootstrap_Mean  Bootstrap_STD  STD/Mean")
        for idx, attr in enumerate(mattrs):
            logger.info(
                "%s:  %15.6e  %15.6e  %15.6e  %15.6e  %10.2f%%" % (
                    attr, getattr(cmtsource, attr),
                    getattr(new_cmtsource, attr),
                    bootstrap_mean[idx], bootstrap_std[idx],
                    bootstrap_std_over_mean[idx] * 100))
        for idx in range(npar - 6):
            attr = lattrs[idx]
            # NOTE(review): unlike the moment-tensor loop above, the
            # std/mean ratio here is not scaled by 100 although the format
            # prints a '%' sign -- confirm whether "* 100" was intended.
            logger.info("%s:  %15.3f  %15.3f  %15.3f  %15.3f  %10.2f%%" % (
                lattr_names[attr],
                getattr(cmtsource, attr),
                getattr(new_cmtsource, attr),
                bootstrap_mean[idx + 6],
                bootstrap_std[idx + 6],
                bootstrap_std_over_mean[idx + 6]))
def fmt_cmt_par(data):
    """Format a CMT parameter vector for logging.

    :param data: sequence of 6 values (moment tensor), 7 (plus depth) or
        9 (plus depth, longitude and latitude)
    :returns: human-readable string
    :raises ValueError: if ``data`` has an unsupported length
    """
    if len(data) == 9:
        # Fixed: removed the stray unbalanced "]" that trailed the
        # original 9-element format string.
        return "MomentTensor=({:.4e}, {:.4e}, {:.4e}, {:.4e}, " \
               "{:.4e}, {:.4e}), Depth={:.2f} m, Longitude={:.2f}, "\
               "Latitude={:.2f}".format(*data)
    elif len(data) == 7:
        return "MomentTensor=({:.4e}, {:.4e}, {:.4e}, {:.4e}, " \
               "{:.4e}, {:.4e}), Depth={:.2f} m".format(*data)
    elif len(data) == 6:
        return "MomentTensor=({:.4e}, {:.4e}, {:.4e}, {:.4e}, " \
               "{:.4e}, {:.4e})".format(*data)
    else:
        # Fixed typo in error message: "Uknown" -> "Unknown".
        raise ValueError("Unknown cmt par length: {}".format(len(data)))
def print_inversion_summary(npar, cmtsource, new_cmtsource,
                            bootstrap=False, bmean=None, bstd=None,
                            bstd_over_mean=None):
    """Log a summary of the CMT inversion: old/new parameter vectors,
    their difference, the moment-tensor trace, the scalar-moment change,
    and finally the full result table.

    :param npar: number of inverted parameters (forwarded to
        inversion_result_table, which validates it)
    :param bootstrap: if True, bootstrap statistics are logged as well
    """
    logger.info("*" * 20)
    logger.info("Invert cmt parameters Summary (%d par)" % npar)
    def _fmt_(v):
        return "%.4e" % v
    # Slice the full parameter vectors down to the inverted subset.
    cmt_par = get_cmt_par(cmtsource)[:npar]
    new_cmt_par = get_cmt_par(new_cmtsource)[:npar]
    logger.info("Old CMT: {}".format(fmt_cmt_par(cmt_par)))
    logger.info("dm: [%s]" % (
        ', '.join(map(_fmt_, new_cmt_par - cmt_par))))
    logger.info("New CMT: {}".format(fmt_cmt_par(new_cmt_par)))
    # First three entries are the diagonal moment-tensor components.
    logger.info("Trace of Moment Tensor: %e" % (np.sum(new_cmt_par[0:3])))
    logger.info("Energy (Scalar Moment) Change: %5.2f%%" % (
        (new_cmtsource.M0 - cmtsource.M0) /
        cmtsource.M0 * 100.0))
    inversion_result_table(
        npar, cmtsource, new_cmtsource, bootstrap_flag=bootstrap,
        bootstrap_mean=bmean, bootstrap_std=bstd,
        bootstrap_std_over_mean=bstd_over_mean)
| lgpl-3.0 |
tjsavage/full_nonrel_starter | django/contrib/localflavor/ca/forms.py | 309 | 4523 | """
Canada-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
import re
phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
sin_re = re.compile(r"^(\d{3})-(\d{3})-(\d{3})$")
class CAPostalCodeField(RegexField):
    """
    Canadian postal code field.
    Validates against known invalid characters: D, F, I, O, Q, U
    Additionally the first character cannot be Z or W.
    For more info see:
    http://www.canadapost.ca/tools/pg/manual/PGaddress-e.asp#1402170
    """
    default_error_messages = {
        'invalid': _(u'Enter a postal code in the format XXX XXX.'),
    }

    def __init__(self, *args, **kwargs):
        # Forward sortation area (letter/digit/letter), a space, then the
        # local delivery unit (digit/letter/digit).
        pattern = (r'^[ABCEGHJKLMNPRSTVXY]\d[ABCEGHJKLMNPRSTVWXYZ] '
                   r'\d[ABCEGHJKLMNPRSTVWXYZ]\d$')
        super(CAPostalCodeField, self).__init__(
            pattern, max_length=None, min_length=None, *args, **kwargs)
class CAPhoneNumberField(Field):
    """Canadian phone number field."""
    default_error_messages = {
        'invalid': u'Phone numbers must be in XXX-XXX-XXXX format.',
    }

    def clean(self, value):
        """Normalize a phone number to XXX-XXX-XXXX, or raise."""
        super(CAPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Drop parentheses and whitespace before matching the digit groups.
        stripped = re.sub('(\(|\)|\s+)', '', smart_unicode(value))
        match = phone_digits_re.search(stripped)
        if not match:
            raise ValidationError(self.error_messages['invalid'])
        return u'%s-%s-%s' % match.groups()
class CAProvinceField(Field):
    """
    A form field that validates its input is a Canadian province name or abbreviation.

    It normalizes the input to the standard two-letter postal service
    abbreviation for the given province.
    """
    default_error_messages = {
        'invalid': u'Enter a Canadian province or territory.',
    }
    def clean(self, value):
        # Imported lazily so the province table is only loaded when the
        # field is actually used (Python 2 implicit relative import).
        from ca_provinces import PROVINCES_NORMALIZED
        super(CAProvinceField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        try:
            value = value.strip().lower()
        except AttributeError:
            # Non-string input (e.g. an int): fall through to 'invalid'.
            pass
        else:
            try:
                # NOTE(review): .decode('ascii') assumes Python 2 byte
                # strings in PROVINCES_NORMALIZED — would fail on Python 3.
                return PROVINCES_NORMALIZED[value.strip().lower()].decode('ascii')
            except KeyError:
                pass
        raise ValidationError(self.error_messages['invalid'])
class CAProvinceSelect(Select):
    """
    A Select widget that uses a list of Canadian provinces and
    territories as its choices.
    """
    def __init__(self, attrs=None):
        # Lazy Python 2 implicit relative import, mirroring CAProvinceField.
        from ca_provinces import PROVINCE_CHOICES # relative import
        super(CAProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class CASocialInsuranceNumberField(Field):
    """
    A Canadian Social Insurance Number (SIN).

    Checks the following rules to determine whether the number is valid:
        * Conforms to the XXX-XXX-XXX format.
        * Passes the "Luhn Algorithm" check digit process.
    See: http://en.wikipedia.org/wiki/Social_Insurance_Number
    """
    default_error_messages = {
        'invalid': _('Enter a valid Canadian Social Insurance number in XXX-XXX-XXX format.'),
    }
    def clean(self, value):
        super(CASocialInsuranceNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        match = re.match(sin_re, value)
        if match is None:
            raise ValidationError(self.error_messages['invalid'])
        groups = (match.group(1), match.group(2), match.group(3))
        # The checksum runs over the bare nine digits, without dashes.
        if not self.luhn_checksum_is_valid(u'%s%s%s' % groups):
            raise ValidationError(self.error_messages['invalid'])
        return u'%s-%s-%s' % groups
    def luhn_checksum_is_valid(self, number):
        """
        Checks to make sure that the SIN passes a luhn mod-10 checksum.
        See: http://en.wikipedia.org/wiki/Luhn_algorithm
        """
        parity = len(number) & 1
        total = 0
        for position, char in enumerate(number):
            digit = int(char)
            # Double digits whose position parity matches the length
            # parity, folding two-digit results back into one digit.
            if not ((position & 1) ^ parity):
                digit *= 2
                if digit > 9:
                    digit -= 9
            total += digit
        return (total % 10) == 0
| bsd-3-clause |
eduNEXT/edx-platform | cms/djangoapps/contentstore/tests/test_orphan.py | 4 | 10594 | """
Test finding orphans via the view and django config
"""
import json
import ddt
from opaque_keys.edx.locator import BlockUsageLocator
from cms.djangoapps.contentstore.tests.utils import CourseTestCase
from cms.djangoapps.contentstore.utils import reverse_course_url
from common.djangoapps.student.models import CourseEnrollment
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.search import path_to_location
from xmodule.modulestore.tests.factories import CourseFactory, check_mongo_calls_range
class TestOrphanBase(CourseTestCase):
    """
    Base class for Studio tests that require orphaned modules.
    """
    def create_course_with_orphans(self, default_store):
        """
        Creates a course with 3 orphan modules, one of which
        has a child that's also in the course tree.

        Resulting layout (orphans marked with *):
            course -> Chapter1 -> Vertical1 -> Html1, multi_parent_html
            course -> Chapter2
            *OrphanChapter
            *OrphanVert -> multi_parent_html (second parent)
            *OrphanHtml
        """
        course = CourseFactory.create(default_store=default_store)
        # create chapters and add them to course tree
        chapter1 = self.store.create_child(self.user.id, course.location, 'chapter', "Chapter1")
        self.store.publish(chapter1.location, self.user.id)
        chapter2 = self.store.create_child(self.user.id, course.location, 'chapter', "Chapter2")
        self.store.publish(chapter2.location, self.user.id)
        # orphan chapter (created with create_item, so it has no parent)
        orphan_chapter = self.store.create_item(self.user.id, course.id, 'chapter', "OrphanChapter")
        self.store.publish(orphan_chapter.location, self.user.id)
        # create vertical and add it as child to chapter1
        vertical1 = self.store.create_child(self.user.id, chapter1.location, 'vertical', "Vertical1")
        self.store.publish(vertical1.location, self.user.id)
        # create orphan vertical
        orphan_vertical = self.store.create_item(self.user.id, course.id, 'vertical', "OrphanVert")
        self.store.publish(orphan_vertical.location, self.user.id)
        # create component and add it to vertical1
        html1 = self.store.create_child(self.user.id, vertical1.location, 'html', "Html1")
        self.store.publish(html1.location, self.user.id)
        # create component and add it as a child to vertical1 and orphan_vertical
        multi_parent_html = self.store.create_child(self.user.id, vertical1.location, 'html', "multi_parent_html")
        self.store.publish(multi_parent_html.location, self.user.id)
        orphan_vertical.children.append(multi_parent_html.location)
        self.store.update_item(orphan_vertical, self.user.id)
        # create an orphaned html module
        orphan_html = self.store.create_item(self.user.id, course.id, 'html', "OrphanHtml")
        self.store.publish(orphan_html.location, self.user.id)
        # static_tab and course_info children are detached categories and
        # must not be reported as orphans by the handler under test.
        self.store.create_child(self.user.id, course.location, 'static_tab', "staticuno")
        self.store.create_child(self.user.id, course.location, 'course_info', "updates")
        return course
    def assertOrphanCount(self, course_key, number):
        """
        Asserts that we have the expected count of orphans
        for a given course_key.
        """
        self.assertEqual(len(self.store.get_orphans(course_key)), number)
@ddt.ddt
class TestOrphan(TestOrphanBase):
    """
    Test finding orphans via view and django config.

    Each test is parameterized over the modulestore backends; the course
    fixture (3 orphans, one shared child) comes from TestOrphanBase.
    """
    @ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
    def test_get_orphans(self, default_store):
        """
        Test that the orphan handler finds the orphans.
        """
        course = self.create_course_with_orphans(default_store)
        orphan_url = reverse_course_url('orphan_handler', course.id)
        orphans = json.loads(
            self.client.get(
                orphan_url,
                HTTP_ACCEPT='application/json'
            ).content.decode('utf-8')
        )
        # Exactly the three roots created by the fixture; the shared child
        # (multi_parent_html) must not appear because it has an in-tree parent.
        self.assertEqual(len(orphans), 3, f"Wrong # {orphans}")
        location = course.location.replace(category='chapter', name='OrphanChapter')
        self.assertIn(str(location), orphans)
        location = course.location.replace(category='vertical', name='OrphanVert')
        self.assertIn(str(location), orphans)
        location = course.location.replace(category='html', name='OrphanHtml')
        self.assertIn(str(location), orphans)
    @ddt.data(
        # (store type, max expected mongo calls, min expected mongo calls)
        (ModuleStoreEnum.Type.split, 9, 5),
        (ModuleStoreEnum.Type.mongo, 34, 12),
    )
    @ddt.unpack
    def test_delete_orphans(self, default_store, max_mongo_calls, min_mongo_calls):
        """
        Test that the orphan handler deletes the orphans.
        """
        course = self.create_course_with_orphans(default_store)
        orphan_url = reverse_course_url('orphan_handler', course.id)
        # Also acts as a regression guard on the number of mongo queries.
        with check_mongo_calls_range(max_mongo_calls, min_mongo_calls):
            self.client.delete(orphan_url)
        orphans = json.loads(
            self.client.get(orphan_url, HTTP_ACCEPT='application/json').content.decode('utf-8')
        )
        self.assertEqual(len(orphans), 0, f"Orphans not deleted {orphans}")
        # make sure that any children with one orphan parent and one non-orphan
        # parent are not deleted
        self.assertTrue(self.store.has_item(course.id.make_usage_key('html', "multi_parent_html")))
    @ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
    def test_not_permitted(self, default_store):
        """
        Test that auth restricts get and delete appropriately.
        """
        course = self.create_course_with_orphans(default_store)
        orphan_url = reverse_course_url('orphan_handler', course.id)
        # An enrolled but non-staff user must get 403 for both verbs.
        test_user_client, test_user = self.create_non_staff_authed_user_client()
        CourseEnrollment.enroll(test_user, course.id)
        response = test_user_client.get(orphan_url)
        self.assertEqual(response.status_code, 403)
        response = test_user_client.delete(orphan_url)
        self.assertEqual(response.status_code, 403)
    @ddt.data(ModuleStoreEnum.Type.split)
    def test_path_to_location_for_orphan_vertical(self, module_store):
        r"""
        Make sure that path_to_location works with a component having multiple vertical parents,
        from which one of them is orphan.
        course
            |
        chapter
            |
        vertical     vertical (orphan)
              \     /
               html
        """
        # Get a course with orphan modules
        course = self.create_course_with_orphans(module_store)
        # Fetch the required course components.
        vertical1 = self.store.get_item(BlockUsageLocator(course.id, 'vertical', 'Vertical1'))
        chapter1 = self.store.get_item(BlockUsageLocator(course.id, 'chapter', 'Chapter1'))
        orphan_vertical = self.store.get_item(BlockUsageLocator(course.id, 'vertical', 'OrphanVert'))
        multi_parent_html = self.store.get_item(BlockUsageLocator(course.id, 'html', 'multi_parent_html'))
        # Verify `OrphanVert` is an orphan
        self.assertIn(orphan_vertical.location, self.store.get_orphans(course.id))
        # Verify `multi_parent_html` is child of both `Vertical1` and `OrphanVert`
        self.assertIn(multi_parent_html.location, orphan_vertical.children)
        self.assertIn(multi_parent_html.location, vertical1.children)
        # HTML component has `vertical1` as its parent: the orphan parent
        # must never be chosen over an in-tree parent.
        html_parent = self.store.get_parent_location(multi_parent_html.location)
        self.assertNotEqual(str(html_parent), str(orphan_vertical.location))
        self.assertEqual(str(html_parent), str(vertical1.location))
        # Get path of the `multi_parent_html` & verify path_to_location returns a expected path
        path = path_to_location(self.store, multi_parent_html.location)
        expected_path = (
            course.id,
            chapter1.location.block_id,
            vertical1.location.block_id,
            multi_parent_html.location.block_id,
            "",
            path[-1]
        )
        self.assertIsNotNone(path)
        self.assertEqual(len(path), 6)
        self.assertEqual(path, expected_path)
    @ddt.data(ModuleStoreEnum.Type.split)
    def test_path_to_location_for_orphan_chapter(self, module_store):
        r"""
        Make sure that path_to_location works with a component having multiple chapter parents,
        from which one of them is orphan.
        course
            |
        chapter    chapter (orphan)
            |         |
        vertical   vertical
              \    /
               html
        """
        # Get a course with orphan modules
        course = self.create_course_with_orphans(module_store)
        orphan_chapter = self.store.get_item(BlockUsageLocator(course.id, 'chapter', 'OrphanChapter'))
        chapter1 = self.store.get_item(BlockUsageLocator(course.id, 'chapter', 'Chapter1'))
        vertical1 = self.store.get_item(BlockUsageLocator(course.id, 'vertical', 'Vertical1'))
        # Verify `OrhanChapter` is an orphan
        self.assertIn(orphan_chapter.location, self.store.get_orphans(course.id))
        # Create a vertical (`Vertical0`) in orphan chapter (`OrphanChapter`).
        # OrphanChapter -> Vertical0
        vertical0 = self.store.create_child(self.user.id, orphan_chapter.location, 'vertical', "Vertical0")
        self.store.publish(vertical0.location, self.user.id)
        # Create a component in `Vertical0`
        # OrphanChapter -> Vertical0 -> Html
        html = self.store.create_child(self.user.id, vertical0.location, 'html', "HTML0")
        self.store.publish(html.location, self.user.id)
        # Verify chapter1 is parent of vertical1.
        vertical1_parent = self.store.get_parent_location(vertical1.location)
        self.assertEqual(str(vertical1_parent), str(chapter1.location))
        # Make `Vertical1` the parent of `HTML0`. So `HTML0` will have to parents (`Vertical0` & `Vertical1`)
        vertical1.children.append(html.location)
        self.store.update_item(vertical1, self.user.id)
        # Get parent location & verify its either of the two verticals. As both parents are non-orphan,
        # alphabetically least is returned
        html_parent = self.store.get_parent_location(html.location)
        self.assertEqual(str(html_parent), str(vertical1.location))
        # verify path_to_location returns a expected path
        path = path_to_location(self.store, html.location)
        expected_path = (
            course.id,
            chapter1.location.block_id,
            vertical1.location.block_id,
            html.location.block_id,
            "",
            path[-1]
        )
        self.assertIsNotNone(path)
        self.assertEqual(len(path), 6)
        self.assertEqual(path, expected_path)
| agpl-3.0 |
tlatzko/spmcluster | .tox/docs/lib/python2.7/site-packages/sphinx/websupport/storage/differ.py | 5 | 2603 | # -*- coding: utf-8 -*-
"""
sphinx.websupport.storage.differ
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A differ for creating an HTML representations of proposal diffs
:copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from difflib import Differ
from sphinx.util.pycompat import htmlescape
class CombinedHtmlDiff(object):
    """Create an HTML representation of the differences between two pieces
    of text.
    """
    # Matches the marker runs ('+', '-', '^') on difflib's '?' hint lines.
    highlight_regex = re.compile(r'([\+\-\^]+)')

    def __init__(self, source, proposal):
        # Only the proposal is escaped: the rendered diff shows proposal
        # text inside HTML spans.
        proposal = htmlescape(proposal)
        differ = Differ()
        self.diff = list(differ.compare(source.splitlines(1),
                                        proposal.splitlines(1)))

    def make_text(self):
        """Return the raw difflib diff joined as plain text."""
        return '\n'.join(self.diff)

    def make_html(self):
        """Return the HTML representation of the differences between
        `source` and `proposal`.
        """
        html = []
        diff = self.diff[:]
        if not diff:
            # Nothing to render.
            return ''
        line = diff.pop(0)
        # Walk the diff pairwise so each line can inspect the following
        # '?' hint line (if any); the last line is handled on its own.
        # (The original popped two lines up front and raised IndexError
        # whenever the diff contained fewer than two lines, e.g. for
        # identical single-line inputs.)
        while diff:
            upcoming = diff.pop(0)
            html.append(self._handle_line(line, upcoming))
            line = upcoming
        html.append(self._handle_line(line))
        return ''.join(html).rstrip()

    def _handle_line(self, line, next_line=None):
        """Handle an individual line in a diff.

        :param line: a difflib output line ('  ', '- ', '+ ' or '? ' prefix)
        :param next_line: the following diff line, used to detect an
            intraline '?' hint belonging to `line`
        """
        prefix = line[0]
        text = line[2:]
        if prefix == ' ':
            return text
        elif prefix == '?':
            # Hint lines are consumed by the preceding line's highlighting.
            return ''
        if next_line is not None and next_line[0] == '?':
            tag = prefix == '+' and 'ins' or 'del'
            text = self._highlight_text(text, next_line, tag)
        css_class = prefix == '+' and 'prop-added' or 'prop-removed'
        return '<span class="%s">%s</span>\n' % (css_class, text.rstrip())

    def _highlight_text(self, text, hint_line, tag):
        """Highlight the specific changes made to a line by adding
        <ins> and <del> tags.
        """
        hint = hint_line[2:]
        new_text = []
        start = 0
        # Each marker run in the hint line covers the changed columns of
        # `text`; wrap exactly those columns in the tag.
        for match in self.highlight_regex.finditer(hint):
            new_text.append(text[start:match.start()])
            new_text.append('<%s>' % tag)
            new_text.append(text[match.start():match.end()])
            new_text.append('</%s>' % tag)
            start = match.end()
        new_text.append(text[start:])
        return ''.join(new_text)
| bsd-2-clause |
Lektorium-LLC/edx-platform | common/lib/xmodule/xmodule/tests/test_bulk_assertions.py | 173 | 5627 | import ddt
import itertools
from xmodule.tests import BulkAssertionTest, BulkAssertionError
# Fixture tuples of (assertion_method_name, *args).  "STATIC" groups are
# plain value assertions; "CONTEXT" groups are assertions usable as context
# managers (assertRaises family).  The PASSING variants must succeed and the
# FAILING variants must raise AssertionError when replayed by the tests.
STATIC_PASSING_ASSERTIONS = (
    ('assertTrue', True),
    ('assertFalse', False),
    ('assertIs', 1, 1),
    ('assertEqual', 1, 1),
    ('assertEquals', 1, 1),
    ('assertIsNot', 1, 2),
    ('assertIsNone', None),
    ('assertIsNotNone', 1),
    ('assertIn', 1, (1, 2, 3)),
    ('assertNotIn', 5, (1, 2, 3)),
    ('assertIsInstance', 1, int),
    ('assertNotIsInstance', '1', int),
    ('assertItemsEqual', [1, 2, 3], [3, 2, 1])
)
STATIC_FAILING_ASSERTIONS = (
    ('assertTrue', False),
    ('assertFalse', True),
    ('assertIs', 1, 2),
    ('assertEqual', 1, 2),
    ('assertEquals', 1, 2),
    ('assertIsNot', 1, 1),
    ('assertIsNone', 1),
    ('assertIsNotNone', None),
    ('assertIn', 5, (1, 2, 3)),
    ('assertNotIn', 1, (1, 2, 3)),
    ('assertIsInstance', '1', int),
    ('assertNotIsInstance', 1, int),
    ('assertItemsEqual', [1, 1, 1], [1, 1])
)
# Shape: (assertion, expected_exception, [regexp], callable, *callable_args).
CONTEXT_PASSING_ASSERTIONS = (
    ('assertRaises', KeyError, {}.__getitem__, '1'),
    ('assertRaisesRegexp', KeyError, "1", {}.__getitem__, '1'),
)
CONTEXT_FAILING_ASSERTIONS = (
    ('assertRaises', ValueError, lambda: None),
    ('assertRaisesRegexp', KeyError, "2", {}.__getitem__, '1'),
)
@ddt.ddt
class TestBulkAssertionTestCase(BulkAssertionTest):
    """
    Verify that BulkAssertionTest's overridden assertions behave like the
    stock unittest assertions outside bulk mode, and that bulk_assertions()
    collects failures into a single BulkAssertionError.
    """
    # We have to use assertion methods from the base UnitTest class,
    # so we make a number of super calls that skip BulkAssertionTest.
    # pylint: disable=bad-super-call
    def _run_assertion(self, assertion_tuple):
        """
        Run the supplied tuple of (assertion, *args) as a method on this class.
        """
        assertion, args = assertion_tuple[0], assertion_tuple[1:]
        getattr(self, assertion)(*args)
    def _raw_assert(self, assertion_name, *args, **kwargs):
        """
        Run an un-modified assertion.
        """
        # Use super(BulkAssertionTest) to make sure we get un-adulturated assertions
        return getattr(super(BulkAssertionTest, self), 'assert' + assertion_name)(*args, **kwargs)
    @ddt.data(*(STATIC_PASSING_ASSERTIONS + CONTEXT_PASSING_ASSERTIONS))
    def test_passing_asserts_passthrough(self, assertion_tuple):
        # Passing assertions must be silent outside bulk mode.
        self._run_assertion(assertion_tuple)
    @ddt.data(*(STATIC_FAILING_ASSERTIONS + CONTEXT_FAILING_ASSERTIONS))
    def test_failing_asserts_passthrough(self, assertion_tuple):
        # Outside bulk mode a failure raises a plain AssertionError,
        # never the aggregated BulkAssertionError.
        with self._raw_assert('Raises', AssertionError) as context:
            self._run_assertion(assertion_tuple)
        self._raw_assert('NotIsInstance', context.exception, BulkAssertionError)
    @ddt.data(*CONTEXT_PASSING_ASSERTIONS)
    @ddt.unpack
    def test_passing_context_assertion_passthrough(self, assertion, *args):
        # Split the fixture into (exception, assertion-args..., callable, call-args...).
        assertion_args = []
        args = list(args)
        exception = args.pop(0)
        while not callable(args[0]):
            assertion_args.append(args.pop(0))
        function = args.pop(0)
        with getattr(self, assertion)(exception, *assertion_args):
            function(*args)
    @ddt.data(*CONTEXT_FAILING_ASSERTIONS)
    @ddt.unpack
    def test_failing_context_assertion_passthrough(self, assertion, *args):
        assertion_args = []
        args = list(args)
        exception = args.pop(0)
        while not callable(args[0]):
            assertion_args.append(args.pop(0))
        function = args.pop(0)
        with self._raw_assert('Raises', AssertionError) as context:
            with getattr(self, assertion)(exception, *assertion_args):
                function(*args)
        self._raw_assert('NotIsInstance', context.exception, BulkAssertionError)
    @ddt.data(*list(itertools.product(
        CONTEXT_PASSING_ASSERTIONS,
        CONTEXT_FAILING_ASSERTIONS,
        CONTEXT_FAILING_ASSERTIONS
    )))
    @ddt.unpack
    def test_bulk_assert(self, passing_assertion, failing_assertion1, failing_assertion2):
        # Drive the context manager by hand so the failing assertions run
        # inside bulk mode and the aggregate error surfaces on __exit__.
        contextmanager = self.bulk_assertions()
        contextmanager.__enter__()
        self._run_assertion(passing_assertion)
        self._run_assertion(failing_assertion1)
        self._run_assertion(failing_assertion2)
        with self._raw_assert('Raises', BulkAssertionError) as context:
            contextmanager.__exit__(None, None, None)
        self._raw_assert('Equals', len(context.exception.errors), 2)
    @ddt.data(*list(itertools.product(
        CONTEXT_FAILING_ASSERTIONS
    )))
    @ddt.unpack
    def test_nested_bulk_asserts(self, failing_assertion):
        # Nested bulk_assertions() blocks must fold into the outermost one:
        # all three failures are reported together.
        with self._raw_assert('Raises', BulkAssertionError) as context:
            with self.bulk_assertions():
                self._run_assertion(failing_assertion)
                with self.bulk_assertions():
                    self._run_assertion(failing_assertion)
                    self._run_assertion(failing_assertion)
        self._raw_assert('Equal', len(context.exception.errors), 3)
    @ddt.data(*list(itertools.product(
        CONTEXT_PASSING_ASSERTIONS,
        CONTEXT_FAILING_ASSERTIONS,
        CONTEXT_FAILING_ASSERTIONS
    )))
    @ddt.unpack
    def test_bulk_assert_closed(self, passing_assertion, failing_assertion1, failing_assertion2):
        # After the bulk block closes, assertions revert to pass-through
        # behavior (plain AssertionError, not aggregated).
        with self._raw_assert('Raises', BulkAssertionError) as context:
            with self.bulk_assertions():
                self._run_assertion(passing_assertion)
                self._run_assertion(failing_assertion1)
        self._raw_assert('Equals', len(context.exception.errors), 1)
        with self._raw_assert('Raises', AssertionError) as context:
            self._run_assertion(failing_assertion2)
        self._raw_assert('NotIsInstance', context.exception, BulkAssertionError)
| agpl-3.0 |
waynesun09/tp-libvirt | libvirt/tests/src/virsh_cmd/host/virsh_freepages.py | 4 | 4927 | import logging
from autotest.client.shared import error
from virttest import virsh
from virttest import test_setup
from virttest import utils_misc
from virttest.utils_test import libvirt as utlv
def check_freepages(output, expect_result_list):
    """
    Check output of virsh freepages.  Free pages of the default page size
    change constantly, so only huge page counts are meaningfully checked.

    :param output: virsh freepages stdout in one of three shapes:
        per-node listings headed by "Node N:" lines (with or without a
        --pagesize filter), or a bare "<size>KiB: <count>" listing when
        --cellno was given.
    :param expect_result_list: list of expected dicts, e.g.
        [{"Node": "Node 0", "2048KiB": 1}]
    :return: True when every expectation is satisfied
    """
    # Parse the output into one dict per cell; a trailing blank line is
    # appended so the final cell always gets flushed.
    parsed_cells = []
    current_cell = {}
    for line in (output + "\n\n").splitlines():
        if not line:
            parsed_cells.append(current_cell)
            current_cell = {}
            continue
        key, value = line.split(":")
        if "Node" in key:
            current_cell["Node"] = key
        else:
            current_cell[key] = value.strip()
    results = []
    for expect in expect_result_list:
        matched = False
        if "Node" in expect.keys():
            # The expectation names a node: any parsed cell whose items
            # are a superset of the expected items satisfies it.
            for cell in parsed_cells:
                logging.info(set(cell.items()))
                logging.info(set(expect.items()))
                if set(cell.items()) >= set(expect.items()):
                    matched = True
                    break
        else:
            # Single-cell output must equal the first parsed cell exactly.
            if expect == parsed_cells[0]:
                matched = True
        results.append(matched)
    return False not in results
def run(test, params, env):
    """
    Test virsh command virsh freepages.

    :param test: autotest test object
    :param params: cartesian config parameters for this test
    :param env: test environment object
    """
    option = params.get("freepages_option", "")
    status_error = "yes" == params.get("status_error", "no")
    cellno = params.get("freepages_cellno")
    pagesize = params.get("freepages_page_size")
    huge_pages_num = params.get("huge_pages_num")
    hp_cl = test_setup.HugePageConfig(params)
    default_hp_size = hp_cl.get_hugepage_size()
    supported_hp_size = hp_cl.get_multi_supported_hugepage_size()
    host_numa_node = utils_misc.NumaInfo()
    node_list = host_numa_node.online_nodes
    logging.debug("host node list is %s", node_list)
    # Iterate over a copy: the original removed entries from node_list
    # while iterating it, which silently skips the element following each
    # removed node.  ("as" except syntax is valid on Python 2.6+.)
    for i in list(node_list):
        try:
            hp_cl.get_node_num_huge_pages(i, default_hp_size)
        except ValueError as e:
            logging.warning("%s", e)
            logging.debug('move node %s out of testing node list', i)
            node_list.remove(i)
    cellno_list = []
    if cellno == "AUTO":
        cellno_list = node_list
    elif cellno == "OUT_OF_RANGE":
        # Deliberately invalid cell number for negative testing.
        cellno_list.append(max(node_list) + 1)
    else:
        cellno_list.append(cellno)
    pagesize_list = []
    if pagesize == "AUTO":
        pagesize_list = supported_hp_size
    else:
        pagesize_list.append(pagesize)
    if huge_pages_num and not status_error:
        try:
            huge_pages_num = int(huge_pages_num)
        except (TypeError, ValueError):
            raise error.TestError("Huge page value %s should be an integer" %
                                  huge_pages_num)
        # Set huge page for positive test
        for i in node_list:
            hp_cl.set_node_num_huge_pages(huge_pages_num, i, default_hp_size)
    # Run test
    for cell in cellno_list:
        for page in pagesize_list:
            result = virsh.freepages(cellno=cell,
                                     pagesize=page,
                                     options=option,
                                     debug=True)
            utlv.check_exit_status(result, status_error)
            expect_result_list = []
            # Check huge pages only for the positive case and the default
            # huge page size (other sizes were not configured above).
            if (not status_error and
                    huge_pages_num and
                    page == str(default_hp_size)):
                page_k = "%sKiB" % page
                node_dict = {page_k: str(huge_pages_num)}
                if cell is None:
                    # No --cellno: expect one entry per remaining node.
                    for i in node_list:
                        tmp_dict = {}
                        tmp_dict['Node'] = "Node %s" % i
                        tmp_dict.update(node_dict)
                        expect_result_list.append(tmp_dict)
                else:
                    expect_result_list = [node_dict]
                if check_freepages(result.stdout.strip(), expect_result_list):
                    logging.info("Huge page freepages check pass")
                else:
                    raise error.TestFail("Huge page freepages check failed,"
                                         " expect result is %s" %
                                         expect_result_list)
| gpl-2.0 |
rosswhitfield/javelin | javelin/unitcell.py | 1 | 8017 | """
========
unitcell
========
"""
import numpy as np
class UnitCell:
    """The UnitCell object can be set with either 1, 3 or 6 parameters
    corresponding to cubic ``a`` parameters, ``(a, b, c)`` or ``(a, b,
    c, alpha, beta, gamma)``, where angles are in degrees.

    >>> cubic = UnitCell(5)
    >>> cubic.cell
    (5.0, 5.0, 5.0, 90.0, 90.0, 90.0)

    >>> orthorhombic = UnitCell(5, 6, 7)
    >>> orthorhombic.cell
    (5.0, 6.0, 7.0, 90.0, 90.0, 90.0)

    >>> unitcell = UnitCell(4.0, 3.0, 6.0, 89.0, 90.0, 97.0)
    >>> unitcell.cell
    (4.0, 3.0, 6.0, 89.0, 90.0, 97.0)

    UnitCell objects can be set after being created simply by

    >>> unitcell = UnitCell()
    >>> unitcell.cell = 6
    >>> unitcell.cell
    (6.0, 6.0, 6.0, 90.0, 90.0, 90.0)
    >>> unitcell.cell = 3, 4, 5
    >>> unitcell.cell
    (3.0, 4.0, 5.0, 90.0, 90.0, 90.0)
    >>> unitcell.cell = 6, 7, 8, 91.0, 90, 89
    >>> unitcell.cell
    (6.0, 7.0, 8.0, 91.0, 90.0, 89.0)
    >>> # or using a list or tuple
    >>> unitcell.cell = [8, 7, 6, 89, 90, 90]
    >>> unitcell.cell
    (8.0, 7.0, 6.0, 89.0, 90.0, 90.0)
    """
    def __init__(self, *args):
        # Direct lattice parameters (angles stored in radians).
        self.a = 1
        self.b = 1
        self.c = 1
        self.alpha = np.radians(90)
        self.beta = np.radians(90)
        self.gamma = np.radians(90)
        # Reciprocal lattice parameters (angles stored in radians).
        self.ra = 1  # a*
        self.rb = 1  # b*
        self.rc = 1  # c*
        self.ralpha = np.radians(90)  # alpha*
        self.rbeta = np.radians(90)  # beta*
        self.rgamma = np.radians(90)  # gamma*
        # Cached metric tensor and B matrix, refreshed by the cell setter.
        self.__G = np.eye(3)
        self.__Gstar = np.eye(3)
        self.__B = np.eye(3)
        if args:
            self.cell = args
    def __eq__(self, other):
        # Compares lattice parameters only; `other` must expose `.cell`.
        return self.cell == other.cell
    def __repr__(self):
        return "a={}, b={}, c={}, alpha={}, beta={}, gamma={}".format(*self.cell)
    def cartesian(self, u):
        """Return Cartesian coordinates of a lattice vector.

        >>> unitcell = UnitCell(3,4,5,90,90,120)
        >>> unitcell.cartesian([1,0,0])
        array([ 2.59807621e+00, -1.50000000e+00, 3.25954010e-16])

        A array of atoms position can also be passed

        >>> positions = [[1,0,0], [0,0,0.5]]
        >>> unitcell.cartesian(positions)
        array([[ 2.59807621e+00, -1.50000000e+00, 3.25954010e-16],
               [ 0.00000000e+00, 0.00000000e+00, 2.50000000e+00]])
        """
        return np.dot(u, self.Binv)
    @property
    def cell(self):
        """Return the unit cell parameters (*a*, *b*, *c*, *alpha*, *beta*,
        *gamma*) in degrees.
        """
        return (self.a, self.b, self.c,
                np.degrees(self.alpha),
                np.degrees(self.beta),
                np.degrees(self.gamma))
    @cell.setter
    def cell(self, *args):
        """Sets the unit cell with either 1, 3 or 6 parameters corresponding
        to cubic ``a`` parameters, ``(a, b, c)`` or ``(a, b, c, alpha,
        beta, gamma)``, where angles are in degrees

        :raises ValueError: if the number of parameters is not 1, 3 or 6
        """
        # NOTE: np.float was removed in NumPy 1.24; plain float() is
        # equivalent here and keeps the class working on modern NumPy.
        args = np.asarray(args).flatten()
        if args.size == 1:  # cubic
            self.a = self.b = self.c = float(args[0])
            self.alpha = self.beta = self.gamma = np.radians(90)
        elif args.size == 3:  # orthorhombic
            a, b, c = args[0:3]
            # A zero length is replaced by 1 to keep the metric invertible.
            self.a = 1 if a == 0 else float(a)
            self.b = 1 if b == 0 else float(b)
            self.c = 1 if c == 0 else float(c)
            self.alpha = self.beta = self.gamma = np.radians(90)
        elif args.size == 6:
            a, b, c = args[0:3]
            self.a = 1 if a == 0 else float(a)
            self.b = 1 if b == 0 else float(b)
            self.c = 1 if c == 0 else float(c)
            self.alpha = np.radians(args[3])
            self.beta = np.radians(args[4])
            self.gamma = np.radians(args[5])
        else:
            raise ValueError("Invalid number of variables, unit cell unchanged")
        # Refresh the cached tensors for the new parameters.
        self.__calculateG()
        self.__calculateReciprocalLattice()
        self.__calculateB()
    def fractional(self, u):
        """Return fractional coordinates of a Cartesian vector.

        >>> unitcell = UnitCell(3,4,5,90,90,120)
        >>> unitcell.fractional([0,4,0])
        array([ 0.00000000e+00, 1.00000000e+00, -4.89858720e-17])

        A array of atoms position can also be passed

        >>> positions = [[0,2,0], [0,0,5]]
        >>> unitcell.fractional(positions)
        array([[ 0.00000000e+00, 5.00000000e-01, -2.44929360e-17],
               [ 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]])
        """
        return (u @ self.B)
    @property
    def G(self):
        """Returns the metric tensor **G**"""
        return self.__G
    @property
    def Gstar(self):
        """Returns the reciprocal metric tensor **G*** (inverse of **G**)"""
        return np.linalg.inv(self.G)
    @property
    def B(self):
        """Returns the **B** matrix"""
        return self.__B
    @property
    def Binv(self):
        """Returns the inverse **B** matrix"""
        return np.linalg.inv(self.B)
    def dstar(self, h, k, l):
        """Returns d*=1/d for given h,k,l"""
        return np.linalg.norm(self.B @ np.array([[h],
                                                 [k],
                                                 [l]]))
    def d(self, h, k, l):
        """Returns d-spacing for given h,k,l"""
        return 1/self.dstar(h, k, l)
    def recAngle(self, h1, k1, l1, h2, k2, l2, degrees=False):
        """Calculates the angle between two reciprocal vectors

        :param degrees: return the angle in degrees instead of radians
        """
        q1 = np.array([[h1], [k1], [l1]])
        q2 = np.array([[h2], [k2], [l2]])
        # cos(angle) = (q1 . G* . q2) / (|q1*| |q2*|)
        q1 = self.Gstar @ q1
        E = (q1.T @ q2).sum()
        angle = np.arccos(E / (self.dstar(h1, k1, l1) * self.dstar(h2, k2, l2)))
        if degrees:
            return np.degrees(angle)
        else:
            return angle
    @property
    def volume(self):
        """Returns the unit cell volume"""
        return np.sqrt(np.linalg.det(self.__G))
    @property
    def reciprocalVolume(self):
        """Returns the unit cell reciprocal volume"""
        return np.sqrt(np.linalg.det(self.Gstar))
    @property
    def reciprocalCell(self):
        """Return the reciprocal unit cell parameters (*a**, *b**, *c**,
        *alpha**, *beta**, *gamma**) in degrees.
        """
        return (self.ra, self.rb, self.rc,
                np.degrees(self.ralpha),
                np.degrees(self.rbeta),
                np.degrees(self.rgamma))
    def __calculateReciprocalLattice(self):
        """Calculates the reciprocal lattice parameters from G*"""
        Gstar = self.Gstar
        self.ra = np.sqrt(Gstar[0, 0])
        self.rb = np.sqrt(Gstar[1, 1])
        self.rc = np.sqrt(Gstar[2, 2])
        self.ralpha = np.arccos(Gstar[1, 2] / (self.rb*self.rc))
        self.rbeta = np.arccos(Gstar[0, 2] / (self.ra*self.rc))
        self.rgamma = np.arccos(Gstar[0, 1] / (self.ra*self.rb))
    def __calculateG(self):
        """Calculates the metric tensor from unit cell parameters

        :raises ValueError: if one angle exceeds the sum of the other two
            (no valid cell exists for such angles)
        """
        if ((self.alpha > self.beta + self.gamma) or
                (self.beta > self.alpha + self.gamma) or
                (self.gamma > self.alpha + self.beta)):
            raise ValueError("Invalid angles")
        ca = np.cos(self.alpha)
        cb = np.cos(self.beta)
        cg = np.cos(self.gamma)
        self.__G = np.array([[self.a**2, self.a * self.b * cg, self.a * self.c * cb],
                             [self.a * self.b * cg, self.b**2, self.b * self.c * ca],
                             [self.a * self.c * cb, self.b * self.c * ca, self.c**2]])
    def __calculateB(self):
        """Calculates the B matrix from the reciprocal lattice parameters
        (Busing-Levy convention)."""
        self.__B = np.array([[self.ra, self.rb * np.cos(self.rgamma),
                              self.rc * np.cos(self.rbeta)],
                             [0, self.rb * np.sin(self.rgamma),
                              - self.rc * np.sin(self.rbeta) * np.cos(self.alpha)],
                             [0, 0, 1/self.c]])
treyhunner/pelican | pelican/tests/default_conf.py | 18 | 1373 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
# Pelican settings used as a fixture by the test suite.
AUTHOR = 'Alexis Métaireau'
SITENAME = "Alexis' log"
SITEURL = 'http://blog.notmyidea.org'
TIMEZONE = 'UTC'
GITHUB_URL = 'http://github.com/ametaireau/'
DISQUS_SITENAME = "blog-notmyidea"
PDF_GENERATOR = False
REVERSE_CATEGORY_ORDER = True
DEFAULT_PAGINATION = 2
# Feed output paths; %s in the category feed is the category name.
FEED_RSS = 'feeds/all.rss.xml'
CATEGORY_FEED_RSS = 'feeds/%s.rss.xml'
# (label, url) pairs rendered in the blogroll.
LINKS = (('Biologeek', 'http://biologeek.org'),
         ('Filyb', "http://filyb.info/"),
         ('Libert-fr', "http://www.libert-fr.com"),
         ('N1k0', "http://prendreuncafe.com/blog/"),
         ('Tarek Ziadé', "http://ziade.org/blog"),
         ('Zubin Mithra', "http://zubin71.wordpress.com/"),)
# (network, url) pairs for the social widget.
SOCIAL = (('twitter', 'http://twitter.com/ametaireau'),
          ('lastfm', 'http://lastfm.com/user/akounet'),
          ('github', 'http://github.com/ametaireau'),)
# global metadata to all the contents
DEFAULT_METADATA = {'yeah': 'it is'}
# path-specific metadata
EXTRA_PATH_METADATA = {
    'extra/robots.txt': {'path': 'robots.txt'},
    }
# static paths will be copied without parsing their contents
STATIC_PATHS = [
    'pictures',
    'extra/robots.txt',
    ]
FORMATTED_FIELDS = ['summary', 'custom_formatted_field']
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
foobar = "barbaz"
| agpl-3.0 |
Zumium/boxes | boxes/main.py | 1 | 2513 | #! /usr/bin/env python3
#Copyright (C) 2016 Zumium martin007323@gmail.com
#
#
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
from boxes import argsParse,configParse
from boxes.handlers import *
import xml.etree.ElementTree as ET
import configparser
import os.path
def main():
    """Entry point: parse the command line, load the user configuration
    and dispatch to the handler class matching the requested action."""
    # get command line arguments in xml tree format
    xmlCmdArgs = argsParse.parseCmdline()
    # read the user's config file
    configPath = configParse.getConfigPath()
    config = configParse.readConfig(configPath)
    # box storage locations ('~' is expanded by the handlers)
    folderPath = config['Default']['FolderPath']
    archivePath = config['Default']['ArchivePath']
    # One handler class per sub-command.
    operaHandlers = {
        'create': create.CreateHandler,
        'drop': drop.DropHandler,
        'list-file': list_boxfile.ListBoxfileHandler,
        'list-boxes': list_boxes.ListBoxesHandler,
        'list-arch': list_archives.ListArchivesHandler,
        'add': add_file.AddFileHandler,
        'path': path.PathHandler,
        'del': del_file.DelFileHandler,
        'link': link.LinkHandler,
        'unlink': unlink.UnlinkHandler,
        'fresh': fresh.FreshHandler,
        'archive': archive.ArchiveHandler,
        'unarchive': unarchive.UnarchiveHandler,
        'import': import_box.ImportHandler,
        'export': export_box.ExportHandler,
        'list': list_allboxes.ListHandler,
        'help': help_msg.HelpHandler,
    }
    # Instantiate the handler for the requested action and run it.
    cmdAction = xmlCmdArgs.find('action').text
    handlerInstance = operaHandlers[cmdAction]()
    handlerInstance.setBoxPath(folderPath, archivePath)
    handlerInstance.putArgument(xmlCmdArgs)
    handlerInstance.handle()
| apache-2.0 |
MotorolaMobilityLLC/external-chromium_org | chrome/test/functional/autofill.py | 65 | 9323 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import pickle
import re
import simplejson
import pyauto_functional # Must be imported before pyauto
import pyauto
import test_utils
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from webdriver_pages import settings
class AutofillTest(pyauto.PyUITest):
  """Tests that autofill UI works correctly. Also contains a manual test for
  the crowdsourcing server."""

  def setUp(self):
    pyauto.PyUITest.setUp(self)
    # A fresh WebDriver session is used to drive the chrome://settings pages.
    self._driver = self.NewWebDriver()

  def AutofillCrowdsourcing(self):
    """Test able to send POST request of web form to Autofill server.

    The Autofill server processes the data offline, so it can take a few days
    for the result to be detectable. Manual verification is required.
    """
    # HTML file needs to be run from a specific http:// url to be able to verify
    # the results a few days later by visiting the same url.
    url = 'http://www.corp.google.com/~dyu/autofill/crowdsourcing-test.html'
    # Autofill server captures 2.5% of the data posted.
    # Looping 1000 times is a safe minimum to exceed the server's threshold or
    # noise.
    for i in range(1000):
      fname = 'David'
      lname = 'Yu'
      email = 'david.yu@gmail.com'
      # Submit form to collect crowdsourcing data for Autofill.
      self.NavigateToURL(url, 0, 0)
      profile = {'fn': fname, 'ln': lname, 'em': email}
      # Fill each field by element id, then submit the form via JS.
      js = ''.join(['document.getElementById("%s").value = "%s";' %
                    (key, value) for key, value in profile.iteritems()])
      js += 'document.getElementById("testform").submit();'
      self.ExecuteJavascript(js)

  def _SelectOptionXpath(self, value):
    """Returns an xpath query used to select an item from a dropdown list.

    Args:
      value: Option selected for the drop-down list field.

    Returns:
      The value of the xpath query.
    """
    return '//option[@value="%s"]' % value

  def testPostalCodeAndStateLabelsBasedOnCountry(self):
    """Verify postal code and state labels based on selected country."""
    # Expected label strings per country code live in a JSON data file.
    data_file = os.path.join(self.DataDir(), 'autofill', 'functional',
                             'state_zip_labels.txt')
    test_data = simplejson.loads(open(data_file).read())

    page = settings.AutofillEditAddressDialog.FromNavigation(self._driver)
    # Initial check of State and ZIP labels (US defaults).
    self.assertEqual('State', page.GetStateLabel())
    self.assertEqual('ZIP code', page.GetPostalCodeLabel())

    for country_code in test_data:
      page.Fill(country_code=country_code)

      # Compare postal code labels.
      actual_postal_label = page.GetPostalCodeLabel()
      self.assertEqual(
          test_data[country_code]['postalCodeLabel'],
          actual_postal_label,
          msg=('Postal code label "%s" does not match Country "%s"' %
               (actual_postal_label, country_code)))

      # Compare state labels.
      actual_state_label = page.GetStateLabel()
      self.assertEqual(
          test_data[country_code]['stateLabel'],
          actual_state_label,
          msg=('State label "%s" does not match Country "%s"' %
               (actual_state_label, country_code)))

  def testNoDuplicatePhoneNumsInPrefs(self):
    """Test duplicate phone numbers entered in prefs are removed."""
    page = settings.AutofillEditAddressDialog.FromNavigation(self._driver)
    non_duplicates = ['111-1111', '222-2222']
    duplicates = ['111-1111']
    page.Fill(phones=non_duplicates + duplicates)
    # Only the de-duplicated list should survive the round trip.
    self.assertEqual(non_duplicates, page.GetPhones(),
                     msg='Duplicate phone number in prefs unexpectedly saved.')

  def testDisplayLineItemForEntriesWithNoCCNum(self):
    """Verify Autofill creates a line item for CC entries with no CC number."""
    self.NavigateToURL('chrome://settings-frame/autofillEditCreditCard')
    self._driver.find_element_by_id('name-on-card').send_keys('Jane Doe')
    query_month = self._SelectOptionXpath('12')
    query_year = self._SelectOptionXpath('2014')
    self._driver.find_element_by_id('expiration-month').find_element_by_xpath(
        query_month).click()
    self._driver.find_element_by_id('expiration-year').find_element_by_xpath(
        query_year).click()
    self._driver.find_element_by_id(
        'autofill-edit-credit-card-apply-button').click()
    # Refresh the page to ensure the UI is up-to-date.
    self._driver.refresh()
    list_entry = self._driver.find_element_by_class_name('autofill-list-item')
    self.assertTrue(list_entry.is_displayed)
    self.assertEqual('Jane Doe', list_entry.text,
                     msg='Saved CC line item not same as what was entered.')

  def _GetElementList(self, container_elem, fields_to_select):
    """Returns all sub elements of specific characteristics.

    Args:
      container_elem: An element that contains other elements.
      fields_to_select: A list of fields to select with strings that
                        help create an xpath string, which in turn identifies
                        the elements needed.
                        For example: ['input', 'button']
                                     ['div[@id]', 'button[@disabled]']
                                     ['*[class="example"]']

    Returns:
      List of all subelements found in the container element.
    """
    self.assertTrue(fields_to_select, msg='No fields specified for selection.')
    # Build a relative xpath union, e.g. './/input | .//button'.
    fields_to_select = ['.//' + field for field in fields_to_select]
    xpath_arg = ' | '.join(fields_to_select)
    field_elems = container_elem.find_elements_by_xpath(xpath_arg)
    return field_elems

  def _GetElementInfo(self, element):
    """Returns visual comprehensive info about an element.

    This function identifies the text of the correspoinding label when tab
    ordering fails.
    This info consists of:
    The labels, buttons, ids, placeholder attribute values, or the element id.

    Args:
      element: The target element.

    Returns:
      A string that identifies the element in the page.
    """
    element_info = ''
    if element.tag_name == 'button':
      element_info = element.text
    # Fall back through progressively weaker identifiers.
    element_info = (element_info or element.get_attribute('id') or
                    element.get_attribute('placeholder') or
                    element.get_attribute('class') or element.id)
    return '%s: %s' % (element.tag_name, element_info)

  def _LoadPageAndGetFieldList(self):
    """Navigate to autofillEditAddress page and finds the elements with focus.

    These elements are of input, select, and button types.

    Returns:
      A list with all elements that can receive focus.
    """
    url = 'chrome://settings-frame/autofillEditAddress'
    self._driver.get(url)
    container_elem = self._driver.find_element_by_id(
        'autofill-edit-address-overlay')
    # The container element contains input, select and button fields. Some of
    # the buttons are disabled so they are ignored.
    field_list = self._GetElementList(container_elem,
                                      ['input', 'select',
                                       'button[not(@disabled)]'])
    self.assertTrue(field_list, 'No fields found in "%s".' % url)
    return field_list

  def testTabOrderForEditAddress(self):
    """Verify the TAB ordering for Edit Address page is correct."""
    tab_press = ActionChains(self._driver).send_keys(Keys.TAB)
    field_list = self._LoadPageAndGetFieldList()
    # Creates a dictionary where a field key returns the value of the next field
    # in the field list. The last field of the field list is mapped to the first
    # field of the field list.
    field_nextfield_dict = dict(
        zip(field_list, field_list[1:] + field_list[:1]))
    # Wait until a field of |field_list| has received the focus.
    self.WaitUntil(lambda:
                   self._driver.switch_to_active_element().id in
                   [f.id for f in field_list])
    # The first field is expected to receive the focus.
    self.assertEqual(self._driver.switch_to_active_element().id,
                     field_list[0].id,
                     msg='The first field did not receive tab focus.')
    for field in field_list:
      tab_press.perform()
      # Wait until a field of |field_list|, other than the current field, has
      # received the focus.
      self.WaitUntil(lambda:
                     self._driver.switch_to_active_element().id != field.id and
                     self._driver.switch_to_active_element().id in
                     [f.id for f in field_list])
      self.assertEqual(self._driver.switch_to_active_element().id,
                       field_nextfield_dict[field].id,
                       msg=('The TAB ordering is broken. Previous field: "%s"\n'
                            'Field expected to receive focus: "%s"\n'
                            'Field that received focus instead: "%s"')
                           % (self._GetElementInfo(field),
                              self._GetElementInfo(field_nextfield_dict[field]),
                              self._GetElementInfo(
                                  self._driver.switch_to_active_element())))
# Run the suite through the pyauto functional harness when invoked directly.
if __name__ == '__main__':
  pyauto_functional.Main()
| bsd-3-clause |
postlund/home-assistant | homeassistant/components/geonetnz_volcano/sensor.py | 3 | 5915 | """Feed Entity Manager Sensor support for GeoNet NZ Volcano Feeds."""
import logging
from typing import Optional
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_UNIT_SYSTEM_IMPERIAL,
LENGTH_KILOMETERS,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.util import dt
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from .const import (
ATTR_ACTIVITY,
ATTR_DISTANCE,
ATTR_EXTERNAL_ID,
ATTR_HAZARDS,
DEFAULT_ICON,
DOMAIN,
FEED,
SIGNAL_UPDATE_ENTITY,
)
_LOGGER = logging.getLogger(__name__)

# State-attribute keys exposing the feed's last update timestamps.
ATTR_LAST_UPDATE = "feed_last_update"
ATTR_LAST_UPDATE_SUCCESSFUL = "feed_last_update_successful"
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the GeoNet NZ Volcano Feed platform."""
    feed_manager = hass.data[DOMAIN][FEED][entry.entry_id]

    @callback
    def _async_add_sensor(manager, external_id, unit_system):
        """Create and register a sensor for a newly reported feed entry."""
        sensor = GeonetnzVolcanoSensor(
            entry.entry_id, manager, external_id, unit_system
        )
        _LOGGER.debug("Adding sensor %s", sensor)
        async_add_entities([sensor], True)

    # Subscribe to "new entity" events from the feed manager and remember
    # the unsubscribe callback so it can be cleaned up later.
    feed_manager.listeners.append(
        async_dispatcher_connect(
            hass, feed_manager.async_event_new_entity(), _async_add_sensor
        )
    )
    hass.async_create_task(feed_manager.async_update())
    _LOGGER.debug("Sensor setup done")
class GeonetnzVolcanoSensor(Entity):
    """This represents an external event with GeoNet NZ Volcano feed data."""

    def __init__(self, config_entry_id, feed_manager, external_id, unit_system):
        """Initialize entity with data from feed entry."""
        self._config_entry_id = config_entry_id
        self._feed_manager = feed_manager
        self._external_id = external_id
        self._unit_system = unit_system
        # Feed-derived state; all populated on the first update.
        self._title = None
        self._distance = None
        self._latitude = None
        self._longitude = None
        self._attribution = None
        self._alert_level = None
        self._activity = None
        self._hazards = None
        self._feed_last_update = None
        self._feed_last_update_successful = None
        # Dispatcher unsubscribe callback; set in async_added_to_hass.
        self._remove_signal_update = None

    async def async_added_to_hass(self):
        """Call when entity is added to hass."""
        # Listen for per-entity update signals from the feed manager.
        self._remove_signal_update = async_dispatcher_connect(
            self.hass,
            SIGNAL_UPDATE_ENTITY.format(self._external_id),
            self._update_callback,
        )

    async def async_will_remove_from_hass(self) -> None:
        """Call when entity will be removed from hass."""
        if self._remove_signal_update:
            self._remove_signal_update()

    @callback
    def _update_callback(self):
        """Call update method."""
        self.async_schedule_update_ha_state(True)

    @property
    def should_poll(self):
        """No polling needed for GeoNet NZ Volcano feed location events."""
        return False

    async def async_update(self):
        """Update this entity from the data held in the feed manager."""
        _LOGGER.debug("Updating %s", self._external_id)
        feed_entry = self._feed_manager.get_entry(self._external_id)
        last_update = self._feed_manager.last_update()
        last_update_successful = self._feed_manager.last_update_successful()
        if feed_entry:
            self._update_from_feed(feed_entry, last_update, last_update_successful)

    def _update_from_feed(self, feed_entry, last_update, last_update_successful):
        """Update the internal state from the provided feed entry."""
        self._title = feed_entry.title
        # Convert distance if not metric system.
        if self._unit_system == CONF_UNIT_SYSTEM_IMPERIAL:
            self._distance = round(
                IMPERIAL_SYSTEM.length(feed_entry.distance_to_home, LENGTH_KILOMETERS),
                1,
            )
        else:
            self._distance = round(feed_entry.distance_to_home, 1)
        self._latitude = round(feed_entry.coordinates[0], 5)
        self._longitude = round(feed_entry.coordinates[1], 5)
        self._attribution = feed_entry.attribution
        self._alert_level = feed_entry.alert_level
        self._activity = feed_entry.activity
        self._hazards = feed_entry.hazards
        # Normalise timestamps to UTC; keep None when never updated.
        self._feed_last_update = dt.as_utc(last_update) if last_update else None
        self._feed_last_update_successful = (
            dt.as_utc(last_update_successful) if last_update_successful else None
        )

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._alert_level

    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return DEFAULT_ICON

    @property
    def name(self) -> Optional[str]:
        """Return the name of the entity."""
        return f"Volcano {self._title}"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return "alert level"

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        attributes = {}
        # Skip empty/None values but keep explicit booleans.
        # NOTE(review): numeric 0 (e.g. a distance of 0.0) is filtered out
        # here as well — confirm that is intended.
        for key, value in (
            (ATTR_EXTERNAL_ID, self._external_id),
            (ATTR_ATTRIBUTION, self._attribution),
            (ATTR_ACTIVITY, self._activity),
            (ATTR_HAZARDS, self._hazards),
            (ATTR_LONGITUDE, self._longitude),
            (ATTR_LATITUDE, self._latitude),
            (ATTR_DISTANCE, self._distance),
            (ATTR_LAST_UPDATE, self._feed_last_update),
            (ATTR_LAST_UPDATE_SUCCESSFUL, self._feed_last_update_successful),
        ):
            if value or isinstance(value, bool):
                attributes[key] = value
        return attributes
| apache-2.0 |
xbs13/influxdb-python | influxdb/tests/server_tests/influxdb_instance.py | 3 | 5519 | # -*- coding: utf-8 -*-
from __future__ import print_function
import datetime
import os
import tempfile
import distutils
import time
import shutil
import subprocess
import unittest
import sys
from influxdb.tests.misc import get_free_port, is_port_open
class InfluxDbInstance(object):
    """ A class to launch of fresh influxdb server instance
    in a temporary place, using a config file template.
    """

    def __init__(self,
                 conf_template,
                 udp_enabled=False):
        """Render the config template into a temp dir and start influxd.

        Raises unittest.SkipTest when server tests are disabled, and
        RuntimeError when the server exits early or never opens its ports.
        """
        if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
            raise unittest.SkipTest(
                "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
            )

        self.influxd_path = self.find_influxd_path()

        # create a temporary dir to store all needed files
        # for the influxdb server instance :
        self.temp_dir_base = tempfile.mkdtemp()

        # "temp_dir_base" will be used for conf file and logs,
        # while "temp_dir_influxdb" is for the databases files/dirs :
        tempdir = self.temp_dir_influxdb = tempfile.mkdtemp(
            dir=self.temp_dir_base)

        # find a couple free ports :
        ports = dict(
            http_port=get_free_port(),
            admin_port=get_free_port(),
            meta_port=get_free_port(),
            udp_port=get_free_port() if udp_enabled else -1,
        )

        conf_data = dict(
            meta_dir=os.path.join(tempdir, 'meta'),
            data_dir=os.path.join(tempdir, 'data'),
            cluster_dir=os.path.join(tempdir, 'state'),
            handoff_dir=os.path.join(tempdir, 'handoff'),
            logs_file=os.path.join(self.temp_dir_base, 'logs.txt'),
            udp_enabled='true' if udp_enabled else 'false',
        )
        conf_data.update(ports)
        # Expose ports and paths as instance attributes (http_port, etc.).
        self.__dict__.update(conf_data)

        conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf')
        with open(conf_file, "w") as fh:
            with open(conf_template) as fh_template:
                fh.write(fh_template.read().format(**conf_data))

        # now start the server instance:
        self.proc = subprocess.Popen(
            [self.influxd_path, '-config', conf_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )

        print(
            "%s > Started influxdb bin in %r with ports %s and %s.." % (
                datetime.datetime.now(),
                self.temp_dir_base,
                self.admin_port,
                self.http_port
            )
        )

        # wait for it to listen on the broker and admin ports:
        # usually a fresh instance is ready in less than 1 sec ..
        timeout = time.time() + 10  # so 10 secs should be enough,
        # otherwise either your system load is high,
        # or you run a 286 @ 1Mhz ?
        try:
            while time.time() < timeout:
                if (is_port_open(self.http_port)
                        and is_port_open(self.admin_port)):
                    # it's hard to check if a UDP port is open..
                    if udp_enabled:
                        # so let's just sleep 0.5 sec in this case
                        # to be sure that the server has open the port
                        time.sleep(0.5)
                    break
                time.sleep(0.5)
                if self.proc.poll() is not None:
                    raise RuntimeError('influxdb prematurely exited')
            else:
                self.proc.terminate()
                self.proc.wait()
                raise RuntimeError('Timeout waiting for influxdb to listen'
                                   ' on its ports (%s)' % ports)
        except RuntimeError as err:
            data = self.get_logs_and_output()
            data['reason'] = str(err)
            data['now'] = datetime.datetime.now()
            raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n"
                               "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r"
                               % data)

    def find_influxd_path(self):
        """Locate the influxd binary, or skip the test when unavailable."""
        influxdb_bin_path = os.environ.get(
            'INFLUXDB_PYTHON_INFLUXD_PATH',
            None
        )

        if influxdb_bin_path is None:
            # BUGFIX: 'import distutils' alone does not load the
            # 'distutils.spawn' submodule, so referencing
            # distutils.spawn.find_executable could raise AttributeError.
            # Import the submodule explicitly.
            from distutils import spawn
            influxdb_bin_path = spawn.find_executable('influxd')
            if not influxdb_bin_path:
                try:
                    influxdb_bin_path = subprocess.check_output(
                        ['which', 'influxdb']
                    ).strip()
                except subprocess.CalledProcessError:
                    # fallback on :
                    influxdb_bin_path = '/opt/influxdb/influxd'

        if not os.path.isfile(influxdb_bin_path):
            raise unittest.SkipTest("Could not find influxd binary")

        version = subprocess.check_output([influxdb_bin_path, 'version'])
        print("InfluxDB version: %s" % version, file=sys.stderr)

        return influxdb_bin_path

    def get_logs_and_output(self):
        """Collect return code, stdout/stderr and log file contents."""
        proc = self.proc
        try:
            with open(self.logs_file) as fh:
                logs = fh.read()
        except IOError as err:
            logs = "Couldn't read logs: %s" % err
        return {
            'rc': proc.returncode,
            'out': proc.stdout.read(),
            'err': proc.stderr.read(),
            'logs': logs
        }

    def close(self, remove_tree=True):
        """Stop the server process and (optionally) delete its temp dirs."""
        self.proc.terminate()
        self.proc.wait()
        if remove_tree:
            shutil.rmtree(self.temp_dir_base)
| mit |
mvcsantos/QGIS | python/plugins/processing/gui/FileSelectionPanel.py | 5 | 3036 | # -*- coding: utf-8 -*-
"""
***************************************************************************
FileSelectionPanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtGui import QFileDialog
from PyQt4.QtCore import QSettings
from processing.tools.system import isWindows
# Root of the processing plugin (one level above this package).
pluginPath = os.path.split(os.path.dirname(__file__))[0]
# Load the Qt Designer form once at import time: WIDGET is the generated
# form class, BASE its Qt base class.
WIDGET, BASE = uic.loadUiType(
    os.path.join(pluginPath, 'ui', 'widgetBaseSelector.ui'))
class FileSelectionPanel(BASE, WIDGET):
    """Line-edit plus browse-button widget for picking a file or folder."""

    def __init__(self, isFolder, ext=None):
        """isFolder: select a directory instead of files.
        ext: file extension filter (without the dot); defaults to '*'.
        """
        super(FileSelectionPanel, self).__init__(None)
        self.setupUi(self)

        self.ext = ext or '*'
        self.isFolder = isFolder
        self.btnSelect.clicked.connect(self.showSelectionDialog)

    def showSelectionDialog(self):
        # Find the file dialog's working directory: prefer the current text,
        # then its parent directory, then the last used input path.
        settings = QSettings()
        text = self.leText.text()
        if os.path.isdir(text):
            path = text
        elif os.path.isdir(os.path.dirname(text)):
            path = os.path.dirname(text)
        elif settings.contains('/Processing/LastInputPath'):
            path = settings.value('/Processing/LastInputPath')
        else:
            path = ''

        if self.isFolder:
            folder = QFileDialog.getExistingDirectory(self,
                                                      self.tr('Select folder'), path)
            if folder:
                self.leText.setText(folder)
                settings.setValue('/Processing/LastInputPath',
                                  os.path.dirname(folder))
        else:
            # Multiple files may be selected; they are joined with ';'.
            filenames = QFileDialog.getOpenFileNames(self,
                                                     self.tr('Select file'), path, '*.' + self.ext)
            if filenames:
                self.leText.setText(u';'.join(filenames))
                settings.setValue('/Processing/LastInputPath',
                                  os.path.dirname(filenames[0]))

    def getValue(self):
        """Return the current text, normalising path separators on Windows."""
        s = self.leText.text()
        if isWindows():
            s = s.replace('\\', '/')
        return s

    def setText(self, text):
        self.leText.setText(text)
| gpl-2.0 |
wakermahmud/nw.js | tools/build_native_modules.py | 85 | 1916 | #!/usr/bin/env python
import os, re, sys
import subprocess
# Native test modules that must be rebuilt against the node-webkit ABI.
native_modules = ['nw_test_loop_without_handle',
                  'bignum',
                  'dtrace-provider',
                  'ref',
                  'lame',
                  ]

script_dir = os.path.dirname(__file__)
native_root = os.path.join(script_dir, os.pardir, 'tests', 'node_modules')
native_root = os.path.normpath(native_root)
native_root = os.path.abspath(native_root)
nw_gyp_script = os.path.normpath(
    os.path.join(script_dir,
                 os.pardir,
                 'tests',
                 'node_modules',
                 'nw-gyp',
                 'bin',
                 'nw-gyp.js'))
nw_gyp_script = os.path.abspath(nw_gyp_script)
cur_dir = os.getcwd()

# Get the node-webkit target version from the first "[vX.Y.Z ...]" heading
# in README.md.
# BUGFIX: the pattern is now a raw string (the old non-raw '\[', '\d' were
# invalid escape sequences) and the dots are escaped so they only match a
# literal '.'; the file is opened with a context manager so it is closed.
nw_readme_md = os.path.join(os.path.dirname(__file__), '..', 'README.md')
with open(nw_readme_md) as f:
    for line in f:
        if re.match(r'\[v\d*\.\d*\.\d*[\s\S]*\]', line):
            target = line.split()[0][2:]
            break

import optparse
parser = optparse.OptionParser()
parser.add_option('-t', '--target',
                  help='the node-webkit verison')
opts, args = parser.parse_args()
# A command-line --target overrides the version found in the README.
if opts.target:
    target = opts.target

exec_args = ['nw-gyp',
             'configure',
             '--target=%s' % (target),
             'build']

win = sys.platform in ('win32', 'cygwin')

# We need to rebuild a submodule in http-auth
apache_crypt_path = os.path.join(
    script_dir,
    "..",
    "tests",
    "node_modules",
    "http-auth",
    "node_modules",
    "htpasswd",
    "node_modules",
    "apache-crypt")
os.chdir(apache_crypt_path)
subprocess.call(exec_args)

for dir in native_modules:
    # bignum does not build on Windows; skip it there.
    if dir == 'bignum' and win:
        continue
    native_dir = os.path.join(native_root, dir)
    os.chdir(native_dir)
    subprocess.call(exec_args)

os.chdir(cur_dir)
| mit |
zhjunlang/kbengine | kbe/src/lib/python/Lib/test/test_list.py | 90 | 4222 | import sys
from test import support, list_tests
import pickle
class ListTest(list_tests.CommonTest):
    # Run the shared sequence tests against the builtin list type.
    type2test = list

    def test_basic(self):
        self.assertEqual(list([]), [])
        l0_3 = [0, 1, 2, 3]
        l0_3_bis = list(l0_3)
        self.assertEqual(l0_3, l0_3_bis)
        # list() of a list must copy, not alias.
        self.assertTrue(l0_3 is not l0_3_bis)
        self.assertEqual(list(()), [])
        self.assertEqual(list((0, 1, 2, 3)), [0, 1, 2, 3])
        self.assertEqual(list(''), [])
        self.assertEqual(list('spam'), ['s', 'p', 'a', 'm'])

        if sys.maxsize == 0x7fffffff:
            # This test can currently only work on 32-bit machines.
            # XXX If/when PySequence_Length() returns a ssize_t, it should be
            # XXX re-enabled.
            # Verify clearing of bug #556025.
            # This assumes that the max data size (sys.maxint) == max
            # address size this also assumes that the address size is at
            # least 4 bytes with 8 byte addresses, the bug is not well
            # tested
            #
            # Note: This test is expected to SEGV under Cygwin 1.3.12 or
            # earlier due to a newlib bug. See the following mailing list
            # thread for the details:
            # http://sources.redhat.com/ml/newlib/2002/msg00369.html
            self.assertRaises(MemoryError, list, range(sys.maxsize // 2))

        # This code used to segfault in Py2.4a3
        x = []
        x.extend(-y for y in x)
        self.assertEqual(x, [])

    def test_truth(self):
        super().test_truth()
        self.assertTrue(not [])
        self.assertTrue([42])

    def test_identity(self):
        self.assertTrue([] is not [])

    def test_len(self):
        super().test_len()
        self.assertEqual(len([]), 0)
        self.assertEqual(len([0]), 1)
        self.assertEqual(len([0, 1, 2]), 3)

    def test_overflow(self):
        # Repeating a list so its size would exceed the address space must
        # raise rather than crash.
        lst = [4, 5, 6, 7]
        n = int((sys.maxsize*2+2) // len(lst))
        def mul(a, b): return a * b
        def imul(a, b): a *= b
        self.assertRaises((MemoryError, OverflowError), mul, lst, n)
        self.assertRaises((MemoryError, OverflowError), imul, lst, n)

    def test_repr_large(self):
        # Check the repr of large list objects
        def check(n):
            l = [0] * n
            s = repr(l)
            self.assertEqual(s,
                '[' + ', '.join(['0'] * n) + ']')
        check(10)       # check our checking code
        check(1000000)

    def test_iterator_pickle(self):
        # Userlist iterators don't support pickling yet since
        # they are based on generators.
        data = self.type2test([4, 5, 6, 7])
        it = itorg = iter(data)
        d = pickle.dumps(it)
        it = pickle.loads(d)
        self.assertEqual(type(itorg), type(it))
        self.assertEqual(self.type2test(it), self.type2test(data))

        # A partially-consumed iterator must round-trip too.
        it = pickle.loads(d)
        next(it)
        d = pickle.dumps(it)
        self.assertEqual(self.type2test(it), self.type2test(data)[1:])

    def test_reversed_pickle(self):
        data = self.type2test([4, 5, 6, 7])
        it = itorg = reversed(data)
        d = pickle.dumps(it)
        it = pickle.loads(d)
        self.assertEqual(type(itorg), type(it))
        self.assertEqual(self.type2test(it), self.type2test(reversed(data)))

        it = pickle.loads(d)
        next(it)
        d = pickle.dumps(it)
        self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])

    def test_no_comdat_folding(self):
        # Issue 8847: In the PGO build, the MSVC linker's COMDAT folding
        # optimization causes failures in code that relies on distinct
        # function addresses.
        class L(list): pass
        with self.assertRaises(TypeError):
            (3,) + L([1,2])
def test_main(verbose=None):
    """Run ListTest once; on refcount-debug builds, repeat it to spot leaks."""
    support.run_unittest(ListTest)

    # verify reference counting
    if verbose and hasattr(sys, "gettotalrefcount"):
        import gc
        totals = []
        for _ in range(5):
            support.run_unittest(ListTest)
            gc.collect()
            totals.append(sys.gettotalrefcount())
        print(totals)

if __name__ == "__main__":
    test_main(verbose=True)
| lgpl-3.0 |
prasad-joshi/logfs_upstream | tools/perf/scripts/python/net_dropmonitor.py | 4235 | 1554 | # Monitor the system for dropped packets and proudce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Maps drop location (address, as a string) -> number of drops seen there.
drop_log = {}
# List of {'loc': address, 'name': symbol} entries parsed from /proc/kallsyms,
# kept sorted so lookups can assume ascending addresses.
kallsyms = []
def get_kallsyms_table():
    """Load /proc/kallsyms into the global kallsyms list (Python 2 script)."""
    global kallsyms

    try:
        f = open("/proc/kallsyms", "r")
        # First pass just counts lines so progress can be reported.
        linecount = 0
        for line in f:
            linecount = linecount+1
        f.seek(0)
    except:
        # Best effort: without kallsyms the report shows raw addresses.
        return

    j = 0
    for line in f:
        # Each line is "<hex address> <type> <symbol name> ...".
        loc = int(line.split()[0], 16)
        name = line.split()[2]
        j = j +1
        if ((j % 100) == 0):
            print "\r" + str(j) + "/" + str(linecount),
        kallsyms.append({ 'loc': loc, 'name' : name})

    print "\r" + str(j) + "/" + str(linecount)
    # Sort so lookups can scan in ascending 'loc' order (Python 2 dict
    # comparison orders these entries by 'loc' first).
    kallsyms.sort()
    return
def get_sym(sloc, symtable=None):
    """Resolve an address to (symbol_name, offset_into_symbol).

    Args:
      sloc: the address, as an int or a string convertible with int().
      symtable: optional sorted symbol table (list of {'loc', 'name'} dicts,
                ascending by 'loc'); defaults to the global kallsyms list.

    Returns (None, 0) when the address precedes every known symbol.
    """
    loc = int(sloc)
    syms = kallsyms if symtable is None else symtable
    # The containing symbol is the LAST one starting at or below the
    # address, so scan from the highest start address downwards.
    # BUGFIX: the previous code returned the first symbol *above* the
    # address (i['loc'] >= loc) and computed the offset backwards
    # (i['loc'] - loc), attributing drops to the wrong function.
    for entry in reversed(syms):
        if entry['loc'] <= loc:
            return (entry['name'], loc - entry['loc'])
    return (None, 0)
def print_drop_table():
    """Print one row per drop location: resolved symbol, offset and count."""
    print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
    for i in drop_log.keys():
        (sym, off) = get_sym(i)
        if sym == None:
            # Address not found in kallsyms; fall back to the raw location.
            sym = i
        print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
    # perf script hook: runs once before event processing starts.
    print "Starting trace (Ctrl-C to dump results)"
def trace_end():
    # perf script hook: runs once after all events; resolve symbols and report.
    print "Gathering kallsyms data"
    get_kallsyms_table()
    print_drop_table()
# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    # Count one packet drop against its call-site address.
    slocation = str(location)
    try:
        drop_log[slocation] = drop_log[slocation] + 1
    except:
        # First drop seen at this location.
        drop_log[slocation] = 1
| gpl-2.0 |
jonasfoe/COPASI | copasi/bindings/python/unittests/Test_CMatrix.py | 2 | 1529 | # -*- coding: utf-8 -*-
# Begin CVS Header
# $Source: /Volumes/Home/Users/shoops/cvs/copasi_dev/copasi/bindings/python/unittests/Test_CMatrix.py,v $
# $Revision: 1.7 $
# $Name: $
# $Author: gauges $
# $Date: 2008/04/21 10:27:07 $
# End CVS Header
# Copyright (C) 2008 by Pedro Mendes, Virginia Tech Intellectual
# Properties, Inc., EML Research, gGmbH, University of Heidelberg,
# and The University of Manchester.
# All rights reserved.
import COPASI
import unittest
from types import *
class Test_CMatrix(unittest.TestCase):
    """Checks for the SWIG-wrapped COPASI FloatMatrix class.

    Uses the Python 2 era assert_ / IntType APIs of its time.
    """

    def setUp(self):
        # A fresh 5x11 matrix for every test.
        self.numRows=5
        self.numCols=11
        self.matrix=COPASI.FloatMatrix(self.numRows,self.numCols)

    def test_numRows(self):
        n=self.matrix.numRows()
        self.assert_(type(n)==IntType)
        self.assert_(n==self.numRows)

    def test_numCols(self):
        n=self.matrix.numCols()
        self.assert_(type(n)==IntType)
        self.assert_(n==self.numCols)

    def test_size(self):
        # size() must be the total element count, rows * cols.
        n=self.matrix.size()
        self.assert_(type(n)==IntType)
        self.assert_(n==(self.numCols*self.numRows))

    def test_resize(self):
        nr=13
        nc=2
        self.matrix.resize(nr,nc)
        self.assert_(self.matrix.numRows()==nr)
        self.assert_(self.matrix.numCols()==nc)
        self.assert_(self.matrix.size()==nr*nc)
def suite():
    """Collect the Test_CMatrix checks into a unittest suite."""
    test_names = [
        'test_numRows',
        'test_numCols',
        'test_size',
        'test_resize',
    ]
    return unittest.TestSuite([Test_CMatrix(name) for name in test_names])
# Run the suite verbosely when invoked as a script.
if(__name__ == '__main__'):
    unittest.TextTestRunner(verbosity=2).run(suite())
| artistic-2.0 |
rhertzog/librement | src/librement/profile/forms.py | 1 | 3743 | # Copyright 2012 The Librement Developers
#
# See the AUTHORS file at the top-level directory of this distribution
# and at http://librement.net/copyright/
#
# This file is part of Librement. It is subject to the license terms in
# the LICENSE file found in the top-level directory of this distribution
# and at http://librement.net/license/. No part of Librement, including
# this file, may be copied, modified, propagated, or distributed except
# according to the terms contained in the LICENSE file.
from django import forms
from django.contrib.auth.models import User
from .enums import AccountEnum
from .models import Profile
class ProfileForm(forms.ModelForm):
    """ModelForm for editing a Profile's display fields."""

    class Meta:
        model = Profile
        fields = (
            'display_name',
            'biography',
            'rss_url',
        )
class AccountUserForm(forms.ModelForm):
    """ModelForm for a User's name; both fields are made mandatory here
    (on the User model they are optional)."""

    first_name = forms.CharField(max_length=30)  # implicit required=True
    last_name = forms.CharField(max_length=30)

    class Meta:
        model = User
        fields = (
            'first_name',
            'last_name',
        )
class AccountProfileForm(forms.ModelForm):
    """ModelForm for a Profile's postal/organisation details."""

    class Meta:
        model = Profile
        fields = (
            'organisation',
            'address_1',
            'address_2',
            'city',
            'region',
            'zipcode',
            'country',
        )

    def clean_organisation(self):
        # Organisation is only mandatory for non-individual account types.
        val = self.cleaned_data['organisation']

        if self.instance.account_type != AccountEnum.INDIVIDUAL and val == '':
            raise forms.ValidationError(
                "Required field for company/non-profit accounts"
            )

        return val
class AccountForm(dict):
    """Composite of the account sub-forms, keyed 'user' and 'profile'.

    Subclasses dict so views/templates can address each bound sub-form by
    key, while save()/is_valid() operate on all of them together.
    """

    def __init__(self, user, *args, **kwargs):
        self.user = user

        # Bind each sub-form to the right instance: the User itself, and
        # its related Profile.
        for key, klass, fn in (
            ('user', AccountUserForm, lambda x: x),
            ('profile', AccountProfileForm, lambda x: x.profile),
        ):
            self[key] = klass(instance=fn(user), *args, **kwargs)

    def save(self):
        # Returns the saved instances, one per sub-form.
        return [x.save() for x in self.values()]

    def is_valid(self):
        return all(x.is_valid() for x in self.values())
class URLForm(forms.ModelForm):
    """ModelForm restricting the username to word characters and dashes
    (it is used as a URL component)."""

    username = forms.RegexField(regex=r'^[\w-]+$')

    class Meta:
        model = User
        fields = (
            'username',
        )
class PictureForm(forms.Form):
    """Upload form for a user's profile picture."""

    picture = forms.ImageField()

    def __init__(self, user, *args, **kwargs):
        self.user = user

        super(PictureForm, self).__init__(*args, **kwargs)

    def save(self):
        """Store the uploaded image on the profile's picture field.

        BUGFIX: Django's FieldFile.save() takes (name, content); the
        previous code passed only the uploaded file, which raises
        TypeError at runtime.
        """
        picture = self.cleaned_data['picture']
        self.user.profile.picture.save(picture.name, picture)
        self.user.profile.save()
class PasswordForm(forms.Form):
    """Change-password form: verifies the old password, requires the new
    one twice, and enforces a minimum length of 8 characters."""

    password_old = forms.CharField()
    password = forms.CharField()
    password_confirm = forms.CharField()

    def __init__(self, user, *args, **kwargs):
        self.user = user

        super(PasswordForm, self).__init__(*args, **kwargs)

    def save(self):
        # Hashes and stores the new password on the bound user.
        self.user.set_password(self.cleaned_data['password'])
        self.user.save()

    def clean_password_old(self):
        val = self.cleaned_data['password_old']

        if not self.user.check_password(val):
            raise forms.ValidationError(
                "Password is not correct."
            )

        return val

    def clean_password_confirm(self):
        # 'password' may be absent here if its own validation failed, hence
        # the .get() with a default. Both the match check and the length
        # check are done in this field's clean step.
        password = self.cleaned_data.get('password', '')
        password_confirm = self.cleaned_data['password_confirm']

        if password != password_confirm:
            raise forms.ValidationError("Passwords do not match.")

        if len(password) < 8:
            raise forms.ValidationError(
                "Password must be at least 8 characters"
            )

        return password
| agpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.5.0/Lib/test/test_platform.py | 6 | 13732 | from unittest import mock
import os
import platform
import subprocess
import sys
import tempfile
import unittest
import warnings
from test import support
class PlatformTest(unittest.TestCase):
    """Smoke and parsing tests for the platform module.

    Fixed: the warnings.filterwarnings() message patterns are regular
    expressions containing ``\\(``; they are now raw strings so the escape
    sequences are valid (plain strings emit DeprecationWarning from 3.6).
    """
    def test_architecture(self):
        res = platform.architecture()
    @support.skip_unless_symlink
    def test_architecture_via_symlink(self): # issue3762
        # On Windows, the EXE needs to know where pythonXY.dll is at so we have
        # to add the directory to the path.
        if sys.platform == "win32":
            os.environ["Path"] = "{};{}".format(
                os.path.dirname(sys.executable), os.environ["Path"])
        def get(python):
            cmd = [python, '-c',
                   'import platform; print(platform.architecture())']
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            return p.communicate()
        real = os.path.realpath(sys.executable)
        link = os.path.abspath(support.TESTFN)
        os.symlink(real, link)
        try:
            self.assertEqual(get(real), get(link))
        finally:
            os.remove(link)
    def test_platform(self):
        for aliased in (False, True):
            for terse in (False, True):
                res = platform.platform(aliased, terse)
    def test_system(self):
        res = platform.system()
    def test_node(self):
        res = platform.node()
    def test_release(self):
        res = platform.release()
    def test_version(self):
        res = platform.version()
    def test_machine(self):
        res = platform.machine()
    def test_processor(self):
        res = platform.processor()
    def setUp(self):
        # test_sys_version mutates these module-level values; save them so
        # tearDown can restore the interpreter state.
        self.save_version = sys.version
        self.save_mercurial = sys._mercurial
        self.save_platform = sys.platform
    def tearDown(self):
        sys.version = self.save_version
        sys._mercurial = self.save_mercurial
        sys.platform = self.save_platform
    def test_sys_version(self):
        # Old test.
        for input, output in (
            ('2.4.3 (#1, Jun 21 2006, 13:54:21) \n[GCC 3.3.4 (pre 3.3.5 20040809)]',
             ('CPython', '2.4.3', '', '', '1', 'Jun 21 2006 13:54:21', 'GCC 3.3.4 (pre 3.3.5 20040809)')),
            ('IronPython 1.0.60816 on .NET 2.0.50727.42',
             ('IronPython', '1.0.60816', '', '', '', '', '.NET 2.0.50727.42')),
            ('IronPython 1.0 (1.0.61005.1977) on .NET 2.0.50727.42',
             ('IronPython', '1.0.0', '', '', '', '', '.NET 2.0.50727.42')),
            ):
            # branch and revision are not "parsed", but fetched
            # from sys._mercurial. Ignore them
            (name, version, branch, revision, buildno, builddate, compiler) \
                = platform._sys_version(input)
            self.assertEqual(
                (name, version, '', '', buildno, builddate, compiler), output)
        # Tests for python_implementation(), python_version(), python_branch(),
        # python_revision(), python_build(), and python_compiler().
        sys_versions = {
            ("2.6.1 (r261:67515, Dec 6 2008, 15:26:00) \n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]",
             ('CPython', 'tags/r261', '67515'), self.save_platform)
            :
            ("CPython", "2.6.1", "tags/r261", "67515",
             ('r261:67515', 'Dec 6 2008 15:26:00'),
             'GCC 4.0.1 (Apple Computer, Inc. build 5370)'),
            ("IronPython 2.0 (2.0.0.0) on .NET 2.0.50727.3053", None, "cli")
            :
            ("IronPython", "2.0.0", "", "", ("", ""),
             ".NET 2.0.50727.3053"),
            ("2.6.1 (IronPython 2.6.1 (2.6.10920.0) on .NET 2.0.50727.1433)", None, "cli")
            :
            ("IronPython", "2.6.1", "", "", ("", ""),
             ".NET 2.0.50727.1433"),
            ("2.7.4 (IronPython 2.7.4 (2.7.0.40) on Mono 4.0.30319.1 (32-bit))", None, "cli")
            :
            ("IronPython", "2.7.4", "", "", ("", ""),
             "Mono 4.0.30319.1 (32-bit)"),
            ("2.5 (trunk:6107, Mar 26 2009, 13:02:18) \n[Java HotSpot(TM) Client VM (\"Apple Computer, Inc.\")]",
             ('Jython', 'trunk', '6107'), "java1.5.0_16")
            :
            ("Jython", "2.5.0", "trunk", "6107",
             ('trunk:6107', 'Mar 26 2009'), "java1.5.0_16"),
            ("2.5.2 (63378, Mar 26 2009, 18:03:29)\n[PyPy 1.0.0]",
             ('PyPy', 'trunk', '63378'), self.save_platform)
            :
            ("PyPy", "2.5.2", "trunk", "63378", ('63378', 'Mar 26 2009'),
             "")
            }
        for (version_tag, subversion, sys_platform), info in \
                sys_versions.items():
            sys.version = version_tag
            if subversion is None:
                if hasattr(sys, "_mercurial"):
                    del sys._mercurial
            else:
                sys._mercurial = subversion
            if sys_platform is not None:
                sys.platform = sys_platform
            self.assertEqual(platform.python_implementation(), info[0])
            self.assertEqual(platform.python_version(), info[1])
            self.assertEqual(platform.python_branch(), info[2])
            self.assertEqual(platform.python_revision(), info[3])
            self.assertEqual(platform.python_build(), info[4])
            self.assertEqual(platform.python_compiler(), info[5])
    def test_system_alias(self):
        res = platform.system_alias(
            platform.system(),
            platform.release(),
            platform.version(),
        )
    def test_uname(self):
        res = platform.uname()
        self.assertTrue(any(res))
        # named-tuple fields must mirror the positional values
        self.assertEqual(res[0], res.system)
        self.assertEqual(res[1], res.node)
        self.assertEqual(res[2], res.release)
        self.assertEqual(res[3], res.version)
        self.assertEqual(res[4], res.machine)
        self.assertEqual(res[5], res.processor)
    @unittest.skipUnless(sys.platform.startswith('win'), "windows only test")
    def test_uname_win32_ARCHITEW6432(self):
        # Issue 7860: make sure we get architecture from the correct variable
        # on 64 bit Windows: if PROCESSOR_ARCHITEW6432 exists we should be
        # using it, per
        # http://blogs.msdn.com/david.wang/archive/2006/03/26/HOWTO-Detect-Process-Bitness.aspx
        try:
            with support.EnvironmentVarGuard() as environ:
                if 'PROCESSOR_ARCHITEW6432' in environ:
                    del environ['PROCESSOR_ARCHITEW6432']
                environ['PROCESSOR_ARCHITECTURE'] = 'foo'
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'foo')
                environ['PROCESSOR_ARCHITEW6432'] = 'bar'
                platform._uname_cache = None
                system, node, release, version, machine, processor = platform.uname()
                self.assertEqual(machine, 'bar')
        finally:
            platform._uname_cache = None
    def test_java_ver(self):
        res = platform.java_ver()
        if sys.platform == 'java':
            self.assertTrue(all(res))
    def test_win32_ver(self):
        res = platform.win32_ver()
    def test_mac_ver(self):
        res = platform.mac_ver()
        if platform.uname().system == 'Darwin':
            # We're on a MacOSX system, check that
            # the right version information is returned
            fd = os.popen('sw_vers', 'r')
            real_ver = None
            for ln in fd:
                if ln.startswith('ProductVersion:'):
                    real_ver = ln.strip().split()[-1]
                    break
            fd.close()
            self.assertFalse(real_ver is None)
            result_list = res[0].split('.')
            expect_list = real_ver.split('.')
            len_diff = len(result_list) - len(expect_list)
            # On Snow Leopard, sw_vers reports 10.6.0 as 10.6
            if len_diff > 0:
                expect_list.extend(['0'] * len_diff)
            self.assertEqual(result_list, expect_list)
            # res[1] claims to contain
            # (version, dev_stage, non_release_version)
            # That information is no longer available
            self.assertEqual(res[1], ('', '', ''))
            if sys.byteorder == 'little':
                self.assertIn(res[2], ('i386', 'x86_64'))
            else:
                self.assertEqual(res[2], 'PowerPC')
    @unittest.skipUnless(sys.platform == 'darwin', "OSX only test")
    def test_mac_ver_with_fork(self):
        # Issue7895: platform.mac_ver() crashes when using fork without exec
        #
        # This test checks that the fix for that issue works.
        #
        pid = os.fork()
        if pid == 0:
            # child
            info = platform.mac_ver()
            os._exit(0)
        else:
            # parent
            cpid, sts = os.waitpid(pid, 0)
            self.assertEqual(cpid, pid)
            self.assertEqual(sts, 0)
    def test_dist(self):
        with warnings.catch_warnings():
            warnings.filterwarnings(
                'ignore',
                r'dist\(\) and linux_distribution\(\) '
                r'functions are deprecated .*',
                PendingDeprecationWarning,
            )
            res = platform.dist()
    def test_libc_ver(self):
        import os
        if os.path.isdir(sys.executable) and \
           os.path.exists(sys.executable+'.exe'):
            # Cygwin horror
            executable = sys.executable + '.exe'
        else:
            executable = sys.executable
        res = platform.libc_ver(executable)
    def test_parse_release_file(self):
        for input, output in (
            # Examples of release file contents:
            ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64')),
            ('SUSE LINUX 10.1 (X86-64)', ('SUSE LINUX ', '10.1', 'X86-64')),
            ('SUSE LINUX 10.1 (i586)', ('SUSE LINUX ', '10.1', 'i586')),
            ('Fedora Core release 5 (Bordeaux)', ('Fedora Core', '5', 'Bordeaux')),
            ('Red Hat Linux release 8.0 (Psyche)', ('Red Hat Linux', '8.0', 'Psyche')),
            ('Red Hat Linux release 9 (Shrike)', ('Red Hat Linux', '9', 'Shrike')),
            ('Red Hat Enterprise Linux release 4 (Nahant)', ('Red Hat Enterprise Linux', '4', 'Nahant')),
            ('CentOS release 4', ('CentOS', '4', None)),
            ('Rocks release 4.2.1 (Cydonia)', ('Rocks', '4.2.1', 'Cydonia')),
            ('', ('', '', '')), # If there's nothing there.
            ):
            self.assertEqual(platform._parse_release_file(input), output)
    def test_popen(self):
        mswindows = (sys.platform == "win32")
        if mswindows:
            command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
        else:
            command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command) as stdout:
                hello = stdout.read().strip()
                stdout.close()
                self.assertEqual(hello, "Hello")
        data = 'plop'
        if mswindows:
            command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
        else:
            command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
        command = command.format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command, 'w') as stdin:
                stdout = stdin.write(data)
                ret = stdin.close()
                self.assertIsNotNone(ret)
                if os.name == 'nt':
                    returncode = ret
                else:
                    # POSIX wait status: exit code lives in the high byte
                    returncode = ret >> 8
                self.assertEqual(returncode, len(data))
    def test_linux_distribution_encoding(self):
        # Issue #17429
        with tempfile.TemporaryDirectory() as tempdir:
            filename = os.path.join(tempdir, 'fedora-release')
            with open(filename, 'w', encoding='utf-8') as f:
                f.write('Fedora release 19 (Schr\xf6dinger\u2019s Cat)\n')
            with mock.patch('platform._UNIXCONFDIR', tempdir):
                with warnings.catch_warnings():
                    warnings.filterwarnings(
                        'ignore',
                        r'dist\(\) and linux_distribution\(\) '
                        r'functions are deprecated .*',
                        PendingDeprecationWarning,
                    )
                    distname, version, distid = platform.linux_distribution()
                self.assertEqual(distname, 'Fedora')
            self.assertEqual(version, '19')
            self.assertEqual(distid, 'Schr\xf6dinger\u2019s Cat')
class DeprecationTest(unittest.TestCase):
    # dist() and linux_distribution() are deprecated as of Python 3.5;
    # both tests pin the exact warning category and message text.
    def test_dist_deprecation(self):
        with self.assertWarns(PendingDeprecationWarning) as cm:
            platform.dist()
        self.assertEqual(str(cm.warning),
                         'dist() and linux_distribution() functions are '
                         'deprecated in Python 3.5 and will be removed in '
                         'Python 3.7')
    def test_linux_distribution_deprecation(self):
        with self.assertWarns(PendingDeprecationWarning) as cm:
            platform.linux_distribution()
        self.assertEqual(str(cm.warning),
                         'dist() and linux_distribution() functions are '
                         'deprecated in Python 3.5 and will be removed in '
                         'Python 3.7')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| mit |
dgorissen/pycel | src/pycel/excelwrapper.py | 1 | 13640 | # -*- coding: UTF-8 -*-
#
# Copyright 2011-2019 by Dirk Gorissen, Stephen Rauch and Contributors
# All rights reserved.
# This file is part of the Pycel Library, Licensed under GPLv3 (the 'License')
# You may not use this work except in compliance with the License.
# You may obtain a copy of the Licence at:
# https://www.gnu.org/licenses/gpl-3.0.en.html
"""
ExcelOpxWrapper : Can be run anywhere but only with post 2010 Excel formats
ExcelOpxWrapperNoData :
Can be initialized with a instance of an OpenPyXl workbook
"""
import abc
import collections
import os
from unittest import mock
from openpyxl import load_workbook, Workbook
from openpyxl.cell.cell import Cell, MergedCell
from openpyxl.formula.translate import Translator
from pycel.excelutil import AddressCell, AddressRange, flatten, is_address
ARRAY_FORMULA_NAME = '=CSE_INDEX'
ARRAY_FORMULA_FORMAT = '{}(%s,%s,%s,%s,%s)'.format(ARRAY_FORMULA_NAME)
class ExcelWrapper(abc.ABC):
    """Abstract interface the compiler uses to read cells from a workbook.

    Fixed: the class previously declared ``__metaclass__ = abc.ABCMeta``,
    which is Python 2 syntax and is inert on Python 3, so the abstract
    methods were never enforced.  Subclassing ``abc.ABC`` makes the
    contract real; the existing subclasses implement all three abstract
    methods and are unaffected.
    """
    # address: AddressRange or AddressCell; formula: '='-prefixed str (or
    # nested tuples for a range); values: plain value (or nested tuples).
    RangeData = collections.namedtuple('RangeData', 'address formula values')
    @abc.abstractmethod
    def get_range(self, address):
        """Return a RangeData for *address*."""
    @abc.abstractmethod
    def get_used_range(self):
        """Iterate over the used cells of the active sheet."""
    @abc.abstractmethod
    def get_active_sheet_name(self):
        """Return the title of the active worksheet."""
    def get_formula_from_range(self, address):
        """Return the formula at *address*, or None for non-formula cells.

        For a range, returns nested tuples mirroring the range's rows.
        """
        if not is_address(address):
            address = AddressRange(address)
        result = self.get_range(address)
        if isinstance(address, AddressCell):
            return result.formula if result.formula.startswith("=") else None
        else:
            return tuple(tuple(
                self.get_formula_from_range(a) for a in row
            ) for row in result.resolve_range)
    def get_formula_or_value(self, address):
        """Return the formula if the cell has one, else its cached value.

        For a range, returns nested tuples mirroring the range's rows.
        """
        if not is_address(address):
            address = AddressRange(address)
        result = self.get_range(address)
        if isinstance(address, AddressCell):
            return result.formula or result.values
        else:
            return tuple(tuple(
                self.get_formula_or_value(a) for a in row
            ) for row in result.resolve_range)
class _OpxRange(ExcelWrapper.RangeData):
    """ Excel range wrapper that distributes reduced api used by compiler
    (Formula & Value)
    """
    def __new__(cls, cells, cells_dataonly, address):
        # cells: grid of openpyxl cells from the formula workbook;
        # cells_dataonly: the same grid from the data_only workbook
        # (cached values).
        formula = None
        value = cells[0][0].value
        if isinstance(value, str) and value.startswith(ARRAY_FORMULA_NAME):
            # if this range refers to a CSE Array Formula, get the formula
            front, *args = cells[0][0].value[:-1].rsplit(',', 4)
            # if this range corresponds to the top left of a CSE Array formula
            if (args[0] == args[1] == '1') and all(
                    c.value and c.value.startswith(front)
                    for c in flatten(cells)):
                # apply formula to the range
                formula = '={%s}' % front[len(ARRAY_FORMULA_NAME) + 1:]
        else:
            formula = tuple(tuple(cls.cell_to_formula(cell) for cell in row)
                            for row in cells)
        values = tuple(tuple(cell.value for cell in row)
                       for row in cells_dataonly)
        return ExcelWrapper.RangeData.__new__(cls, address, formula, values)
    @classmethod
    def cell_to_formula(cls, cell):
        # Return the formula text for *cell* ('' for non-formula cells),
        # rewriting CSE array-formula markers into INDEX() calls.
        if cell.value is None:
            return ''
        else:
            formula = str(cell.value)
            if not formula.startswith('='):
                return ''
            elif formula.startswith('={') and formula[-1] == '}':
                # This is not in a CSE Array Context
                return '=index({},1,1)'.format(formula[1:])
            elif formula.startswith(ARRAY_FORMULA_NAME):
                # These are CSE Array formulas as encoded from sheet
                # params: (formula, row offset, col offset, n rows, n cols)
                params = formula[len(ARRAY_FORMULA_NAME) + 1:-1].rsplit(',', 4)
                start_row = cell.row - int(params[1]) + 1
                start_col_idx = cell.col_idx - int(params[2]) + 1
                end_row = start_row + int(params[3]) - 1
                end_col_idx = start_col_idx + int(params[4]) - 1
                cse_range = AddressRange(
                    (start_col_idx, start_row, end_col_idx, end_row),
                    sheet=cell.parent.title)
                return '=index({},{},{})'.format(
                    cse_range.quoted_address, *params[1:3])
            else:
                return formula
    @property
    def resolve_range(self):
        # Rebuild a bounded range from the actual size of the values grid.
        return AddressRange(
            (self.address.start.col_idx,
             self.address.start.row,
             self.address.start.col_idx + len(self.values[0]) - 1,
             self.address.start.row + len(self.values) - 1),
            sheet=self.address.sheet
        ).resolve_range
class _OpxCell(_OpxRange):
    """ Excel cell wrapper that distributes reduced api used by compiler
    (Formula & Value)
    """
    def __new__(cls, cell, cell_dataonly, address):
        # Single-cell variant: scalar formula and scalar cached value.
        assert isinstance(address, AddressCell)
        return ExcelWrapper.RangeData.__new__(
            cls, address, cls.cell_to_formula(cell), cell_dataonly.value)
class ExcelOpxWrapper(ExcelWrapper):
    """ OpenPyXl implementation for ExcelWrapper interface """
    CfRule = collections.namedtuple(
        'CfRule', 'formula priority dxf_id dxf stop_if_true')
    def __init__(self, filename, app=None):
        # *app* is unused; kept for interface compatibility.
        # Fixed: was super(ExcelWrapper, self).__init__(), which skips
        # ExcelWrapper itself in the MRO; plain super() follows it.
        super().__init__()
        self.filename = os.path.abspath(filename)
        self._defined_names = None
        self._tables = None
        self._table_refs = {}
        self.workbook = None
        self.workbook_dataonly = None
        self._max_col_row = {}
    def max_col_row(self, sheet):
        """Return (max_column, max_row) for *sheet*, cached per sheet name."""
        if sheet not in self._max_col_row:
            worksheet = self.workbook[sheet]
            self._max_col_row[sheet] = worksheet.max_column, worksheet.max_row
        return self._max_col_row[sheet]
    @property
    def defined_names(self):
        """Lazily built map of defined name -> [(address, sheet), ...]."""
        if self.workbook is not None and self._defined_names is None:
            self._defined_names = {}
            for d_name in self.workbook.defined_names.definedName:
                destinations = [
                    (alias, wksht) for wksht, alias in d_name.destinations
                    if wksht in self.workbook]
                if len(destinations):
                    self._defined_names[str(d_name.name)] = destinations
        return self._defined_names
    def table(self, table_name):
        """ Return the table and the sheet it was found on
        :param table_name: name of table to retrieve
        :return: table, sheet_name
        """
        # table names are case insensitive
        if self._tables is None:
            TableAndSheet = collections.namedtuple(
                'TableAndSheet', 'table, sheet_name')
            self._tables = {
                t.name.lower(): TableAndSheet(t, ws.title)
                for ws in self.workbook for t in self._worksheet_tables(ws)}
            self._tables[None] = TableAndSheet(None, None)
        return self._tables.get(table_name.lower(), self._tables[None])
    def table_name_containing(self, address):
        """ Return the table name containing the address given """
        address = AddressCell(address)
        if address not in self._table_refs:
            # NOTE: misses are not cached, so they re-scan on every call
            for t in self._worksheet_tables(self.workbook[address.sheet]):
                if address in AddressRange(t.ref):
                    self._table_refs[address] = t.name.lower()
                    break
        return self._table_refs.get(address)
    def _worksheet_tables(self, ws):  # pragma: no cover
        """::HACK:: workaround for unsupported tables access in openpyxl < 3.0.4"""
        try:
            return ws.tables.values()
        except AttributeError:
            # hack for openpyxl versions < 3.0.4
            return ws._tables
    def conditional_format(self, address):
        """ Return the conditional formats applicable for this cell """
        address = AddressCell(address)
        all_formats = self.workbook[address.sheet].conditional_formatting
        formats = (cf for cf in all_formats if address.coordinate in cf)
        rules = []
        for cf in formats:
            # translate each rule's formula from the format's anchor cell
            # to the requested cell
            origin = AddressRange(cf.cells.ranges[0].coord).start
            row_offset = address.row - origin.row
            col_offset = address.col_idx - origin.col_idx
            for rule in cf.rules:
                if rule.formula:
                    trans = Translator(
                        '={}'.format(rule.formula[0]), origin.coordinate)
                    formula = trans.translate_formula(
                        row_delta=row_offset, col_delta=col_offset)
                    rules.append(self.CfRule(
                        formula=formula,
                        priority=rule.priority,
                        dxf_id=rule.dxfId,
                        dxf=rule.dxf,
                        stop_if_true=rule.stopIfTrue,
                    ))
        return sorted(rules, key=lambda x: x.priority)
    def load(self):
        """Load both the formula and data_only workbooks from disk."""
        # work around type coercion to datetime that causes some issues
        with mock.patch('openpyxl.worksheet._reader.from_excel',
                        self.from_excel):
            self.workbook = load_workbook(self.filename)
            self.workbook_dataonly = load_workbook(
                self.filename, data_only=True)
            self.load_array_formulas()
    def load_array_formulas(self):
        # expand array formulas into one CSE_INDEX marker per member cell
        for ws in self.workbook:
            for address, props in ws.formula_attributes.items():
                if props.get('t') != 'array':
                    continue  # pragma: no cover
                # get the reference address for the array formula
                ref_addr = AddressRange(props.get('ref'))
                if isinstance(ref_addr, AddressRange):
                    formula = ws[address].value
                    for i, row in enumerate(ref_addr.rows, start=1):
                        for j, addr in enumerate(row, start=1):
                            ws[addr.coordinate] = ARRAY_FORMULA_FORMAT % (
                                formula[1:], i, j, *ref_addr.size)
    def set_sheet(self, s):
        """Make sheet *s* active in both workbooks; return the active sheet."""
        self.workbook.active = self.workbook.index(self.workbook[s])
        self.workbook_dataonly.active = self.workbook_dataonly.index(
            self.workbook_dataonly[s])
        return self.workbook.active
    @staticmethod
    def from_excel(value, *args, **kwargs):
        # ::HACK:: excel thinks that 1900/02/29 was a thing. In certain
        # circumstances openpyxl will return a datetime. This is a problem
        # as we don't want them, and having been mapped to datetime
        # information may have been lost, so ignore the conversions
        return value
    def get_range(self, address):
        """Return an _OpxCell or _OpxRange wrapping *address*."""
        if not is_address(address):
            address = AddressRange(address)
        if address.has_sheet:
            sheet = self.workbook[address.sheet]
            sheet_dataonly = self.workbook_dataonly[address.sheet]
        else:
            sheet = self.workbook.active
            sheet_dataonly = self.workbook_dataonly.active
        with mock.patch('openpyxl.worksheet._reader.from_excel',
                        self.from_excel):
            # work around type coercion to datetime that causes some issues
            if address.is_unbounded_range:
                # bound the address range to the data in the spreadsheet
                address = address & AddressRange(
                    (1, 1, *self.max_col_row(sheet.title)),
                    sheet=sheet.title)
            cells = sheet[address.coordinate]
            cells_dataonly = sheet_dataonly[address.coordinate]
            if isinstance(cells, (Cell, MergedCell)):
                return _OpxCell(cells, cells_dataonly, address)
            else:
                return _OpxRange(cells, cells_dataonly, address)
    def get_used_range(self):
        return self.workbook.active.iter_rows()
    def get_active_sheet_name(self):
        return self.workbook.active.title
class ExcelOpxWrapperNoData(ExcelOpxWrapper):
    """ ExcelWrapper interface from openpyxl workbook,
    without data_only workbook """
    @staticmethod
    def excel_value(formula, value):
        """A openpyxl sheet does not have values for formula cells"""
        return None if formula else value
    class OpxRange(_OpxRange):
        # Range whose values are masked to None wherever a formula exists.
        def __new__(cls, range_data):
            values = tuple(
                tuple(ExcelOpxWrapperNoData.excel_value(*cell)
                      for cell in zip(row_f, row_v))
                for row_f, row_v in zip(range_data.formula, range_data.values)
            )
            return ExcelWrapper.RangeData.__new__(
                cls, range_data.address, range_data.formula, values)
    class OpxCell(_OpxCell):
        # Cell whose value is masked to None when a formula exists.
        def __new__(cls, cell_data):
            value = ExcelOpxWrapperNoData.excel_value(
                cell_data.formula, cell_data.values)
            return ExcelWrapper.RangeData.__new__(
                cls, cell_data.address, cell_data.formula, value)
    def __init__(self, workbook, filename='Unknown'):
        # Reuse the already-loaded workbook for both formula and data reads.
        super().__init__(filename=filename)
        assert isinstance(workbook, Workbook)
        self.workbook = workbook
        self.workbook_dataonly = workbook
        self.load_array_formulas()
    def get_range(self, address):
        # Wrap the parent's result so formula cells report no cached value.
        data = super().get_range(address)
        if isinstance(data.values, tuple):
            return self.OpxRange(data)
        else:
            return self.OpxCell(data)
| gpl-3.0 |
shuangshuangwang/spark | python/pyspark/sql/tests/test_conf.py | 1 | 2399 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark.testing.sqlutils import ReusedSQLTestCase
class ConfTests(ReusedSQLTestCase):
    def test_conf(self):
        """Exercise get/set/unset on spark.conf, including default values."""
        spark = self.spark
        spark.conf.set("bogo", "sipeo")
        self.assertEqual(spark.conf.get("bogo"), "sipeo")
        spark.conf.set("bogo", "ta")
        self.assertEqual(spark.conf.get("bogo"), "ta")
        self.assertEqual(spark.conf.get("bogo", "not.read"), "ta")
        self.assertEqual(spark.conf.get("not.set", "ta"), "ta")
        # Fixed: assertRaisesRegexp is a deprecated alias removed in
        # Python 3.12; use assertRaisesRegex.
        self.assertRaisesRegex(Exception, "not.set", lambda: spark.conf.get("not.set"))
        spark.conf.unset("bogo")
        self.assertEqual(spark.conf.get("bogo", "colombia"), "colombia")
        self.assertEqual(spark.conf.get("hyukjin", None), None)
        # This returns 'STATIC' because it's the default value of
        # 'spark.sql.sources.partitionOverwriteMode', and `defaultValue` in
        # `spark.conf.get` is unset.
        self.assertEqual(spark.conf.get("spark.sql.sources.partitionOverwriteMode"), "STATIC")
        # This returns None because 'spark.sql.sources.partitionOverwriteMode' is unset, but
        # `defaultValue` in `spark.conf.get` is set to None.
        self.assertEqual(spark.conf.get("spark.sql.sources.partitionOverwriteMode", None), None)
if __name__ == "__main__":
    import unittest
    from pyspark.sql.tests.test_conf import *  # noqa: F401
    try:
        # Use xmlrunner when available so CI gets XML test reports.
        import xmlrunner  # type: ignore[import]
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    except ImportError:
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |
dcroc16/skunk_works | google_appengine/lib/django-1.4/tests/modeltests/transactions/tests.py | 26 | 11721 | from __future__ import with_statement, absolute_import
from django.db import connection, transaction, IntegrityError
from django.test import TransactionTestCase, skipUnlessDBFeature
from .models import Reporter
class TransactionTests(TransactionTestCase):
    """Tests for the function-based transaction decorators
    (autocommit, commit_on_success, commit_manually)."""
    def create_a_reporter_then_fail(self, first, last):
        a = Reporter(first_name=first, last_name=last)
        a.save()
        raise Exception("I meant to do that")
    def remove_a_reporter(self, first_name):
        # Fixed: the lookup previously hard-coded first_name="Alice" and
        # silently ignored the argument.
        r = Reporter.objects.get(first_name=first_name)
        r.delete()
    def manually_managed(self):
        r = Reporter(first_name="Dirk", last_name="Gently")
        r.save()
        transaction.commit()
    def manually_managed_mistake(self):
        r = Reporter(first_name="Edward", last_name="Woodward")
        r.save()
        # Oops, I forgot to commit/rollback!
    @skipUnlessDBFeature('supports_transactions')
    def test_autocommit(self):
        """
        The default behavior is to autocommit after each save() action.
        """
        self.assertRaises(Exception,
            self.create_a_reporter_then_fail,
            "Alice", "Smith"
        )
        # The object created before the exception still exists
        self.assertEqual(Reporter.objects.count(), 1)
    @skipUnlessDBFeature('supports_transactions')
    def test_autocommit_decorator(self):
        """
        The autocommit decorator works exactly the same as the default behavior.
        """
        autocomitted_create_then_fail = transaction.autocommit(
            self.create_a_reporter_then_fail
        )
        self.assertRaises(Exception,
            autocomitted_create_then_fail,
            "Alice", "Smith"
        )
        # Again, the object created before the exception still exists
        self.assertEqual(Reporter.objects.count(), 1)
    @skipUnlessDBFeature('supports_transactions')
    def test_autocommit_decorator_with_using(self):
        """
        The autocommit decorator also works with a using argument.
        """
        autocomitted_create_then_fail = transaction.autocommit(using='default')(
            self.create_a_reporter_then_fail
        )
        self.assertRaises(Exception,
            autocomitted_create_then_fail,
            "Alice", "Smith"
        )
        # Again, the object created before the exception still exists
        self.assertEqual(Reporter.objects.count(), 1)
    @skipUnlessDBFeature('supports_transactions')
    def test_commit_on_success(self):
        """
        With the commit_on_success decorator, the transaction is only committed
        if the function doesn't throw an exception.
        """
        committed_on_success = transaction.commit_on_success(
            self.create_a_reporter_then_fail)
        self.assertRaises(Exception, committed_on_success, "Dirk", "Gently")
        # This time the object never got saved
        self.assertEqual(Reporter.objects.count(), 0)
    @skipUnlessDBFeature('supports_transactions')
    def test_commit_on_success_with_using(self):
        """
        The commit_on_success decorator also works with a using argument.
        """
        using_committed_on_success = transaction.commit_on_success(using='default')(
            self.create_a_reporter_then_fail
        )
        self.assertRaises(Exception,
            using_committed_on_success,
            "Dirk", "Gently"
        )
        # This time the object never got saved
        self.assertEqual(Reporter.objects.count(), 0)
    @skipUnlessDBFeature('supports_transactions')
    def test_commit_on_success_succeed(self):
        """
        If there aren't any exceptions, the data will get saved.
        """
        Reporter.objects.create(first_name="Alice", last_name="Smith")
        remove_comitted_on_success = transaction.commit_on_success(
            self.remove_a_reporter
        )
        remove_comitted_on_success("Alice")
        self.assertEqual(list(Reporter.objects.all()), [])
    @skipUnlessDBFeature('supports_transactions')
    def test_commit_on_success_exit(self):
        @transaction.autocommit()
        def gen_reporter():
            @transaction.commit_on_success
            def create_reporter():
                Reporter.objects.create(first_name="Bobby", last_name="Tables")
            create_reporter()
            # Much more formal
            r = Reporter.objects.get()
            r.first_name = "Robert"
            r.save()
        gen_reporter()
        r = Reporter.objects.get()
        self.assertEqual(r.first_name, "Robert")
    @skipUnlessDBFeature('supports_transactions')
    def test_manually_managed(self):
        """
        You can manually manage transactions if you really want to, but you
        have to remember to commit/rollback.
        """
        manually_managed = transaction.commit_manually(self.manually_managed)
        manually_managed()
        self.assertEqual(Reporter.objects.count(), 1)
    @skipUnlessDBFeature('supports_transactions')
    def test_manually_managed_mistake(self):
        """
        If you forget, you'll get bad errors.
        """
        manually_managed_mistake = transaction.commit_manually(
            self.manually_managed_mistake
        )
        self.assertRaises(transaction.TransactionManagementError,
            manually_managed_mistake)
    @skipUnlessDBFeature('supports_transactions')
    def test_manually_managed_with_using(self):
        """
        The commit_manually function also works with a using argument.
        """
        using_manually_managed_mistake = transaction.commit_manually(using='default')(
            self.manually_managed_mistake
        )
        self.assertRaises(transaction.TransactionManagementError,
            using_manually_managed_mistake
        )
class TransactionRollbackTests(TransactionTestCase):
    def execute_bad_sql(self):
        # Raw SQL insert that the test expects to fail with IntegrityError
        # (presumably by omitting required columns — see test_bad_sql).
        cursor = connection.cursor()
        cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');")
        transaction.set_dirty()
    @skipUnlessDBFeature('requires_rollback_on_dirty_transaction')
    def test_bad_sql(self):
        """
        Regression for #11900: If a function wrapped by commit_on_success
        writes a transaction that can't be committed, that transaction should
        be rolled back. The bug is only visible using the psycopg2 backend,
        though the fix is generally a good idea.
        """
        execute_bad_sql = transaction.commit_on_success(self.execute_bad_sql)
        self.assertRaises(IntegrityError, execute_bad_sql)
        transaction.rollback()
class TransactionContextManagerTests(TransactionTestCase):
def create_reporter_and_fail(self):
Reporter.objects.create(first_name="Bob", last_name="Holtzman")
raise Exception
@skipUnlessDBFeature('supports_transactions')
def test_autocommit(self):
"""
The default behavior is to autocommit after each save() action.
"""
with self.assertRaises(Exception):
self.create_reporter_and_fail()
# The object created before the exception still exists
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_context_manager(self):
"""
The autocommit context manager works exactly the same as the default
behavior.
"""
with self.assertRaises(Exception):
with transaction.autocommit():
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_autocommit_context_manager_with_using(self):
"""
The autocommit context manager also works with a using argument.
"""
with self.assertRaises(Exception):
with transaction.autocommit(using="default"):
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success(self):
"""
With the commit_on_success context manager, the transaction is only
committed if the block doesn't throw an exception.
"""
with self.assertRaises(Exception):
with transaction.commit_on_success():
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_with_using(self):
"""
The commit_on_success context manager also works with a using argument.
"""
with self.assertRaises(Exception):
with transaction.commit_on_success(using="default"):
self.create_reporter_and_fail()
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_succeed(self):
"""
If there aren't any exceptions, the data will get saved.
"""
Reporter.objects.create(first_name="Alice", last_name="Smith")
with transaction.commit_on_success():
Reporter.objects.filter(first_name="Alice").delete()
self.assertQuerysetEqual(Reporter.objects.all(), [])
@skipUnlessDBFeature('supports_transactions')
def test_commit_on_success_exit(self):
with transaction.autocommit():
with transaction.commit_on_success():
Reporter.objects.create(first_name="Bobby", last_name="Tables")
# Much more formal
r = Reporter.objects.get()
r.first_name = "Robert"
r.save()
r = Reporter.objects.get()
self.assertEqual(r.first_name, "Robert")
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed(self):
"""
You can manually manage transactions if you really want to, but you
have to remember to commit/rollback.
"""
with transaction.commit_manually():
Reporter.objects.create(first_name="Libby", last_name="Holtzman")
transaction.commit()
self.assertEqual(Reporter.objects.count(), 1)
@skipUnlessDBFeature('supports_transactions')
def test_manually_managed_mistake(self):
"""
If you forget, you'll get bad errors.
"""
with self.assertRaises(transaction.TransactionManagementError):
with transaction.commit_manually():
Reporter.objects.create(first_name="Scott", last_name="Browning")
    @skipUnlessDBFeature('supports_transactions')
    def test_manually_managed_with_using(self):
        """
        The commit_manually function also works with a using argument.
        """
        # Same as test_manually_managed_mistake, but selecting the
        # connection explicitly via using="default".
        with self.assertRaises(transaction.TransactionManagementError):
            with transaction.commit_manually(using="default"):
                Reporter.objects.create(first_name="Walter", last_name="Cronkite")
    @skipUnlessDBFeature('requires_rollback_on_dirty_transaction')
    def test_bad_sql(self):
        """
        Regression for #11900: If a block wrapped by commit_on_success
        writes a transaction that can't be committed, that transaction should
        be rolled back. The bug is only visible using the psycopg2 backend,
        though the fix is generally a good idea.
        """
        # NOTE(review): the raw INSERT is expected to leave the transaction
        # in a state that cannot be committed (hence the IntegrityError on
        # block exit) -- confirm against the Reporter model definition.
        with self.assertRaises(IntegrityError):
            with transaction.commit_on_success():
                cursor = connection.cursor()
                cursor.execute("INSERT INTO transactions_reporter (first_name, last_name) VALUES ('Douglas', 'Adams');")
                # Mark the transaction dirty so commit_on_success attempts a
                # commit on exit.
                transaction.set_dirty()
        # The connection must be usable again after the failed commit.
        transaction.rollback()
| mit |
tomasreimers/tensorflow-emscripten | tensorflow/contrib/factorization/python/ops/gmm_ops_test.py | 18 | 7929 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for gmm_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import time
# TODO: #6568 Remove this hack that makes dlopen() not crash.
if hasattr(sys, 'getdlopenflags') and hasattr(sys, 'setdlopenflags'):
import ctypes
sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.factorization.python.ops import gmm_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed as random_seed_lib
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
class GmmOpsTest(test.TestCase):
  """Tests for gmm_ops: the covariance helper and GMM training updates."""

  def setUp(self):
    """Builds two deterministic synthetic 2-D data sets for the tests."""
    self.num_examples = 1000
    self.iterations = 40
    self.seed = 4
    # Seed both TF and numpy so the expected numeric values below are stable.
    random_seed_lib.set_random_seed(self.seed)
    np.random.seed(self.seed * 2)
    self.data, self.true_assignments = self.make_data(self.num_examples)
    # Generate more complicated data.
    self.centers = [[1, 1], [-1, 0.5], [2, 1]]
    self.more_data, self.more_true_assignments = self.make_data_from_centers(
        self.num_examples, self.centers)

  @staticmethod
  def make_data(num_vectors):
    """Generates 2-dimensional data centered on (2,2), (-1,-1).

    Args:
      num_vectors: number of training examples.

    Returns:
      A tuple containing the data as a numpy array and the cluster ids.
    """
    vectors = []
    classes = []
    for _ in xrange(num_vectors):
      # Draw each point from one of two Gaussian blobs with equal probability.
      if np.random.random() > 0.5:
        vectors.append([np.random.normal(2.0, 0.6), np.random.normal(2.0, 0.9)])
        classes.append(0)
      else:
        vectors.append(
            [np.random.normal(-1.0, 0.4), np.random.normal(-1.0, 0.5)])
        classes.append(1)
    return np.asarray(vectors), classes

  @staticmethod
  def make_data_from_centers(num_vectors, centers):
    """Generates 2-dimensional data with random centers.

    Args:
      num_vectors: number of training examples.
      centers: a list of random 2-dimensional centers.

    Returns:
      A tuple containing the data as a numpy array and the cluster ids.
    """
    vectors = []
    classes = []
    for _ in xrange(num_vectors):
      current_class = np.random.random_integers(0, len(centers) - 1)
      vectors.append([
          np.random.normal(centers[current_class][0],
                           np.random.random_sample()),
          np.random.normal(centers[current_class][1], np.random.random_sample())
      ])
      classes.append(current_class)
    # Bug fix: this previously returned len(centers) instead of the
    # per-example cluster ids, contradicting the docstring above and the
    # 'more_true_assignments' consumer in setUp().
    return np.asarray(vectors), classes

  def test_covariance(self):
    """Checks gmm_ops._covariance against numpy.cov (full and diagonal)."""
    start_time = time.time()
    # np.cov expects rows to be variables, so work with the transpose.
    data = self.data.T
    np_cov = np.cov(data)
    logging.info('Numpy took %f', time.time() - start_time)
    start_time = time.time()
    with self.test_session() as sess:
      op = gmm_ops._covariance(
          constant_op.constant(
              data.T, dtype=dtypes.float32), False)
      op_diag = gmm_ops._covariance(
          constant_op.constant(
              data.T, dtype=dtypes.float32), True)
      variables.global_variables_initializer().run()
      tf_cov = sess.run(op)
      np.testing.assert_array_almost_equal(np_cov, tf_cov)
      logging.info('Tensorflow took %f', time.time() - start_time)
      # The diagonal variant must match the diagonal of the full covariance.
      tf_cov = sess.run(op_diag)
      np.testing.assert_array_almost_equal(
          np.diag(np_cov), np.ravel(tf_cov), decimal=5)

  def test_simple_cluster(self):
    """Tests that the clusters are correct."""
    num_classes = 2
    graph = ops.Graph()
    with graph.as_default() as g:
      g.seed = 5
      with self.test_session() as sess:
        data = constant_op.constant(self.data, dtype=dtypes.float32)
        _, assignments, _, training_op = gmm_ops.gmm(data,
                                                     'random',
                                                     num_classes,
                                                     random_seed=self.seed)
        variables.global_variables_initializer().run()
        for _ in xrange(self.iterations):
          sess.run(training_op)
        assignments = sess.run(assignments)
        # With these fixed seeds the learned labels line up directly with
        # the generating labels (no permutation handling needed).
        accuracy = np.mean(
            np.asarray(self.true_assignments) == np.squeeze(assignments))
        logging.info('Accuracy: %f', accuracy)
        self.assertGreater(accuracy, 0.98)

  def testParams(self):
    """Tests that the params work as intended."""
    num_classes = 2
    with self.test_session() as sess:
      # Experiment 1. Update weights only ('w'): means/covariances frozen.
      data = constant_op.constant(self.data, dtype=dtypes.float32)
      gmm_tool = gmm_ops.GmmAlgorithm([data], num_classes,
                                      [[3.0, 3.0], [0.0, 0.0]], 'w')
      training_ops = gmm_tool.training_ops()
      variables.global_variables_initializer().run()
      for _ in xrange(self.iterations):
        sess.run(training_ops)
      # Only the probability to each class is updated.
      alphas = sess.run(gmm_tool.alphas())
      self.assertGreater(alphas[1], 0.6)
      means = sess.run(gmm_tool.clusters())
      np.testing.assert_almost_equal(
          np.expand_dims([[3.0, 3.0], [0.0, 0.0]], 1), means)
      covs = sess.run(gmm_tool.covariances())
      np.testing.assert_almost_equal(covs[0], covs[1])
      # Experiment 2. Update means and covariances ('mc'): weights frozen.
      gmm_tool = gmm_ops.GmmAlgorithm([data], num_classes,
                                      [[3.0, 3.0], [0.0, 0.0]], 'mc')
      training_ops = gmm_tool.training_ops()
      variables.global_variables_initializer().run()
      for _ in xrange(self.iterations):
        sess.run(training_ops)
      alphas = sess.run(gmm_tool.alphas())
      self.assertAlmostEqual(alphas[0], alphas[1])
      means = sess.run(gmm_tool.clusters())
      np.testing.assert_almost_equal(
          np.expand_dims([[2.0, 2.0], [-1.0, -1.0]], 1), means, decimal=1)
      covs = sess.run(gmm_tool.covariances())
      np.testing.assert_almost_equal(
          [[0.371111, -0.0050774], [-0.0050774, 0.8651744]], covs[0], decimal=4)
      np.testing.assert_almost_equal(
          [[0.146976, 0.0259463], [0.0259463, 0.2543971]], covs[1], decimal=4)
      # Experiment 3. Update covariances only ('c').
      gmm_tool = gmm_ops.GmmAlgorithm([data], num_classes,
                                      [[-1.0, -1.0], [1.0, 1.0]], 'c')
      training_ops = gmm_tool.training_ops()
      variables.global_variables_initializer().run()
      for _ in xrange(self.iterations):
        sess.run(training_ops)
      alphas = sess.run(gmm_tool.alphas())
      self.assertAlmostEqual(alphas[0], alphas[1])
      means = sess.run(gmm_tool.clusters())
      np.testing.assert_almost_equal(
          np.expand_dims([[-1.0, -1.0], [1.0, 1.0]], 1), means)
      covs = sess.run(gmm_tool.covariances())
      np.testing.assert_almost_equal(
          [[0.1299582, 0.0435872], [0.0435872, 0.2558578]], covs[0], decimal=5)
      np.testing.assert_almost_equal(
          [[3.195385, 2.6989155], [2.6989155, 3.3881593]], covs[1], decimal=5)
if __name__ == '__main__':
test.main()
| apache-2.0 |
ThinkingBridge/platform_external_chromium_org | third_party/protobuf/python/google/protobuf/internal/encoder.py | 484 | 25695 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Code for encoding protocol message primitives.
Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.
This code is designed to push the Python interpreter's performance to the
limits.
The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
sizer takes a value of this field's type and computes its byte size. The
encoder takes a writer function and a value. It encodes the value into byte
strings and invokes the writer function to write those strings. Typically the
writer function is the write() method of a cStringIO.
We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them. In particular:
* We copy any needed global functions to local variables, so that we do not need
to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not sharing
does not burden us too much. For example, encoders for repeated fields do
not just call the encoders for singular fields in a loop because this would
add an extra function call overhead for every loop iteration; instead, we
manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
instructions to pop the result of the last statement off the stack, push
None onto the stack, and then return that. If we really don't care what
value is returned, then we can save two instructions by returning the
result of the last statement. It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""
__author__ = 'kenton@google.com (Kenton Varda)'
import struct
from google.protobuf.internal import wire_format
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
# Used by _FloatingPointEncoder below to detect non-finite values.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
def _VarintSize(value):
"""Compute the size of a varint value."""
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _SignedVarintSize(value):
"""Compute the size of a signed varint value."""
if value < 0: return 10
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  # (A tag is the varint encoding of (field_number << 3) | wire_type, and
  # the wire type only occupies the low 3 bits.)
  return _VarintSize(wire_format.PackTag(field_number, 0))
# --------------------------------------------------------------------
# In this section we define some generic sizers. Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value. Typically compute_value_size is _VarintSize."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # Packed layout: one tag + varint payload length + concatenated
        # values, so the payload size is summed first.
        result = 0
        for element in value:
          result += compute_value_size(element)
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated layout: every element carries its own tag.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        # Singular layout: one tag plus one value.
        return tag_size + compute_value_size(value)
      return FieldSize
  return SpecificSizer
def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size. modify_value is typically ZigZagEncode."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # Packed: one tag + varint payload length + transformed values.
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated: a tag per element.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize
  return SpecificSizer
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field. The input is the size
  of one value."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # Fixed-width elements make the packed payload a simple product.
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      # Per-element cost is constant, so precompute it once.
      element_size = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      field_size = value_size + tag_size
      def FieldSize(value):
        # Size is constant; the value itself is ignored.
        return field_size
      return FieldSize
  return SpecificSizer
# ====================================================================
# Here we declare a sizer constructor for each field type. Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.
# int32/int64/enum use the *signed* sizer: negative values are sign-extended
# to 64 bits on the wire and therefore always cost 10 bytes.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
# sint32/sint64 are ZigZag-transformed first, keeping small negatives small.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
BoolSizer = _FixedSizer(1)
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  # Length-delimited fields are never packed.
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Sizes are measured on the UTF-8 encoding, not the unicode length.
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  # Length-delimited fields are never packed.
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Raw byte length; unlike strings there is no encoding step.
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""
  # Groups are bracketed by START_GROUP and END_GROUP tags, hence "* 2".
  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Submessages are length-delimited: varint length + payload.
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize
# --------------------------------------------------------------------
# MessageSet is special.
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.
  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Fixed overhead per item: the Item group's start/end tags (field 1),
  # the type_id tag (field 2) plus its varint value (this extension's
  # field number), and the message tag (field 3).
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize
  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l
  return FieldSize
# ====================================================================
# Encoders!
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
local_chr = chr
def EncodeVarint(write, value):
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeVarint
def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
local_chr = chr
def EncodeSignedVarint(write, value):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeSignedVarint
# Instantiate the two varint encoders once at import time; they are used
# directly by field encoders and by _VarintBytes()/TagBytes() below.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes. This is only
  called at startup time so it doesn't need to be fast."""
  # Use a list's append as the write callback, then join the chunks.
  pieces = []
  _EncodeVarint(pieces.append, value)
  return "".join(pieces)
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes. Only called at startup."""
  # A tag is the varint encoding of (field_number << 3) | wire_type.
  return _VarintBytes(wire_format.PackTag(field_number, wire_type))
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.
  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """
  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # Packed: one tag, then the total payload size as a varint, then
        # the values back to back (two passes over the value list).
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        # Non-packed repeated: tag + value for every element.
        for element in value:
          write(tag_bytes)
          encode_value(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, value)
      return EncodeField
  return SpecificEncoder
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value. Usually modify_value is ZigZagEncode."""
  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # Note: modify_value is applied twice per element (once for the
        # size pass, once for the write pass).
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, modify_value(element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, modify_value(value))
      return EncodeField
  return SpecificEncoder
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.
  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """
  # Fixed width is derived from the format once, at startup.
  value_size = struct.calcsize(format)
  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # Payload length is simply count * element width.
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField
  return SpecificEncoder
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.
  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.
  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """
  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      # These are the IEEE-754 single-precision bit patterns for +inf,
      # -inf and a quiet NaN.
      if value == _POS_INF:
        write('\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x80\xFF')
      elif value != value:          # NaN
        write('\x00\x00\xC0\x7F')
      else:
        # Not a non-finite value after all: re-raise the original
        # struct.pack error.
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      # IEEE-754 double-precision bit patterns, little-endian.
      if value == _POS_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:                         # NaN
        write('\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)
  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          # NOTE(review): SystemError is what struct.pack raised for
          # non-finite floats on some Python 2 builds -- confirm for the
          # targeted interpreter versions.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        try:
          return write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField
  return SpecificEncoder
# ====================================================================
# Here we declare an encoder constructor for each field type. These work
# very similarly to sizer constructors, described earlier.
# int32/int64/enum use the signed encoder so negatives take the full
# ten bytes on the wire.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
# sint32/sint64 ZigZag-encode first, so the plain unsigned encoder is used.
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)
# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""
  # Bools serialize as a single varint byte: 0 or 1.
  false_byte = chr(0)
  true_byte = chr(1)
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value):
      write(tag_bytes)
      # One byte per element, so the payload length equals the count.
      local_EncodeVarint(write, len(value))
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  # Length-delimited fields are never packed.
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        # Encode to UTF-8 first so the length prefix counts bytes.
        encoded = element.encode('utf-8')
        write(tag)
        local_EncodeVarint(write, local_len(encoded))
        write(encoded)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      encoded = value.encode('utf-8')
      write(tag)
      local_EncodeVarint(write, local_len(encoded))
      return write(encoded)
    return EncodeField
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field."""
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  # Length-delimited fields are never packed.
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        # Raw bytes: tag + varint byte count + payload, no encoding step.
        write(tag)
        local_EncodeVarint(write, local_len(element))
        write(element)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(tag)
      local_EncodeVarint(write, local_len(value))
      return write(value)
    return EncodeField
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field."""
  # Groups have no length prefix; they are delimited by start/end tags.
  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write)
        write(end_tag)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(start_tag)
      value._InternalSerialize(write)
      return write(end_tag)
    return EncodeField
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field."""
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        # Submessage layout: tag + varint length (ByteSize) + payload.
        write(tag)
        local_EncodeVarint(write, element.ByteSize())
        element._InternalSerialize(write)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(tag)
      local_EncodeVarint(write, value.ByteSize())
      return value._InternalSerialize(write)
    return EncodeField
# --------------------------------------------------------------------
# As before, MessageSet is special.
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.
  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything before the payload length is constant, so precompute it:
  # Item group start tag, type_id tag + value, and the message tag.
  start_bytes = "".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint
  def FieldSize(value):
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize())
    value._InternalSerialize(write)
    return write(end_bytes)
  return FieldSize
| bsd-3-clause |
Bysmyyr/chromium-crosswalk | ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py | 31 | 12485 | #!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script lays out the PNaCl translator files for a
normal Chrome installer, for one platform. Once run num-of-arches times,
the result can then be packed into a multi-CRX zip file.
This script depends on and pulls in the translator nexes and libraries
from the PNaCl translator. It also depends on the pnacl_irt_shim.
"""
import json
import logging
import optparse
import os
import platform
import re
import shutil
import subprocess
import sys
J = os.path.join  # Short alias used throughout this script to build paths.
######################################################################
# Target arch and build arch junk to convert between all the
# silly conventions between SCons, Chrome and PNaCl.
# The version of the arch used by NaCl manifest files.
# This is based on the machine "building" this extension.
# We also used this to identify the arch-specific different versions of
# this extension.
def CanonicalArch(arch):
  """Return the canonical arch name used by NaCl manifest files.

  Args:
    arch: an architecture name in any of the conventions used by SCons,
        Chrome or PNaCl (e.g. 'x64', 'amd64', 'i686', 'armv7').
  Returns:
    One of 'x86-64', 'x86-32', 'arm', 'mips32', or None if unknown.
  """
  if arch in ('x86_64', 'x86-64', 'x64', 'amd64'):
    return 'x86-64'
  # TODO(jvoung): be more specific about the arm architecture version?
  if arch in ('arm', 'armv7'):
    return 'arm'
  # BUG FIX: the original wrote `arch in ('mipsel')`, which is substring
  # membership in the *string* 'mipsel' (so 'el', 'mips', 'se', ... all
  # matched).  A one-element tuple needs a trailing comma.
  if arch in ('mipsel',):
    return 'mips32'
  if re.match('^i.86$', arch) or arch in ('x86_32', 'x86-32', 'ia32', 'x86'):
    return 'x86-32'
  return None
def GetBuildArch():
  # Canonical arch of the machine doing the build (not the target arch).
  arch = platform.machine()
  return CanonicalArch(arch)

# Computed once at import time; may be None on unrecognized hosts.
BUILD_ARCH = GetBuildArch()
# All architectures this packaging script knows how to lay out.
ARCHES = ['x86-32', 'x86-64', 'arm', 'mips32']

def IsValidArch(arch):
  # True iff *arch* is one of the canonical names in ARCHES.
  return arch in ARCHES
######################################################################
# Normalize the platform name to be the way SCons finds chrome binaries.
# This is based on the platform "building" the extension.
def GetBuildPlatform():
  """Map sys.platform onto the platform name SCons uses for chrome binaries.

  Returns:
    'mac', 'linux' or 'windows'.
  Raises:
    Exception: if the host platform is not one of the above.
  """
  host = sys.platform
  if host == 'darwin':
    return 'mac'
  if host.startswith('linux'):
    return 'linux'
  if host in ('cygwin', 'win32'):
    return 'windows'
  raise Exception('Unknown platform: %s' % host)

# Platform of the machine building the extension, computed once at import.
BUILD_PLATFORM = GetBuildPlatform()
def DetermineInstallerArches(target_arch):
  """Return the list of arches to package for *target_arch* on this host.

  Windows installers bundle both x86 flavors; every other platform
  packages the single canonical arch.  Raises on unknown arches.
  """
  canonical = CanonicalArch(target_arch)
  if not IsValidArch(canonical):
    raise Exception('Unknown target_arch %s' % target_arch)
  if BUILD_PLATFORM != 'windows':
    return [canonical]
  # On windows, we need x86-32 and x86-64 (assuming non-windows RT).
  if canonical.startswith('x86'):
    return ['x86-32', 'x86-64']
  raise Exception('Unknown target_arch on windows w/ target_arch == %s' %
                  target_arch)
######################################################################
class PnaclPackaging(object):
  """Holds packaging configuration and writes the pnacl.json manifest."""

  package_base = os.path.dirname(__file__)

  # File paths that are set from the command line.
  pnacl_template = None
  package_version_path = None
  pnacl_package = 'pnacl_newlib'

  # Agreed-upon name for pnacl-specific info.
  pnacl_json = 'pnacl.json'

  @staticmethod
  def SetPnaclInfoTemplatePath(path):
    # Template JSON file used as the basis for the generated pnacl.json.
    PnaclPackaging.pnacl_template = path

  @staticmethod
  def SetPackageVersionPath(path):
    # Location of the package_version.py helper script.
    PnaclPackaging.package_version_path = path

  @staticmethod
  def SetPnaclPackageName(name):
    PnaclPackaging.pnacl_package = name

  @staticmethod
  def PnaclToolsRevision():
    """Query package_version.py for the revision of the pnacl package."""
    pkg_ver_cmd = [sys.executable, PnaclPackaging.package_version_path,
                   'getrevision',
                   '--revision-package', PnaclPackaging.pnacl_package]
    return subprocess.check_output(pkg_ver_cmd).strip()

  @staticmethod
  def GeneratePnaclInfo(target_dir, abi_version, arch):
    """Write the pnacl.json manifest into *target_dir*.

    NOTE(review): abi_version is currently unused in this visible body --
    confirm whether it should be recorded in the manifest.
    """
    # A note on versions: pnacl_version is the version of translator built
    # by the NaCl repo, while abi_version is bumped when the NaCl sandbox
    # actually changes.
    pnacl_version = PnaclPackaging.PnaclToolsRevision()
    with open(PnaclPackaging.pnacl_template, 'r') as pnacl_template_fd:
      pnacl_template = json.load(pnacl_template_fd)
      out_name = J(target_dir, UseWhitelistedChars(PnaclPackaging.pnacl_json,
                                                   None))
      with open(out_name, 'w') as output_fd:
        pnacl_template['pnacl-arch'] = arch
        pnacl_template['pnacl-version'] = pnacl_version
        json.dump(pnacl_template, output_fd, sort_keys=True, indent=4)
######################################################################
class PnaclDirs(object):
  """Tracks the translator source tree and the packaging output layout."""

  translator_dir = None
  output_dir = None

  @staticmethod
  def SetTranslatorRoot(path):
    PnaclDirs.translator_dir = path

  @staticmethod
  def TranslatorRoot():
    return PnaclDirs.translator_dir

  @staticmethod
  def LibDir(target_arch):
    """Directory holding the native libraries for *target_arch*."""
    return os.path.join(PnaclDirs.TranslatorRoot(), 'translator',
                        '%s' % target_arch)

  @staticmethod
  def SandboxedCompilerDir(target_arch):
    """Directory holding the sandboxed compiler nexes for *target_arch*."""
    return os.path.join(PnaclDirs.TranslatorRoot(), 'translator',
                        target_arch, 'bin')

  @staticmethod
  def SetOutputDir(path):
    PnaclDirs.output_dir = path

  @staticmethod
  def OutputDir():
    return PnaclDirs.output_dir

  @staticmethod
  def OutputAllDir(version_quad):
    return os.path.join(PnaclDirs.OutputDir(), version_quad)

  @staticmethod
  def OutputArchBase(arch):
    return '%s' % arch

  @staticmethod
  def OutputArchDir(arch):
    # Nest this in another directory so that the layout will be the same
    # as the "all"/universal version.
    parent_dir = os.path.join(PnaclDirs.OutputDir(),
                              PnaclDirs.OutputArchBase(arch))
    return (parent_dir,
            os.path.join(parent_dir, PnaclDirs.OutputArchBase(arch)))
######################################################################
def StepBanner(short_desc, long_desc):
  """Log a tab-separated short/long description marking a packaging step."""
  logging.info('**** %s\t%s', short_desc, long_desc)
def Clean():
  """Delete the previous packaging output directory, if one exists."""
  out_dir = PnaclDirs.OutputDir()
  StepBanner('CLEAN', 'Cleaning out old packaging: %s' % out_dir)
  if os.path.isdir(out_dir):
    # Remove the whole tree; it is regenerated from scratch afterwards.
    shutil.rmtree(out_dir)
  else:
    logging.info('Clean skipped -- no previous output directory!')
######################################################################
def UseWhitelistedChars(orig_basename, arch):
  """ Make the filename match the pattern expected by nacl_file_host.

  Currently, this assumes there is prefix "pnacl_public_" and
  that the allowed chars are in the set [a-zA-Z0-9_].
  """
  tagged = ('pnacl_public_%s_%s' % (arch, orig_basename)
            if arch else 'pnacl_public_%s' % orig_basename)
  result = re.sub(r'[^a-zA-Z0-9_]', '_', tagged)
  logging.info('UseWhitelistedChars using: %s' % result)
  return result
def CopyFlattenDirsAndPrefix(src_dir, arch, dest_dir):
  """ Copy files from src_dir to dest_dir.

  When copying, also rename the files such that they match the white-listing
  pattern in chrome/browser/nacl_host/nacl_file_host.cc.
  """
  if not os.path.isdir(src_dir):
    raise Exception('Copy dir failed, directory does not exist: %s' % src_dir)
  for (root, dirs, files) in os.walk(src_dir, followlinks=True):
    for f in files:
      # Assume a flat directory.
      assert (f == os.path.basename(f))
      full_name = J(root, f)
      # Rename to the whitelisted 'pnacl_public_<arch>_<name>' form.
      target_name = UseWhitelistedChars(f, arch)
      shutil.copy(full_name, J(dest_dir, target_name))
def BuildArchForInstaller(version_quad, arch, lib_overrides):
  """ Build an architecture specific version for the chrome installer.

  NOTE(review): version_quad is currently unused in this visible body.
  """
  target_dir = PnaclDirs.OutputDir()
  StepBanner('BUILD INSTALLER',
             'Packaging for arch %s in %s' % (arch, target_dir))
  # Copy llc.nexe and ld.nexe, but with some renaming and directory flattening.
  CopyFlattenDirsAndPrefix(PnaclDirs.SandboxedCompilerDir(arch),
                           arch,
                           target_dir)
  # Copy native libraries, also with renaming and directory flattening.
  CopyFlattenDirsAndPrefix(PnaclDirs.LibDir(arch), arch, target_dir)
  # Also copy files from the list of overrides.
  # This needs the arch tagged onto the name too, like the other files.
  if arch in lib_overrides:
    for (override_lib, desired_name) in lib_overrides[arch]:
      target_name = UseWhitelistedChars(desired_name, arch)
      shutil.copy(override_lib, J(target_dir, target_name))
def BuildInstallerStyle(version_quad, lib_overrides, arches):
  """ Package the pnacl component for use within the chrome installer
  infrastructure.  These files need to be named in a special way
  so that white-listing of files is easy.
  """
  StepBanner("BUILD_ALL", "Packaging installer for version: %s" % version_quad)
  for arch in arches:
    BuildArchForInstaller(version_quad, arch, lib_overrides)
  # Generate pnacl info manifest.
  # Hack around the fact that there may be more than one arch, on Windows.
  if len(arches) == 1:
    arches = arches[0]
  PnaclPackaging.GeneratePnaclInfo(PnaclDirs.OutputDir(), version_quad, arches)
######################################################################
def Main():
  """Parse command-line options and lay out the PNaCl component files.

  Returns:
    0 on success.
  Raises:
    Exception: on missing/invalid configuration options.
  """
  usage = 'usage: %prog [options] version_arg'
  parser = optparse.OptionParser(usage)
  # We may want to accept a target directory to dump it in the usual
  # output directory (e.g., scons-out).
  parser.add_option('-c', '--clean', dest='clean',
                    action='store_true', default=False,
                    help='Clean out destination directory first.')
  parser.add_option('-d', '--dest', dest='dest',
                    help='The destination root for laying out the extension')
  parser.add_option('-L', '--lib_override',
                    dest='lib_overrides', action='append', default=[],
                    help='Specify path to a fresher native library ' +
                    'that overrides the tarball library with ' +
                    '(arch,libfile,librenamed) tuple.')
  parser.add_option('-t', '--target_arch',
                    dest='target_arch', default=None,
                    help='Only generate the chrome installer version for arch')
  parser.add_option('--info_template_path',
                    dest='info_template_path', default=None,
                    help='Path of the info template file')
  parser.add_option('--package_version_path', dest='package_version_path',
                    default=None, help='Path to package_version.py script.')
  parser.add_option('--pnacl_package_name', dest='pnacl_package_name',
                    default=None, help='Name of PNaCl package.')
  parser.add_option('--pnacl_translator_path', dest='pnacl_translator_path',
                    default=None, help='Location of PNaCl translator.')
  parser.add_option('-v', '--verbose', dest='verbose', default=False,
                    action='store_true',
                    help='Print verbose debug messages.')
  (options, args) = parser.parse_args()
  if options.verbose:
    logging.getLogger().setLevel(logging.DEBUG)
  else:
    logging.getLogger().setLevel(logging.ERROR)
  logging.info('pnacl_component_crx_gen w/ options %s and args %s\n'
               % (options, args))
  # Set destination directory before doing any cleaning, etc.
  if options.dest is None:
    raise Exception('Destination path must be set.')
  PnaclDirs.SetOutputDir(options.dest)
  if options.clean:
    Clean()
  if options.pnacl_translator_path is None:
    raise Exception('PNaCl translator path must be set.')
  PnaclDirs.SetTranslatorRoot(options.pnacl_translator_path)
  if options.info_template_path:
    PnaclPackaging.SetPnaclInfoTemplatePath(options.info_template_path)
  if options.package_version_path:
    PnaclPackaging.SetPackageVersionPath(options.package_version_path)
  else:
    # BUG FIX: corrected typo in the error message ('verison' -> 'version').
    raise Exception('Package version script must be specified.')
  if options.pnacl_package_name:
    PnaclPackaging.SetPnaclPackageName(options.pnacl_package_name)
  # Collect -L overrides into a dict keyed by canonical arch.
  lib_overrides = {}
  for o in options.lib_overrides:
    arch, override_lib, desired_name = o.split(',')
    arch = CanonicalArch(arch)
    if not IsValidArch(arch):
      raise Exception('Unknown arch for -L: %s (from %s)' % (arch, o))
    if not os.path.isfile(override_lib):
      raise Exception('Override native lib not a file for -L: %s (from %s)' %
                      (override_lib, o))
    override_list = lib_overrides.get(arch, [])
    override_list.append((override_lib, desired_name))
    lib_overrides[arch] = override_list
  if len(args) != 1:
    parser.print_help()
    parser.error('Incorrect number of arguments')
  abi_version = int(args[0])
  arches = DetermineInstallerArches(options.target_arch)
  BuildInstallerStyle(abi_version, lib_overrides, arches)
  return 0
if __name__ == '__main__':
  # Propagate Main()'s return code to the shell.
  sys.exit(Main())
| bsd-3-clause |
tushar7795/MicroBlog | flask/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py | 59 | 3942 | # mysql/zxjdbc.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+zxjdbc
:name: zxjdbc for Jython
:dbapi: zxjdbc
:connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/\
<database>
:driverurl: http://dev.mysql.com/downloads/connector/j/
.. note:: Jython is not supported by current versions of SQLAlchemy. The
zxjdbc dialect should be considered as experimental.
Character Sets
--------------
SQLAlchemy zxjdbc dialects pass unicode straight through to the
zxjdbc/JDBC layer. To allow multiple character sets to be sent from the
MySQL Connector/J JDBC driver, by default SQLAlchemy sets its
``characterEncoding`` connection property to ``UTF-8``. It may be
overridden via a ``create_engine`` URL parameter.
"""
import re
from ... import types as sqltypes, util
from ...connectors.zxJDBC import ZxJDBCConnector
from .base import BIT, MySQLDialect, MySQLExecutionContext
class _ZxJDBCBit(BIT):
    def result_processor(self, dialect, coltype):
        """Converts boolean or byte arrays from MySQL Connector/J to longs."""
        def process(value):
            if value is None:
                return None
            if isinstance(value, bool):
                return int(value)
            # Fold the big-endian byte sequence into a single integer.
            result = 0
            for byte in value:
                result = (result << 8) | (byte & 0xff)
            return result
        return process
class MySQLExecutionContext_zxjdbc(MySQLExecutionContext):
    """Execution context which fetches LAST_INSERT_ID() explicitly,
    since the zxjdbc DBAPI does not expose a usable lastrowid."""

    def get_lastrowid(self):
        """Return the id generated by the most recent INSERT.

        The cursor is now closed even when execute()/fetchone() raises
        (the original leaked the cursor on error).
        """
        cursor = self.create_cursor()
        try:
            cursor.execute("SELECT LAST_INSERT_ID()")
            lastrowid = cursor.fetchone()[0]
        finally:
            cursor.close()
        return lastrowid
class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
    """MySQL dialect for the Jython zxjdbc (JDBC) driver."""

    jdbc_db_name = 'mysql'
    jdbc_driver_name = 'com.mysql.jdbc.Driver'

    execution_ctx_cls = MySQLExecutionContext_zxjdbc

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            sqltypes.Time: sqltypes.Time,
            BIT: _ZxJDBCBit
        }
    )

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
        opts = dict((row[0], row[1]) for row in self._compat_fetchall(rs))
        for key in ('character_set_connection', 'character_set'):
            if opts.get(key, None):
                return opts[key]
        util.warn("Could not detect the connection character set. "
                  "Assuming latin1.")
        return 'latin1'

    def _driver_kwargs(self):
        """return kw arg dict to be sent to connect()."""
        return dict(characterEncoding='UTF-8', yearIsDateType='false')

    def _extract_error_code(self, exception):
        """Extract the numeric MySQL error code from a DBAPI exception."""
        # e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
        # [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
        m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
        # BUG FIX: guard against a non-matching message; the original called
        # m.group(1) unconditionally and raised AttributeError when the
        # pattern was absent.  Return None (no code) in that case.
        if m is not None:
            return int(m.group(1))
        return None

    def _get_server_version_info(self, connection):
        """Parse the JDBC dbversion string into a (major, minor, ...) tuple."""
        dbapi_con = connection.connection
        version = []
        r = re.compile(r'[.\-]')
        for n in r.split(dbapi_con.dbversion):
            try:
                version.append(int(n))
            except ValueError:
                # Non-numeric fragments (e.g. 'log') are kept as strings.
                version.append(n)
        return tuple(version)

dialect = MySQLDialect_zxjdbc
| bsd-3-clause |
912/M-new | virtualenvironment/experimental/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/predicates.py | 114 | 1787 | """
This module houses the GEOS ctypes prototype functions for the
unary and binary predicate operations on geometries.
"""
from ctypes import c_char, c_char_p, c_double
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
## Binary & unary predicate functions ##
def binary_predicate(func, *args):
    "For GEOS binary predicate functions."
    # Two geometry pointers, plus any extra argument types the caller needs
    # (e.g. a tolerance double or a pattern string).
    func.argtypes = [GEOM_PTR, GEOM_PTR] + list(args)
    func.restype = c_char
    func.errcheck = check_predicate
    return func
def unary_predicate(func):
    "For GEOS unary predicate functions."
    # One geometry pointer in, a single char out, validated by
    # check_predicate (which converts to bool / raises on error).
    func.argtypes = [GEOM_PTR]
    func.restype = c_char
    func.errcheck = check_predicate
    return func
## Unary Predicates ##
# Each binding wraps the named GEOS C function (obtained via GEOSFunc from
# the thread-safe prototypes module) with its ctypes signature.
geos_hasz = unary_predicate(GEOSFunc('GEOSHasZ'))
geos_isempty = unary_predicate(GEOSFunc('GEOSisEmpty'))
geos_isring = unary_predicate(GEOSFunc('GEOSisRing'))
geos_issimple = unary_predicate(GEOSFunc('GEOSisSimple'))
geos_isvalid = unary_predicate(GEOSFunc('GEOSisValid'))
## Binary Predicates ##
# GEOSEqualsExact additionally takes a double tolerance; GEOSRelatePattern
# additionally takes a DE-9IM pattern string.
geos_contains = binary_predicate(GEOSFunc('GEOSContains'))
geos_crosses = binary_predicate(GEOSFunc('GEOSCrosses'))
geos_disjoint = binary_predicate(GEOSFunc('GEOSDisjoint'))
geos_equals = binary_predicate(GEOSFunc('GEOSEquals'))
geos_equalsexact = binary_predicate(GEOSFunc('GEOSEqualsExact'), c_double)
geos_intersects = binary_predicate(GEOSFunc('GEOSIntersects'))
geos_overlaps = binary_predicate(GEOSFunc('GEOSOverlaps'))
geos_relatepattern = binary_predicate(GEOSFunc('GEOSRelatePattern'), c_char_p)
geos_touches = binary_predicate(GEOSFunc('GEOSTouches'))
geos_within = binary_predicate(GEOSFunc('GEOSWithin'))
| gpl-2.0 |
mohittahiliani/PIE-ns3 | examples/realtime/realtime-udp-echo.py | 195 | 3526 | #
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation;
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Network topology
#
# n0 n1 n2 n3
# | | | |
# =================
# LAN
#
# - UDP flows from n0 to n1 and back
# - DropTail queues
# - Tracing of queues and packet receptions to file "udp-echo.tr"
import ns.applications
import ns.core
import ns.csma
import ns.internet
import ns.network
def main(argv):
    """Build and run the realtime UDP echo topology.

    Four nodes on a CSMA LAN; a UdpEchoServer on node 1 and a UdpEchoClient
    on node 0.  Queues and receptions are traced to 'realtime-udp-echo.tr'.
    (CONSISTENCY FIX: the bare Python 2 `print "..."` statement was
    normalized to the parenthesized form used everywhere else in the file.)
    """
    #
    # Allow the user to override any of the defaults and the above Bind() at
    # run-time, via command-line arguments
    #
    cmd = ns.core.CommandLine()
    cmd.Parse(argv)

    #
    # But since this is a realtime script, don't allow the user to mess with
    # that.
    #
    ns.core.GlobalValue.Bind("SimulatorImplementationType", ns.core.StringValue("ns3::RealtimeSimulatorImpl"))

    #
    # Explicitly create the nodes required by the topology (shown above).
    #
    print ("Create nodes.")
    n = ns.network.NodeContainer()
    n.Create(4)

    internet = ns.internet.InternetStackHelper()
    internet.Install(n)

    #
    # Explicitly create the channels required by the topology (shown above).
    #
    print ("Create channels.")
    csma = ns.csma.CsmaHelper()
    csma.SetChannelAttribute("DataRate", ns.network.DataRateValue(ns.network.DataRate(5000000)))
    csma.SetChannelAttribute("Delay", ns.core.TimeValue(ns.core.MilliSeconds(2)))
    csma.SetDeviceAttribute("Mtu", ns.core.UintegerValue(1400))
    d = csma.Install(n)

    #
    # We've got the "hardware" in place.  Now we need to add IP addresses.
    #
    print ("Assign IP Addresses.")
    ipv4 = ns.internet.Ipv4AddressHelper()
    ipv4.SetBase(ns.network.Ipv4Address("10.1.1.0"), ns.network.Ipv4Mask("255.255.255.0"))
    i = ipv4.Assign(d)

    print ("Create Applications.")
    #
    # Create a UdpEchoServer application on node one.
    #
    port = 9  # well-known echo port number
    server = ns.applications.UdpEchoServerHelper(port)
    apps = server.Install(n.Get(1))
    apps.Start(ns.core.Seconds(1.0))
    apps.Stop(ns.core.Seconds(10.0))

    #
    # Create a UdpEchoClient application to send UDP datagrams from node zero to
    # node one.
    #
    packetSize = 1024
    maxPacketCount = 500
    interPacketInterval = ns.core.Seconds(0.01)
    client = ns.applications.UdpEchoClientHelper(i.GetAddress(1), port)
    client.SetAttribute("MaxPackets", ns.core.UintegerValue(maxPacketCount))
    client.SetAttribute("Interval", ns.core.TimeValue(interPacketInterval))
    client.SetAttribute("PacketSize", ns.core.UintegerValue(packetSize))
    apps = client.Install(n.Get(0))
    apps.Start(ns.core.Seconds(2.0))
    apps.Stop(ns.core.Seconds(10.0))

    # Trace queues and receptions in ASCII form, and capture pcaps.
    ascii = ns.network.AsciiTraceHelper()
    csma.EnableAsciiAll(ascii.CreateFileStream("realtime-udp-echo.tr"))
    csma.EnablePcapAll("realtime-udp-echo", False)

    #
    # Now, do the actual simulation.
    #
    print ("Run Simulation.")
    ns.core.Simulator.Run()
    ns.core.Simulator.Destroy()
    print ("Done.")
if __name__ == '__main__':
    # Forward the full argv so ns-3's CommandLine can parse overrides.
    import sys
    main(sys.argv)
| gpl-2.0 |
HasanIssa88/EMG_Classification | Linear _Reg_Max.py | 1 | 1805 | import sklearn
import scipy.stats as stats
from sklearn.datasets import load_boston
import pandas as pd
import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pylab as plt
# Load the maximum-contraction EMG recording; the first 13 lines of the
# file are acquisition metadata and are skipped.
df = pd.read_csv('02_02_precision_max_C_1.txt', sep=',', skiprows=13,
                 header=None, na_values="null", delimiter=',')
df.columns = ['Force_sensor', 'EMG_radial_1', 'EMG_radial_2', 'EMG_radial_3',
              'EMG_radial_4', 'EMG_radial_5', 'EMG_radial_6', 'EMG_special_1',
              'EMG_special_2', 'EMG_special_3', 'EMG_special_4']
# NOTE(review): df2 is loaded but never used below -- confirm intent.
df2 = pd.read_csv('02_01_precision_05_050.txt', sep=',', skiprows=13,
                  header=None, na_values="null", delimiter=',')
df2.columns = ['Force_sensor', 'EMG_radial_1', 'EMG_radial_2', 'EMG_radial_3',
               'EMG_radial_4', 'EMG_radial_5', 'EMG_radial_6', 'EMG_special_1',
               'EMG_special_2', 'EMG_special_3', 'EMG_special_4']
# Droping the force sensor column (target) and the special electrodes,
# keeping the six radial EMG channels as features.
X = df.drop(['Force_sensor', 'EMG_special_4', 'EMG_special_2',
             'EMG_special_3', 'EMG_special_1'],
            inplace=False, axis=1, errors='ignore')
X_train, X_test, Y_train, Y_test = sklearn.cross_validation.train_test_split(
    X, df.Force_sensor, test_size=0.33, random_state=5)
print(X_train.shape)
print(X_test.shape)
print(Y_train.shape)
print(Y_test.shape)
lm = LinearRegression()
lm.fit(X_train, Y_train)
pred_train = lm.predict(X_train)
pred_test = lm.predict(X_test)
# BUG FIX: mean squared error is the mean of the squared residuals.
# The original computed np.mean(residuals)**2 -- the *square of the mean*
# residual -- which is near zero for any roughly unbiased fit and does not
# measure prediction error.
print('Fit a model X_train, and calculate MSE with Y_train: :',
      np.mean((Y_train - lm.predict(X_train)) ** 2))
print('Fit a model X_train, and calculate MSE with X_test, Y_test ',
      np.mean((Y_test - lm.predict(X_test)) ** 2))
# Residual plot: training residuals in blue, test residuals in green.
plt.scatter(lm.predict(X_train), lm.predict(X_train) - Y_train,
            c='b', s=40, alpha=0.5)
plt.scatter(lm.predict(X_test), lm.predict(X_test) - Y_test, c='g', s=40)
plt.hlines(y=0, xmin=0, xmax=50)
plt.title('Residual plot using training (BLUE) and test(GREEN) data')
plt.ylabel('Residuals')
plt.show()
| gpl-3.0 |
mne-tools/mne-tools.github.io | 0.21/_downloads/5f5e63d32397437d1a83eaaa5e2e20fd/plot_read_proj.py | 5 | 2128 |
"""
==============================================
Read and visualize projections (SSP and other)
==============================================
This example shows how to read and visualize Signal Subspace Projectors (SSP)
vector. Such projections are sometimes referred to as PCA projections.
"""
# Author: Joan Massich <mailsik@gmail.com>
#
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne import read_proj
from mne.io import read_raw_fif
from mne.datasets import sample
print(__doc__)
# Paths into the MNE sample dataset (downloaded on first use).
data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
fname = data_path + '/MEG/sample/sample_audvis_raw.fif'
ecg_fname = data_path + '/MEG/sample/sample_audvis_ecg-proj.fif'
###############################################################################
# Load the FIF file and display the projections present in the file. Here the
# projections are added to the file during the acquisition and are obtained
# from empty room recordings.
raw = read_raw_fif(fname)
empty_room_proj = raw.info['projs']
# Display the projections stored in `info['projs']` from the raw object
raw.plot_projs_topomap()
###############################################################################
# Display the projections one by one
n_cols = len(empty_room_proj)
fig, axes = plt.subplots(1, n_cols, figsize=(2 * n_cols, 2))
for proj, ax in zip(empty_room_proj, axes):
    # Each projector draws its spatial pattern into its own axes.
    proj.plot_topomap(axes=ax, info=raw.info)
###############################################################################
# Use the function in `mne.viz` to display a list of projections
assert isinstance(empty_room_proj, list)
mne.viz.plot_projs_topomap(empty_room_proj, info=raw.info)
###############################################################################
# .. TODO: add this when the tutorial is up: "As shown in the tutorial
# :doc:`../auto_tutorials/preprocessing/plot_projectors`, ..."
#
# The ECG projections can be loaded from a file and added to the raw object
# read the projections
ecg_projs = read_proj(ecg_fname)
# add them to raw and plot everything
raw.add_proj(ecg_projs)
raw.plot_projs_topomap()
| bsd-3-clause |
ininex/geofire-python | resource/lib/python2.7/site-packages/geoindex/geo_grid_index.py | 2 | 3135 | from itertools import chain
import geohash
from . import utils, GeoPoint
# dependence between hashtag's precision and distance accurate calculating
# in fact it's sizes of grids in km
# Maps geohash precision (hash length) -> approximate cell size in km.
# Higher precision means a longer hash and a smaller grid cell.
GEO_HASH_GRID_SIZE = {
    1: 5000.0,
    2: 1260.0,
    3: 156.0,
    4: 40.0,
    5: 4.8,
    6: 1.22,
    7: 0.152,
    8: 0.038
}
class GeoGridIndex(object):
    """
    Class for store index based on geohash of points for quick-and-dry
    neighbors search
    """

    def __init__(self, precision=4):
        """
        :param precision: geohash length used for the grid cells
        """
        self.precision = precision
        # Maps geohash string -> list of GeoPoints falling in that cell.
        self.data = {}

    def get_point_hash(self, point):
        """
        return geohash for given point with self.precision

        :param point: GeoPoint instance
        :return: string
        """
        return geohash.encode(point.latitude, point.longitude, self.precision)

    def add_point(self, point):
        """
        add point to index, point must be a GeoPoint instance

        :param point: GeoPoint to index
        """
        assert isinstance(point, GeoPoint), \
            'point should be GeoPoint instance'
        point_hash = self.get_point_hash(point)
        points = self.data.setdefault(point_hash, [])
        points.append(point)

    def get_nearest_points_dirty(self, center_point, radius, unit='km'):
        """
        return approx list of point from circle with given center and radius
        it uses geohash and return with some error (see GEO_HASH_ERRORS)

        :param center_point: center of search circle
        :param radius: radius of search circle
        :return: list of GeoPoints from given area
        """
        if unit == 'mi':
            radius = utils.mi_to_km(radius)

        grid_size = GEO_HASH_GRID_SIZE[self.precision]
        if radius > grid_size / 2:
            # radius is too big for current grid, we cannot use 9 neighbors
            # to cover all possible points.
            # BUG FIX: iterate precisions in ascending order.  Plain
            # dict.items() order is arbitrary on Python 2 (and CPython < 3.6),
            # so the suggested precision could be wrong.
            suggested_precision = 0
            for precision, max_size in sorted(GEO_HASH_GRID_SIZE.items()):
                if radius > max_size / 2:
                    suggested_precision = precision - 1
                    break
            raise ValueError(
                'Too large radius, please rebuild GeoHashGrid with '
                'precision={0}'.format(suggested_precision)
            )
        me_and_neighbors = geohash.expand(self.get_point_hash(center_point))
        return chain(*(self.data.get(key, []) for key in me_and_neighbors))

    def get_nearest_points(self, center_point, radius, unit='km'):
        """
        return list of geo points from circle with given center and radius

        :param center_point: GeoPoint with center of search circle
        :param radius: radius of search circle
        :return: generator with tuple with GeoPoints and distance
        """
        assert isinstance(center_point, GeoPoint), \
            'point should be GeoPoint instance'
        # BUG FIX: forward *unit* to the dirty search.  Previously a radius
        # given in miles was interpreted as km when sizing the grid search,
        # so the candidate set could miss points inside the true circle.
        for point in self.get_nearest_points_dirty(center_point, radius, unit):
            distance = point.distance_to(center_point, unit)
            if distance <= radius:
                yield point, distance
| mit |
lukas-bednar/pytest | testing/python/collect.py | 2 | 39934 | import sys
from textwrap import dedent
import _pytest._code
import py
import pytest
from _pytest.main import EXIT_NOTESTSCOLLECTED
class TestModule:
    """Collection behavior of pytest Module nodes."""

    def test_failing_import(self, testdir):
        # Collecting a module that fails to import raises ImportError,
        # and does so again on a repeated collect (no stale caching).
        modcol = testdir.getmodulecol("import alksdjalskdjalkjals")
        pytest.raises(ImportError, modcol.collect)
        pytest.raises(ImportError, modcol.collect)

    def test_import_duplicate(self, testdir):
        # Two same-named test files in different dirs trigger the
        # import-mismatch error with a helpful HINT.
        a = testdir.mkdir("a")
        b = testdir.mkdir("b")
        p = a.ensure("test_whatever.py")
        p.pyimport()
        del py.std.sys.modules['test_whatever']
        b.ensure("test_whatever.py")
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*import*mismatch*",
            "*imported*test_whatever*",
            "*%s*" % a.join("test_whatever.py"),
            "*not the same*",
            "*%s*" % b.join("test_whatever.py"),
            "*HINT*",
        ])

    def test_import_prepend_append(self, testdir, monkeypatch):
        # --import-mode=append must not shadow an already-importable x456,
        # while the default (prepend) resolves to the rootdir copy.
        syspath = list(sys.path)
        monkeypatch.setattr(sys, "path", syspath)
        root1 = testdir.mkdir("root1")
        root2 = testdir.mkdir("root2")
        root1.ensure("x456.py")
        root2.ensure("x456.py")
        p = root2.join("test_x456.py")
        monkeypatch.syspath_prepend(str(root1))
        p.write(dedent("""\
            import x456
            def test():
                assert x456.__file__.startswith(%r)
        """ % str(root2)))
        with root2.as_cwd():
            reprec = testdir.inline_run("--import-mode=append")
            reprec.assertoutcome(passed=0, failed=1)
            reprec = testdir.inline_run()
            reprec.assertoutcome(passed=1)

    def test_syntax_error_in_module(self, testdir):
        # Syntax errors surface as CollectError, repeatably.
        modcol = testdir.getmodulecol("this is a syntax error")
        pytest.raises(modcol.CollectError, modcol.collect)
        pytest.raises(modcol.CollectError, modcol.collect)

    def test_module_considers_pluginmanager_at_import(self, testdir):
        # An unknown pytest_plugins entry fails at module import.
        modcol = testdir.getmodulecol("pytest_plugins='xasdlkj',")
        pytest.raises(ImportError, lambda: modcol.obj)
class TestClass:
    """Collection behavior of pytest Class nodes."""

    def test_class_with_init_warning(self, testdir):
        # A test class defining __init__ is not collected and emits the
        # WC1 collection warning.
        testdir.makepyfile("""
            class TestClass1:
                def __init__(self):
                    pass
        """)
        result = testdir.runpytest("-rw")
        result.stdout.fnmatch_lines_random("""
            WC1*test_class_with_init_warning.py*__init__*
        """)

    def test_class_subclassobject(self, testdir):
        # A lowercase 'test' class deriving from object is not collected.
        testdir.getmodulecol("""
            class test(object):
                pass
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*collected 0*",
        ])

    def test_setup_teardown_class_as_classmethod(self, testdir):
        # setup_class/teardown_class may be declared as classmethods.
        testdir.makepyfile(test_mod1="""
            class TestClassMethod:
                @classmethod
                def setup_class(cls):
                    pass
                def test_1(self):
                    pass
                @classmethod
                def teardown_class(cls):
                    pass
        """)
        result = testdir.runpytest()
        result.stdout.fnmatch_lines([
            "*1 passed*",
        ])

    def test_issue1035_obj_has_getattr(self, testdir):
        # A module attribute whose class answers every __getattr__ with a
        # truthy value must not be mistaken for a collectable object.
        modcol = testdir.getmodulecol("""
            class Chameleon(object):
                def __getattr__(self, name):
                    return True
            chameleon = Chameleon()
        """)
        colitems = modcol.collect()
        assert len(colitems) == 0
class TestGenerator:
    """Collection and execution of (deprecated) yield-based generator tests."""

    def test_generative_functions(self, testdir):
        # A yielding test function becomes a Generator node whose children
        # are Functions named by index.
        modcol = testdir.getmodulecol("""
            def func1(arg, arg2):
                assert arg == arg2

            def test_gen():
                yield func1, 17, 3*5
                yield func1, 42, 6*7
        """)
        colitems = modcol.collect()
        assert len(colitems) == 1
        gencol = colitems[0]
        assert isinstance(gencol, pytest.Generator)
        gencolitems = gencol.collect()
        assert len(gencolitems) == 2
        assert isinstance(gencolitems[0], pytest.Function)
        assert isinstance(gencolitems[1], pytest.Function)
        assert gencolitems[0].name == '[0]'
        assert gencolitems[0].obj.__name__ == 'func1'

    def test_generative_methods(self, testdir):
        # Same, but for a yielding method inside a test class.
        modcol = testdir.getmodulecol("""
            def func1(arg, arg2):
                assert arg == arg2
            class TestGenMethods:
                def test_gen(self):
                    yield func1, 17, 3*5
                    yield func1, 42, 6*7
        """)
        gencol = modcol.collect()[0].collect()[0].collect()[0]
        assert isinstance(gencol, pytest.Generator)
        gencolitems = gencol.collect()
        assert len(gencolitems) == 2
        assert isinstance(gencolitems[0], pytest.Function)
        assert isinstance(gencolitems[1], pytest.Function)
        assert gencolitems[0].name == '[0]'
        assert gencolitems[0].obj.__name__ == 'func1'

    def test_generative_functions_with_explicit_names(self, testdir):
        # Yielding a leading string names the generated Function.
        modcol = testdir.getmodulecol("""
            def func1(arg, arg2):
                assert arg == arg2

            def test_gen():
                yield "seventeen", func1, 17, 3*5
                yield "fortytwo", func1, 42, 6*7
        """)
        colitems = modcol.collect()
        assert len(colitems) == 1
        gencol = colitems[0]
        assert isinstance(gencol, pytest.Generator)
        gencolitems = gencol.collect()
        assert len(gencolitems) == 2
        assert isinstance(gencolitems[0], pytest.Function)
        assert isinstance(gencolitems[1], pytest.Function)
        assert gencolitems[0].name == "['seventeen']"
        assert gencolitems[0].obj.__name__ == 'func1'
        assert gencolitems[1].name == "['fortytwo']"
        assert gencolitems[1].obj.__name__ == 'func1'

    def test_generative_functions_unique_explicit_names(self, testdir):
        # generative
        # Duplicate explicit names are rejected at collection time.
        modcol = testdir.getmodulecol("""
            def func(): pass
            def test_gen():
                yield "name", func
                yield "name", func
        """)
        colitems = modcol.collect()
        assert len(colitems) == 1
        gencol = colitems[0]
        assert isinstance(gencol, pytest.Generator)
        pytest.raises(ValueError, "gencol.collect()")

    def test_generative_methods_with_explicit_names(self, testdir):
        # Explicit names also work for generator methods.
        modcol = testdir.getmodulecol("""
            def func1(arg, arg2):
                assert arg == arg2
            class TestGenMethods:
                def test_gen(self):
                    yield "m1", func1, 17, 3*5
                    yield "m2", func1, 42, 6*7
        """)
        gencol = modcol.collect()[0].collect()[0].collect()[0]
        assert isinstance(gencol, pytest.Generator)
        gencolitems = gencol.collect()
        assert len(gencolitems) == 2
        assert isinstance(gencolitems[0], pytest.Function)
        assert isinstance(gencolitems[1], pytest.Function)
        assert gencolitems[0].name == "['m1']"
        assert gencolitems[0].obj.__name__ == 'func1'
        assert gencolitems[1].name == "['m2']"
        assert gencolitems[1].obj.__name__ == 'func1'

    def test_order_of_execution_generator_same_codeline(self, testdir, tmpdir):
        # Yielded callables from the same code line run in yield order.
        o = testdir.makepyfile("""
            def test_generative_order_of_execution():
                import py, pytest
                test_list = []
                expected_list = list(range(6))

                def list_append(item):
                    test_list.append(item)

                def assert_order_of_execution():
                    py.builtin.print_('expected order', expected_list)
                    py.builtin.print_('but got       ', test_list)
                    assert test_list == expected_list

                for i in expected_list:
                    yield list_append, i
                yield assert_order_of_execution
        """)
        reprec = testdir.inline_run(o)
        passed, skipped, failed = reprec.countoutcomes()
        assert passed == 7
        assert not skipped and not failed

    def test_order_of_execution_generator_different_codeline(self, testdir):
        # Yielded callables from different code lines also run in order.
        o = testdir.makepyfile("""
            def test_generative_tests_different_codeline():
                import py, pytest
                test_list = []
                expected_list = list(range(3))

                def list_append_2():
                    test_list.append(2)

                def list_append_1():
                    test_list.append(1)

                def list_append_0():
                    test_list.append(0)

                def assert_order_of_execution():
                    py.builtin.print_('expected order', expected_list)
                    py.builtin.print_('but got       ', test_list)
                    assert test_list == expected_list

                yield list_append_0
                yield list_append_1
                yield list_append_2
                yield assert_order_of_execution
        """)
        reprec = testdir.inline_run(o)
        passed, skipped, failed = reprec.countoutcomes()
        assert passed == 4
        assert not skipped and not failed

    def test_setupstate_is_preserved_134(self, testdir):
        # yield-based tests are messy wrt to setupstate because
        # during collection they already invoke setup functions
        # and then again when they are run.  For now, we want to make sure
        # that the old 1.3.4 behaviour is preserved such that all
        # yielded functions all share the same "self" instance that
        # has been used during collection.
        o = testdir.makepyfile("""
            setuplist = []
            class TestClass:
                def setup_method(self, func):
                    #print "setup_method", self, func
                    setuplist.append(self)
                    self.init = 42

                def teardown_method(self, func):
                    self.init = None

                def test_func1(self):
                    pass

                def test_func2(self):
                    yield self.func2
                    yield self.func2

                def func2(self):
                    assert self.init

            def test_setuplist():
                # once for test_func2 during collection
                # once for test_func1 during test run
                # once for test_func2 during test run
                #print setuplist
                assert len(setuplist) == 3, len(setuplist)
                assert setuplist[0] == setuplist[2], setuplist
                assert setuplist[1] != setuplist[2], setuplist
        """)
        reprec = testdir.inline_run(o, '-v')
        passed, skipped, failed = reprec.countoutcomes()
        assert passed == 4
        assert not skipped and not failed
class TestFunction:
def test_getmodulecollector(self, testdir):
item = testdir.getitem("def test_func(): pass")
modcol = item.getparent(pytest.Module)
assert isinstance(modcol, pytest.Module)
assert hasattr(modcol.obj, 'test_func')
def test_function_as_object_instance_ignored(self, testdir):
testdir.makepyfile("""
class A:
def __call__(self, tmpdir):
0/0
test_a = A()
""")
reprec = testdir.inline_run()
reprec.assertoutcome()
def test_function_equality(self, testdir, tmpdir):
from _pytest.python import FixtureManager
config = testdir.parseconfigure()
session = testdir.Session(config)
session._fixturemanager = FixtureManager(session)
def func1():
pass
def func2():
pass
f1 = pytest.Function(name="name", parent=session, config=config,
args=(1,), callobj=func1)
assert f1 == f1
f2 = pytest.Function(name="name",config=config,
callobj=func2, parent=session)
assert f1 != f2
def test_issue197_parametrize_emptyset(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.parametrize('arg', [])
def test_function(arg):
pass
""")
reprec = testdir.inline_run()
reprec.assertoutcome(skipped=1)
def test_single_tuple_unwraps_values(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.parametrize(('arg',), [(1,)])
def test_function(arg):
assert arg == 1
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_issue213_parametrize_value_no_equal(self, testdir):
testdir.makepyfile("""
import pytest
class A:
def __eq__(self, other):
raise ValueError("not possible")
@pytest.mark.parametrize('arg', [A()])
def test_function(arg):
assert arg.__class__.__name__ == "A"
""")
reprec = testdir.inline_run("--fulltrace")
reprec.assertoutcome(passed=1)
def test_parametrize_with_non_hashable_values(self, testdir):
"""Test parametrization with non-hashable values."""
testdir.makepyfile("""
archival_mapping = {
'1.0': {'tag': '1.0'},
'1.2.2a1': {'tag': 'release-1.2.2a1'},
}
import pytest
@pytest.mark.parametrize('key value'.split(),
archival_mapping.items())
def test_archival_to_version(key, value):
assert key in archival_mapping
assert value == archival_mapping[key]
""")
rec = testdir.inline_run()
rec.assertoutcome(passed=2)
def test_parametrize_with_non_hashable_values_indirect(self, testdir):
"""Test parametrization with non-hashable values with indirect parametrization."""
testdir.makepyfile("""
archival_mapping = {
'1.0': {'tag': '1.0'},
'1.2.2a1': {'tag': 'release-1.2.2a1'},
}
import pytest
@pytest.fixture
def key(request):
return request.param
@pytest.fixture
def value(request):
return request.param
@pytest.mark.parametrize('key value'.split(),
archival_mapping.items(), indirect=True)
def test_archival_to_version(key, value):
assert key in archival_mapping
assert value == archival_mapping[key]
""")
rec = testdir.inline_run()
rec.assertoutcome(passed=2)
def test_parametrize_overrides_fixture(self, testdir):
"""Test parametrization when parameter overrides existing fixture with same name."""
testdir.makepyfile("""
import pytest
@pytest.fixture
def value():
return 'value'
@pytest.mark.parametrize('value',
['overridden'])
def test_overridden_via_param(value):
assert value == 'overridden'
@pytest.mark.parametrize('somevalue', ['overridden'])
def test_not_overridden(value, somevalue):
assert value == 'value'
assert somevalue == 'overridden'
@pytest.mark.parametrize('other,value', [('foo', 'overridden')])
def test_overridden_via_multiparam(other, value):
assert other == 'foo'
assert value == 'overridden'
""")
rec = testdir.inline_run()
rec.assertoutcome(passed=3)
def test_parametrize_overrides_parametrized_fixture(self, testdir):
"""Test parametrization when parameter overrides existing parametrized fixture with same name."""
testdir.makepyfile("""
import pytest
@pytest.fixture(params=[1, 2])
def value(request):
return request.param
@pytest.mark.parametrize('value',
['overridden'])
def test_overridden_via_param(value):
assert value == 'overridden'
""")
rec = testdir.inline_run()
rec.assertoutcome(passed=1)
def test_parametrize_with_mark(self, testdir):
    """Marks attached to parametrize values apply only to the matching item.

    The function-level ``foo`` mark must be present on both generated
    items, while ``bar``/``baz`` (wrapped around the second value only)
    must appear only on the second item.
    """
    # BUGFIX: first parameter was misspelled ``selfself``; it still worked
    # because pytest passes the instance positionally, but it shadowed the
    # conventional name and read as a typo.
    items = testdir.getitems("""
        import pytest
        @pytest.mark.foo
        @pytest.mark.parametrize('arg', [
            1,
            pytest.mark.bar(pytest.mark.baz(2))
        ])
        def test_function(arg):
            pass
    """)
    keywords = [item.keywords for item in items]
    assert 'foo' in keywords[0] and 'bar' not in keywords[0] and 'baz' not in keywords[0]
    assert 'foo' in keywords[1] and 'bar' in keywords[1] and 'baz' in keywords[1]
def test_function_equality_with_callspec(self, testdir, tmpdir):
items = testdir.getitems("""
import pytest
@pytest.mark.parametrize('arg', [1,2])
def test_function(arg):
pass
""")
assert items[0] != items[1]
assert not (items[0] == items[1])
def test_pyfunc_call(self, testdir):
item = testdir.getitem("def test_func(): raise ValueError")
config = item.config
class MyPlugin1:
def pytest_pyfunc_call(self, pyfuncitem):
raise ValueError
class MyPlugin2:
def pytest_pyfunc_call(self, pyfuncitem):
return True
config.pluginmanager.register(MyPlugin1())
config.pluginmanager.register(MyPlugin2())
config.hook.pytest_runtest_setup(item=item)
config.hook.pytest_pyfunc_call(pyfuncitem=item)
def test_multiple_parametrize(self, testdir):
modcol = testdir.getmodulecol("""
import pytest
@pytest.mark.parametrize('x', [0, 1])
@pytest.mark.parametrize('y', [2, 3])
def test1(x, y):
pass
""")
colitems = modcol.collect()
assert colitems[0].name == 'test1[2-0]'
assert colitems[1].name == 'test1[2-1]'
assert colitems[2].name == 'test1[3-0]'
assert colitems[3].name == 'test1[3-1]'
def test_issue751_multiple_parametrize_with_ids(self, testdir):
modcol = testdir.getmodulecol("""
import pytest
@pytest.mark.parametrize('x', [0], ids=['c'])
@pytest.mark.parametrize('y', [0, 1], ids=['a', 'b'])
class Test(object):
def test1(self, x, y):
pass
def test2(self, x, y):
pass
""")
colitems = modcol.collect()[0].collect()[0].collect()
assert colitems[0].name == 'test1[a-c]'
assert colitems[1].name == 'test1[b-c]'
assert colitems[2].name == 'test2[a-c]'
assert colitems[3].name == 'test2[b-c]'
def test_parametrize_skipif(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.skipif('True')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_skip_if(x):
assert x < 2
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 2 passed, 1 skipped in *')
def test_parametrize_skip(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.skip('')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_skip(x):
assert x < 2
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 2 passed, 1 skipped in *')
def test_parametrize_skipif_no_skip(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.skipif('False')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_skipif_no_skip(x):
assert x < 2
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 1 failed, 2 passed in *')
def test_parametrize_xfail(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.xfail('True')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_xfail(x):
assert x < 2
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 2 passed, 1 xfailed in *')
def test_parametrize_passed(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.xfail('True')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_xfail(x):
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 2 passed, 1 xpassed in *')
def test_parametrize_xfail_passed(self, testdir):
testdir.makepyfile("""
import pytest
m = pytest.mark.xfail('False')
@pytest.mark.parametrize('x', [0, 1, m(2)])
def test_passed(x):
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('* 3 passed in *')
class TestSorting:
    """Equality/hash semantics and source-order sorting of collected items."""

    def test_check_equality(self, testdir):
        """Two collections of the same test compare equal; different tests don't."""
        modcol = testdir.getmodulecol("""
            def test_pass(): pass
            def test_fail(): assert 0
        """)
        fn1 = testdir.collect_by_name(modcol, "test_pass")
        assert isinstance(fn1, pytest.Function)
        fn2 = testdir.collect_by_name(modcol, "test_pass")
        assert isinstance(fn2, pytest.Function)
        assert fn1 == fn2
        assert fn1 != modcol
        # cmp() only exists on Python 2.
        if py.std.sys.version_info < (3, 0):
            assert cmp(fn1, fn2) == 0
        assert hash(fn1) == hash(fn2)
        fn3 = testdir.collect_by_name(modcol, "test_fail")
        assert isinstance(fn3, pytest.Function)
        assert not (fn1 == fn3)
        assert fn1 != fn3
        # Items must never compare equal to arbitrary unrelated objects,
        # and the comparison must be symmetric.
        for fn in fn1, fn2, fn3:
            assert fn != 3
            assert fn != modcol
            assert fn != [1, 2, 3]
            assert [1, 2, 3] != fn
            assert modcol != fn

    def test_allow_sane_sorting_for_decorators(self, testdir):
        """Decorated tests keep source order via the ``place_as`` attribute."""
        modcol = testdir.getmodulecol("""
            def dec(f):
                g = lambda: f(2)
                g.place_as = f
                return g

            def test_b(y):
                pass
            test_b = dec(test_b)

            def test_a(y):
                pass
            test_a = dec(test_a)
        """)
        colitems = modcol.collect()
        assert len(colitems) == 2
        # test_b is defined first in the source, so it must sort first.
        assert [item.name for item in colitems] == ['test_b', 'test_a']
class TestConftestCustomization:
def test_pytest_pycollect_module(self, testdir):
testdir.makeconftest("""
import pytest
class MyModule(pytest.Module):
pass
def pytest_pycollect_makemodule(path, parent):
if path.basename == "test_xyz.py":
return MyModule(path, parent)
""")
testdir.makepyfile("def test_some(): pass")
testdir.makepyfile(test_xyz="def test_func(): pass")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*<Module*test_pytest*",
"*<MyModule*xyz*",
])
def test_customized_pymakemodule_issue205_subdir(self, testdir):
b = testdir.mkdir("a").mkdir("b")
b.join("conftest.py").write(_pytest._code.Source("""
def pytest_pycollect_makemodule(__multicall__):
mod = __multicall__.execute()
mod.obj.hello = "world"
return mod
"""))
b.join("test_module.py").write(_pytest._code.Source("""
def test_hello():
assert hello == "world"
"""))
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_customized_pymakeitem(self, testdir):
b = testdir.mkdir("a").mkdir("b")
b.join("conftest.py").write(_pytest._code.Source("""
import pytest
@pytest.hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem():
outcome = yield
if outcome.excinfo is None:
result = outcome.result
if result:
for func in result:
func._some123 = "world"
"""))
b.join("test_module.py").write(_pytest._code.Source("""
import pytest
@pytest.fixture()
def obj(request):
return request.node._some123
def test_hello(obj):
assert obj == "world"
"""))
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_pytest_pycollect_makeitem(self, testdir):
testdir.makeconftest("""
import pytest
class MyFunction(pytest.Function):
pass
def pytest_pycollect_makeitem(collector, name, obj):
if name == "some":
return MyFunction(name, collector)
""")
testdir.makepyfile("def some(): pass")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*MyFunction*some*",
])
def test_makeitem_non_underscore(self, testdir, monkeypatch):
modcol = testdir.getmodulecol("def _hello(): pass")
l = []
monkeypatch.setattr(pytest.Module, 'makeitem',
lambda self, name, obj: l.append(name))
l = modcol.collect()
assert '_hello' not in l
def test_setup_only_available_in_subdir(testdir):
sub1 = testdir.mkpydir("sub1")
sub2 = testdir.mkpydir("sub2")
sub1.join("conftest.py").write(_pytest._code.Source("""
import pytest
def pytest_runtest_setup(item):
assert item.fspath.purebasename == "test_in_sub1"
def pytest_runtest_call(item):
assert item.fspath.purebasename == "test_in_sub1"
def pytest_runtest_teardown(item):
assert item.fspath.purebasename == "test_in_sub1"
"""))
sub2.join("conftest.py").write(_pytest._code.Source("""
import pytest
def pytest_runtest_setup(item):
assert item.fspath.purebasename == "test_in_sub2"
def pytest_runtest_call(item):
assert item.fspath.purebasename == "test_in_sub2"
def pytest_runtest_teardown(item):
assert item.fspath.purebasename == "test_in_sub2"
"""))
sub1.join("test_in_sub1.py").write("def test_1(): pass")
sub2.join("test_in_sub2.py").write("def test_2(): pass")
result = testdir.runpytest("-v", "-s")
result.assert_outcomes(passed=2)
def test_modulecol_roundtrip(testdir):
    """A module collector can be recovered from its node id."""
    modcol = testdir.getmodulecol("pass", withinit=True)
    trail = modcol.nodeid
    # genitems=0: collect only the module node itself, not the items inside.
    newcol = modcol.session.perform_collect([trail], genitems=0)[0]
    assert modcol.name == newcol.name
class TestTracebackCutting:
def test_skip_simple(self):
excinfo = pytest.raises(pytest.skip.Exception, 'pytest.skip("xxx")')
assert excinfo.traceback[-1].frame.code.name == "skip"
assert excinfo.traceback[-1].ishidden()
def test_traceback_argsetup(self, testdir):
testdir.makeconftest("""
def pytest_funcarg__hello(request):
raise ValueError("xyz")
""")
p = testdir.makepyfile("def test(hello): pass")
result = testdir.runpytest(p)
assert result.ret != 0
out = result.stdout.str()
assert out.find("xyz") != -1
assert out.find("conftest.py:2: ValueError") != -1
numentries = out.count("_ _ _") # separator for traceback entries
assert numentries == 0
result = testdir.runpytest("--fulltrace", p)
out = result.stdout.str()
assert out.find("conftest.py:2: ValueError") != -1
numentries = out.count("_ _ _ _") # separator for traceback entries
assert numentries > 3
def test_traceback_error_during_import(self, testdir):
testdir.makepyfile("""
x = 1
x = 2
x = 17
asd
""")
result = testdir.runpytest()
assert result.ret != 0
out = result.stdout.str()
assert "x = 1" not in out
assert "x = 2" not in out
result.stdout.fnmatch_lines([
" *asd*",
"E*NameError*",
])
result = testdir.runpytest("--fulltrace")
out = result.stdout.str()
assert "x = 1" in out
assert "x = 2" in out
result.stdout.fnmatch_lines([
">*asd*",
"E*NameError*",
])
def test_traceback_filter_error_during_fixture_collection(self, testdir):
"""integration test for issue #995.
"""
testdir.makepyfile("""
import pytest
def fail_me(func):
ns = {}
exec('def w(): raise ValueError("fail me")', ns)
return ns['w']
@pytest.fixture(scope='class')
@fail_me
def fail_fixture():
pass
def test_failing_fixture(fail_fixture):
pass
""")
result = testdir.runpytest()
assert result.ret != 0
out = result.stdout.str()
assert "INTERNALERROR>" not in out
result.stdout.fnmatch_lines([
"*ValueError: fail me*",
"* 1 error in *",
])
def test_filter_traceback_generated_code(self):
"""test that filter_traceback() works with the fact that
py.code.Code.path attribute might return an str object.
In this case, one of the entries on the traceback was produced by
dynamically generated code.
See: https://bitbucket.org/pytest-dev/py/issues/71
This fixes #995.
"""
from _pytest.python import filter_traceback
try:
ns = {}
exec('def foo(): raise ValueError', ns)
ns['foo']()
except ValueError:
_, _, tb = sys.exc_info()
tb = _pytest._code.Traceback(tb)
assert isinstance(tb[-1].path, str)
assert not filter_traceback(tb[-1])
def test_filter_traceback_path_no_longer_valid(self, testdir):
"""test that filter_traceback() works with the fact that
py.code.Code.path attribute might return an str object.
In this case, one of the files in the traceback no longer exists.
This fixes #1133.
"""
from _pytest.python import filter_traceback
testdir.syspathinsert()
testdir.makepyfile(filter_traceback_entry_as_str='''
def foo():
raise ValueError
''')
try:
import filter_traceback_entry_as_str
filter_traceback_entry_as_str.foo()
except ValueError:
_, _, tb = sys.exc_info()
testdir.tmpdir.join('filter_traceback_entry_as_str.py').remove()
tb = _pytest._code.Traceback(tb)
assert isinstance(tb[-1].path, str)
assert filter_traceback(tb[-1])
class TestReportInfo:
def test_itemreport_reportinfo(self, testdir, linecomp):
testdir.makeconftest("""
import pytest
class MyFunction(pytest.Function):
def reportinfo(self):
return "ABCDE", 42, "custom"
def pytest_pycollect_makeitem(collector, name, obj):
if name == "test_func":
return MyFunction(name, parent=collector)
""")
item = testdir.getitem("def test_func(): pass")
item.config.pluginmanager.getplugin("runner")
assert item.location == ("ABCDE", 42, "custom")
def test_func_reportinfo(self, testdir):
item = testdir.getitem("def test_func(): pass")
fspath, lineno, modpath = item.reportinfo()
assert fspath == item.fspath
assert lineno == 0
assert modpath == "test_func"
def test_class_reportinfo(self, testdir):
modcol = testdir.getmodulecol("""
# lineno 0
class TestClass:
def test_hello(self): pass
""")
classcol = testdir.collect_by_name(modcol, "TestClass")
fspath, lineno, msg = classcol.reportinfo()
assert fspath == modcol.fspath
assert lineno == 1
assert msg == "TestClass"
def test_generator_reportinfo(self, testdir):
modcol = testdir.getmodulecol("""
# lineno 0
def test_gen():
def check(x):
assert x
yield check, 3
""")
gencol = testdir.collect_by_name(modcol, "test_gen")
fspath, lineno, modpath = gencol.reportinfo()
assert fspath == modcol.fspath
assert lineno == 1
assert modpath == "test_gen"
genitem = gencol.collect()[0]
fspath, lineno, modpath = genitem.reportinfo()
assert fspath == modcol.fspath
assert lineno == 2
assert modpath == "test_gen[0]"
"""
def test_func():
pass
def test_genfunc():
def check(x):
pass
yield check, 3
class TestClass:
def test_method(self):
pass
"""
def test_reportinfo_with_nasty_getattr(self, testdir):
# https://github.com/pytest-dev/pytest/issues/1204
modcol = testdir.getmodulecol("""
# lineno 0
class TestClass:
def __getattr__(self, name):
return "this is not an int"
def test_foo(self):
pass
""")
classcol = testdir.collect_by_name(modcol, "TestClass")
instance = classcol.collect()[0]
fspath, lineno, msg = instance.reportinfo()
def test_customized_python_discovery(testdir):
testdir.makeini("""
[pytest]
python_files=check_*.py
python_classes=Check
python_functions=check
""")
p = testdir.makepyfile("""
def check_simple():
pass
class CheckMyApp:
def check_meth(self):
pass
""")
p2 = p.new(basename=p.basename.replace("test", "check"))
p.move(p2)
result = testdir.runpytest("--collect-only", "-s")
result.stdout.fnmatch_lines([
"*check_customized*",
"*check_simple*",
"*CheckMyApp*",
"*check_meth*",
])
result = testdir.runpytest()
assert result.ret == 0
result.stdout.fnmatch_lines([
"*2 passed*",
])
def test_customized_python_discovery_functions(testdir):
testdir.makeini("""
[pytest]
python_functions=_test
""")
testdir.makepyfile("""
def _test_underscore():
pass
""")
result = testdir.runpytest("--collect-only", "-s")
result.stdout.fnmatch_lines([
"*_test_underscore*",
])
result = testdir.runpytest()
assert result.ret == 0
result.stdout.fnmatch_lines([
"*1 passed*",
])
def test_collector_attributes(testdir):
testdir.makeconftest("""
import pytest
def pytest_pycollect_makeitem(collector):
assert collector.Function == pytest.Function
assert collector.Class == pytest.Class
assert collector.Instance == pytest.Instance
assert collector.Module == pytest.Module
""")
testdir.makepyfile("""
def test_hello():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*1 passed*",
])
def test_customize_through_attributes(testdir):
testdir.makeconftest("""
import pytest
class MyFunction(pytest.Function):
pass
class MyInstance(pytest.Instance):
Function = MyFunction
class MyClass(pytest.Class):
Instance = MyInstance
def pytest_pycollect_makeitem(collector, name, obj):
if name.startswith("MyTestClass"):
return MyClass(name, parent=collector)
""")
testdir.makepyfile("""
class MyTestClass:
def test_hello(self):
pass
""")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*MyClass*",
"*MyInstance*",
"*MyFunction*test_hello*",
])
def test_unorderable_types(testdir):
    """Collection must not try to order class objects (TypeError on Python 3).

    Two test classes defined on the same module attract a sort during
    collection; classes are unorderable, so any comparison would raise.
    """
    testdir.makepyfile("""
        class TestJoinEmpty:
            pass

        def make_test():
            class Test:
                pass
            Test.__name__ = "TestFoo"
            return Test
        TestFoo = make_test()
    """)
    result = testdir.runpytest()
    assert "TypeError" not in result.stdout.str()
    # Neither class defines test methods, so nothing is collected.
    assert result.ret == EXIT_NOTESTSCOLLECTED
def test_collect_functools_partial(testdir):
"""
Test that collection of functools.partial object works, and arguments
to the wrapped functions are dealt correctly (see #811).
"""
testdir.makepyfile("""
import functools
import pytest
@pytest.fixture
def fix1():
return 'fix1'
@pytest.fixture
def fix2():
return 'fix2'
def check1(i, fix1):
assert i == 2
assert fix1 == 'fix1'
def check2(fix1, i):
assert i == 2
assert fix1 == 'fix1'
def check3(fix1, i, fix2):
assert i == 2
assert fix1 == 'fix1'
assert fix2 == 'fix2'
test_ok_1 = functools.partial(check1, i=2)
test_ok_2 = functools.partial(check1, i=2, fix1='fix1')
test_ok_3 = functools.partial(check1, 2)
test_ok_4 = functools.partial(check2, i=2)
test_ok_5 = functools.partial(check3, i=2)
test_ok_6 = functools.partial(check3, i=2, fix1='fix1')
test_fail_1 = functools.partial(check2, 2)
test_fail_2 = functools.partial(check3, 2)
""")
result = testdir.inline_run()
result.assertoutcome(passed=6, failed=2)
def test_dont_collect_non_function_callable(testdir):
"""Test for issue https://github.com/pytest-dev/pytest/issues/331
In this case an INTERNALERROR occurred trying to report the failure of
a test like this one because py test failed to get the source lines.
"""
testdir.makepyfile("""
class Oh(object):
def __call__(self):
pass
test_a = Oh()
def test_real():
pass
""")
result = testdir.runpytest('-rw')
result.stdout.fnmatch_lines([
'*collected 1 item*',
'WC2 *',
'*1 passed, 1 pytest-warnings in *',
])
def test_class_injection_does_not_break_collection(testdir):
"""Tests whether injection during collection time will terminate testing.
In this case the error should not occur if the TestClass itself
is modified during collection time, and the original method list
is still used for collection.
"""
testdir.makeconftest("""
from test_inject import TestClass
def pytest_generate_tests(metafunc):
TestClass.changed_var = {}
""")
testdir.makepyfile(test_inject='''
class TestClass(object):
def test_injection(self):
"""Test being parametrized."""
pass
''')
result = testdir.runpytest()
assert "RuntimeError: dictionary changed size during iteration" not in result.stdout.str()
result.stdout.fnmatch_lines(['*1 passed*'])
| mit |
msmbuilder/msmbuilder | msmbuilder/utils/divergence.py | 3 | 2818 | from __future__ import print_function, division, absolute_import
import numpy as np
from scipy.stats import entropy
def scipy_kl_divergence(P, Q, scalar=True):
    """Row-wise KL divergence D(P || Q) computed via ``scipy.stats.entropy``.

    ``entropy`` operates on columns, so the inputs are transposed to treat
    each row of P/Q as one probability distribution.

    Parameters
    ----------
    P, Q : array-like
        Probability distributions; rows are distributions.
    scalar : bool
        If True, return the sum over rows; otherwise the per-row values.
    """
    per_row = entropy(P.T, Q.T)
    if scalar:
        return np.sum(per_row)
    return per_row
def manual_kl_divergence(P, Q, scalar=True):
    """Row-wise KL divergence D(P || Q) computed in pure Python/NumPy.

    Parameters
    ----------
    P, Q : numpy.ndarray
        Probability distributions. Either 1-D (a single distribution) or
        2-D with one distribution per row.
    scalar : bool
        If True, return the sum over rows; otherwise a list of per-row
        divergences.

    Notes
    -----
    Terms where ``p == 0`` contribute 0 by the ``0 * log(0) == 0``
    convention.  Terms where ``q == 0`` while ``p > 0`` (which would be
    infinite) are silently dropped -- this mirrors the historical behavior
    of this function.
    """
    # Promote a single 1-D distribution to a one-row matrix.
    if len(P.shape) == 1:
        P = np.array([P])
        Q = np.array([Q])
    vec = []
    for row in range(P.shape[0]):
        total = 0
        for i, p in enumerate(P[row]):
            # Include the term only when BOTH probabilities are nonzero.
            # (The previous comment here claimed "one or both is not zero",
            # which was incorrect: p * q != 0 requires both to be nonzero.)
            if p * Q[row][i] != 0:
                total += p * np.log(p / Q[row][i])
        vec.append(total)
    # Floating-point round-off can yield tiny negative values; KL >= 0.
    result = [np.max([v, 0]) for v in vec]
    if scalar:
        return np.sum(result)
    else:
        return result
def kl_divergence(P, Q, manual=True, scalar=True):
    """KL divergence D(P || Q), dispatching to the manual or scipy backend.

    Parameters
    ----------
    manual : bool
        If True (default) use the pure-Python implementation; otherwise
        delegate to ``scipy.stats.entropy``.
    scalar : bool
        Passed through to the chosen backend.
    """
    backend = manual_kl_divergence if manual else scipy_kl_divergence
    return backend(P, Q, scalar=scalar)
def sym_kl_divergence(P, Q, scalar=True):
    """Symmetrized KL divergence: D(P || Q) + D(Q || P)."""
    forward = kl_divergence(P, Q, scalar=scalar)
    backward = kl_divergence(Q, P, scalar=scalar)
    # With scalar=True this is a numeric sum; with scalar=False both terms
    # are lists and '+' concatenates them, matching historical behavior.
    return forward + backward
def js_divergence(P, Q, scalar=True):
    """Jensen-Shannon divergence of P and Q.

    Computed as the average KL divergence of each distribution against
    their element-wise mean M.
    """
    M = np.mean([P, Q], axis=0)
    p_to_m = kl_divergence(P, M, scalar=scalar)
    q_to_m = kl_divergence(Q, M, scalar=scalar)
    return 0.5 * p_to_m + 0.5 * q_to_m
def js_metric(P, Q, scalar=True):
    """Square root of the JS divergence (a true distance metric)."""
    divergence = js_divergence(P, Q, scalar=scalar)
    return np.sqrt(divergence)
def fnorm(P, Q):
    """Frobenius norm of the difference between matrices P and Q."""
    diff = P - Q
    return np.linalg.norm(diff, ord='fro')
def kl_divergence_array(ref, target, i):
    """KL divergence of every row of *target* against ``ref[i]``."""
    return np.array([kl_divergence(ref[i], row) for row in target])


def sym_kl_divergence_array(ref, target, i):
    """Symmetrized KL divergence of every row of *target* against ``ref[i]``."""
    return np.array([sym_kl_divergence(ref[i], row) for row in target])


def js_divergence_array(ref, target, i):
    """JS divergence of every row of *target* against ``ref[i]``."""
    return np.array([js_divergence(ref[i], row) for row in target])


def js_metric_array(ref, target, i):
    """JS metric of every row of *target* against ``ref[i]``."""
    return np.array([js_metric(ref[i], row) for row in target])


def _fnorm_array(ref, target, i):
    # hidden because you should never use fnorm with vectors
    return np.array([fnorm(ref[i], row) for row in target])
def _make_square(sequence):
    """Reshape each flattened n*n vector in *sequence* to an (n, n) matrix.

    The side length is inferred from the length of the first element, so
    every element is assumed to hold n*n entries.
    """
    side = int(np.sqrt(len(sequence[0])))
    return np.array([vec.reshape(side, side) for vec in sequence])
def kl_divergence_msm(ref, target, i):
    """KL divergence between MSM transition matrices (flattened inputs).

    NOTE(review): all five *_msm wrappers pass (target, ref) to the *_array
    helpers -- the opposite of their own argument order -- so the reference
    row is taken from *target*. This is consistent across the module but
    looks suspicious; confirm it is intentional upstream.
    """
    return kl_divergence_array(_make_square(target), _make_square(ref), i)
def sym_kl_divergence_msm(ref, target, i):
    """Symmetrized KL divergence between MSM transition matrices."""
    return sym_kl_divergence_array(_make_square(target), _make_square(ref), i)
def js_divergence_msm(ref, target, i):
    """JS divergence between MSM transition matrices."""
    return js_divergence_array(_make_square(target), _make_square(ref), i)
def js_metric_msm(ref, target, i):
    """JS metric between MSM transition matrices."""
    return js_metric_array(_make_square(target), _make_square(ref), i)
def fnorm_msm(ref, target, i):
    """Frobenius norm between MSM transition matrices."""
    return _fnorm_array(_make_square(target), _make_square(ref), i)
| lgpl-2.1 |
Dewpal/Humidistat | DHT22.py | 1 | 1400 | #!/usr/bin/python
import sys
import time
import datetime
import Adafruit_DHT
import plotly
import pandas as pd
import plotly.plotly as py
import plotly.graph_objs as go
# Parse command line parameters.
# NOTE(review): nothing is actually parsed from the command line; the
# sensor model and GPIO pin are hard-coded below.
sensor = Adafruit_DHT.DHT22
pin = 21  # BCM GPIO pin the DHT22 data line is wired to
# connect to plotly
# NOTE(review): credentials committed in plain text -- move to a config file.
plotly.tools.set_credentials_file(username='bramDeJaeg', api_key='jemhzjyun0')
# Parameters for data storage
Ts= 1; # sampling time (s)
nStore= 5 # number of datapoints to store
i=1
# Pre-allocate the frame with index 0..nStore-2.
# NOTE(review): this stores only nStore-1 points, not nStore as the
# comment above suggests -- likely an off-by-one; confirm intent.
data= pd.DataFrame({'Time': 0,'Temperature': 0,'Humidity': 0}, columns=['Time','Temperature','Humidity'],index=range(0,nStore-1))
# Collect the initial batch of readings, one per Ts seconds.
for i in range(0,nStore-1):
    # read_retry blocks until it obtains a reading or gives up (None, None).
    humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
    if humidity is not None and temperature is not None:
        data.loc[i]=pd.Series({'Time': datetime.datetime.now(),'Temperature': temperature,'Humidity': humidity})
    else:
        print('missed reading')
    time.sleep(Ts)
    # NOTE(review): redundant -- the for statement rebinds i each iteration.
    i=i+1
# Initial scatter trace; the stream token allows live updates to be pushed.
trace=go.Scatter(
    x= data.Time,
    y= data.Humidity,
    stream=dict(
        token= "0f1psssxtu",
        maxpoints= 200
    )
)
layout = go.Layout(
    title='RPi, DHT-sensor Data'
)
fig=go.Figure(data=[trace], layout=layout)
py.plot(fig,filename = 'basic_TH',)
# Open the plotly stream and push one humidity sample per Ts seconds, forever.
stream=py.Stream('0f1psssxtu')
stream.open()
while True:
    humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
    i=i+1
    # NOTE(review): humidity may be None here if the read failed; plotly
    # would then receive a null sample -- confirm that is acceptable.
    stream.write({'x': datetime.datetime.now(), 'y': humidity})
    time.sleep(Ts)
| mit |
HoverHell/mplh5canvas | examples/multi_plot.py | 4 | 1357 | #!/usr/bin/python
"""Testbed for the animation functionality of the backend, with multiple figures.
It basically produces an long series of frames that get animated on the client
browser side, this time with two figures.
"""
import matplotlib
matplotlib.use('module://mplh5canvas.backend_h5canvas')
from pylab import *
import time
def refresh_data(ax):
    """Replace the data of *ax*'s first line with a sine shifted by ``count``.

    ``count`` is a module-level global advanced by the animation loop below,
    so each call slides the 2-second window forward.
    """
    t = arange(0.0 + count, 2.0 + count, 0.01)
    s = sin(2*pi*t)
    ax.lines[0].set_xdata(t)
    ax.lines[0].set_ydata(s)
    # Keep the view window tracking the new data range.
    ax.set_xlim(t[0], t[-1])
# First figure: two periods of a sine wave on a labelled voltage/time axis.
t = arange(0.0, 2.0, 0.01)
s = sin(2*pi*t)
plot(t, s, linewidth=1.0)
xlabel('time (s)')
ylabel('voltage (mV)')
title('Frist Post')  # NOTE(review): probable typo for "First Post"; string kept as-is.
f = gcf()
ax = f.gca()
count = 0
# Second figure: a straight line whose extent grows with 'count'.
f2 = figure()
ax2 = f2.gca()
ax2.set_xlabel('IMDB rating')
ax2.set_ylabel('South African Connections')
ax2.set_title('Luds chart...')
ax2.plot(arange(0.0, 5 + count, 0.01), arange(0.0, 5 + count, 0.01))
show(block=False, layout=2)
# show the figure manager but don't block script execution so animation works..
# layout=2 overrides the default layout manager which only shows a single plot in the browser window
# Animation loop: mutate both figures' line data and redraw once per second.
while True:
    refresh_data(ax)
    d = arange(0.0, 5 + count, 0.01)
    ax2.lines[0].set_xdata(d)
    ax2.lines[0].set_ydata(d)
    ax2.set_xlim(d[0], d[-1])
    ax2.set_ylim(d[0], d[-1])
    f.canvas.draw()
    f2.canvas.draw()
    count += 0.01
    time.sleep(1)
| bsd-3-clause |
partofthething/home-assistant | tests/components/smart_meter_texas/test_init.py | 6 | 2552 | """Test the Smart Meter Texas module."""
from unittest.mock import patch
from homeassistant.components.homeassistant import (
DOMAIN as HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.components.smart_meter_texas.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_ERROR,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.setup import async_setup_component
from .conftest import TEST_ENTITY_ID, setup_integration
async def test_setup_with_no_config(hass):
    """Test that no config is successful."""
    assert await async_setup_component(hass, DOMAIN, {}) is True
    await hass.async_block_till_done()
    # Assert no flows were started.
    assert len(hass.config_entries.flow.async_progress()) == 0


async def test_auth_failure(hass, config_entry, aioclient_mock):
    """Test if user's username or password is not accepted."""
    # auth_fail=True makes the mocked API reject the credentials.
    await setup_integration(hass, config_entry, aioclient_mock, auth_fail=True)

    assert config_entry.state == ENTRY_STATE_SETUP_ERROR


async def test_api_timeout(hass, config_entry, aioclient_mock):
    """Test that a timeout results in ConfigEntryNotReady."""
    await setup_integration(hass, config_entry, aioclient_mock, auth_timeout=True)

    # SETUP_RETRY means Home Assistant will retry setup later.
    assert config_entry.state == ENTRY_STATE_SETUP_RETRY


async def test_update_failure(hass, config_entry, aioclient_mock):
    """Test that the coordinator handles a bad response."""
    await setup_integration(hass, config_entry, aioclient_mock, bad_reading=True)
    await async_setup_component(hass, HA_DOMAIN, {})
    await hass.async_block_till_done()
    # Force a refresh through the homeassistant.update_entity service and
    # verify the coordinator polls the meter exactly once.
    with patch("smart_meter_texas.Meter.read_meter") as updater:
        await hass.services.async_call(
            HA_DOMAIN,
            SERVICE_UPDATE_ENTITY,
            {ATTR_ENTITY_ID: TEST_ENTITY_ID},
            blocking=True,
        )
        await hass.async_block_till_done()
        updater.assert_called_once()


async def test_unload_config_entry(hass, config_entry, aioclient_mock):
    """Test entry unloading."""
    await setup_integration(hass, config_entry, aioclient_mock)

    config_entries = hass.config_entries.async_entries(DOMAIN)
    assert len(config_entries) == 1
    assert config_entries[0] is config_entry
    assert config_entry.state == ENTRY_STATE_LOADED

    # Unloading must flip the entry state back to NOT_LOADED.
    await hass.config_entries.async_unload(config_entry.entry_id)
    await hass.async_block_till_done()
    assert config_entry.state == ENTRY_STATE_NOT_LOADED
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.