repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
totolef/Sick-beard | lib/imdb/parser/http/__init__.py | 50 | 37110 | """
parser.http package (imdb package).
This package provides the IMDbHTTPAccessSystem class used to access
IMDb's data through the web interface.
the imdb.IMDb function will return an instance of this class when
called with the 'accessSystem' argument set to "http" or "web"
or "html" (this is the default).
Copyright 2004-2012 Davide Alberani <da@erlug.linux.it>
2008 H. Turgut Uyar <uyar@tekir.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import sys
import socket
import logging
from urllib import FancyURLopener, quote_plus
from codecs import lookup
from imdb import IMDbBase, imdbURL_movie_main, imdbURL_person_main, \
imdbURL_character_main, imdbURL_company_main, \
imdbURL_keyword_main, imdbURL_find, imdbURL_top250, \
imdbURL_bottom100
from imdb.utils import analyze_title
from imdb._exceptions import IMDbDataAccessError, IMDbParserError
import searchMovieParser
import searchPersonParser
import searchCharacterParser
import searchCompanyParser
import searchKeywordParser
import movieParser
import personParser
import characterParser
import companyParser
import topBottomParser
# Logger for miscellaneous functions.
_aux_logger = logging.getLogger('imdbpy.parser.http.aux')

# True when running inside the Google App Engine sandbox; detected by
# trying to import the google.appengine package.  Used to work around
# GAE restrictions (e.g. read(size=...) is not supported there).
IN_GAE = False
try:
    import google.appengine
    IN_GAE = True
    _aux_logger.info('IMDbPY is running in the Google App Engine environment')
except ImportError:
    pass
class _ModuleProxy:
"""A proxy to instantiate and access parsers."""
def __init__(self, module, defaultKeys=None, oldParsers=False,
useModule=None, fallBackToNew=False):
"""Initialize a proxy for the given module; defaultKeys, if set,
muste be a dictionary of values to set for instanced objects."""
if oldParsers or fallBackToNew:
_aux_logger.warn('The old set of parsers was removed; falling ' \
'back to the new parsers.')
self.useModule = useModule
if defaultKeys is None:
defaultKeys = {}
self._defaultKeys = defaultKeys
self._module = module
def __getattr__(self, name):
"""Called only when no look-up is found."""
_sm = self._module
# Read the _OBJECTS dictionary to build the asked parser.
if name in _sm._OBJECTS:
_entry = _sm._OBJECTS[name]
# Initialize the parser.
kwds = {}
if self.useModule:
kwds = {'useModule': self.useModule}
parserClass = _entry[0][0]
obj = parserClass(**kwds)
attrsToSet = self._defaultKeys.copy()
attrsToSet.update(_entry[1] or {})
# Set attribute to the object.
for key in attrsToSet:
setattr(obj, key, attrsToSet[key])
setattr(self, name, obj)
return obj
return getattr(_sm, name)
# (major, minor) of the running Python interpreter; used to decide
# whether read(size=...) can be passed to the opener.
PY_VERSION = sys.version_info[:2]

# The cookies for the "adult" search.
# Please don't mess with these accounts.
# Old 'IMDbPY' account.
_old_cookie_id = 'boM2bYxz9MCsOnH9gZ0S9QHs12NWrNdApxsls1Vb5/NGrNdjcHx3dUas10UASoAjVEvhAbGagERgOpNkAPvxdbfKwaV2ikEj9SzXY1WPxABmDKQwdqzwRbM+12NSeJFGUEx3F8as10WwidLzVshDtxaPIbP13NdjVS9UZTYqgTVGrNcT9vyXU1'
_old_cookie_uu = '3M3AXsquTU5Gur/Svik+ewflPm5Rk2ieY3BIPlLjyK3C0Dp9F8UoPgbTyKiGtZp4x1X+uAUGKD7BM2g+dVd8eqEzDErCoYvdcvGLvVLAen1y08hNQtALjVKAe+1hM8g9QbNonlG1/t4S82ieUsBbrSIQbq1yhV6tZ6ArvSbA7rgHc8n5AdReyAmDaJ5Wm/ee3VDoCnGj/LlBs2ieUZNorhHDKK5Q=='
# New 'IMDbPYweb' account (the one actually used below).
_cookie_id = 'rH1jNAkjTlNXvHolvBVBsgaPICNZbNdjVjzFwzas9JRmusdjVoqBs/Hs12NR+1WFxEoR9bGKEDUg6sNlADqXwkas12N131Rwdb+UQNGKN8PWrNdjcdqBQVLq8mbGDHP3hqzxhbD692NQi9D0JjpBtRaPIbP1zNdjUOqENQYv1ADWrNcT9vyXU1'
_cookie_uu = 'su4/m8cho4c6HP+W1qgq6wchOmhnF0w+lIWvHjRUPJ6nRA9sccEafjGADJ6hQGrMd4GKqLcz2X4z5+w+M4OIKnRn7FpENH7dxDQu3bQEHyx0ZEyeRFTPHfQEX03XF+yeN1dsPpcXaqjUZAw+lGRfXRQEfz3RIX9IgVEffdBAHw2wQXyf9xdMPrQELw0QNB8dsffsqcdQemjPB0w+moLcPh0JrKrHJ9hjBzdMPpcXTH7XRwwOk='
# imdbpy2010 account (kept for reference, currently unused).
#_cookie_id = 'QrCdxVi+L+WgqOLrQJJgBgRRXGInphxiBPU/YXSFDyExMFzCp6YcYgSVXyEUhS/xMID8wqemHGID4DlntwZ49vemP5UXsAxiJ4D6goSmHGIgNT9hMXBaRSF2vMS3phxB0bVfQiQlP1RxdrzhB6YcRHFASyIhQVowwXCKtDSlD2YhgRvxBsCKtGemHBKH9mxSI='
#_cookie_uu = 'oiEo2yoJFCA2Zbn/o7Z1LAPIwotAu6QdALv3foDb1x5F/tdrFY63XkSfty4kntS8Y8jkHSDLt3406+d+JThEilPI0mtTaOQdA/t2/iErp22jaLdeVU5ya4PIREpj7HFdpzhEHadcIAngSER50IoHDpD6Bz4Qy3b+UIhE/hBbhz5Q63ceA2hEvhPo5B0FnrL9Q8jkWjDIbA0Au3d+AOtnXoCIRL4Q28c+UOtnXpP4RL4T6OQdA+6ijUCI5B0AW2d+UOtnXpPYRL4T6OQdA8jkTUOYlC0A=='
class _FakeURLOpener(object):
"""Fake URLOpener object, used to return empty strings instead of
errors.
"""
def __init__(self, url, headers):
self.url = url
self.headers = headers
def read(self, *args, **kwds): return ''
def close(self, *args, **kwds): pass
def info(self, *args, **kwds): return self.headers
# NOTE(review): this class relies on Python-2-only features
# (FancyURLopener, xrange, has_key, `except IOError, e`, unicode).
class IMDbURLopener(FancyURLopener):
    """Fetch web pages and handle errors."""
    # Logger shared by every instance of this opener.
    _logger = logging.getLogger('imdbpy.parser.http.urlopener')

    def __init__(self, *args, **kwargs):
        # URL of the last page actually fetched (after any redirect).
        self._last_url = u''
        FancyURLopener.__init__(self, *args, **kwargs)
        # Headers to add to every request.
        # XXX: IMDb's web server doesn't like urllib-based programs,
        # so lets fake to be Mozilla.
        # Wow! I'm shocked by my total lack of ethic! <g>
        for header in ('User-Agent', 'User-agent', 'user-agent'):
            self.del_header(header)
        self.set_header('User-Agent', 'Mozilla/5.0')
        # XXX: This class is used also to perform "Exact Primary
        # [Title|Name]" searches, and so by default the cookie is set.
        c_header = 'id=%s; uu=%s' % (_cookie_id, _cookie_uu)
        self.set_header('Cookie', c_header)

    def get_proxy(self):
        """Return the used proxy, or an empty string."""
        return self.proxies.get('http', '')

    def set_proxy(self, proxy):
        """Set the proxy; an empty/false value removes the http proxy."""
        if not proxy:
            if self.proxies.has_key('http'):
                del self.proxies['http']
        else:
            # Normalize "host:port" to a full http:// URL.
            if not proxy.lower().startswith('http://'):
                proxy = 'http://%s' % proxy
            self.proxies['http'] = proxy

    def set_header(self, header, value, _overwrite=True):
        """Set a default header (replacing any existing one, unless
        _overwrite is false)."""
        if _overwrite:
            self.del_header(header)
        self.addheaders.append((header, value))

    def get_header(self, header):
        """Return the first value of a header, or None
        if not present."""
        for index in xrange(len(self.addheaders)):
            if self.addheaders[index][0] == header:
                return self.addheaders[index][1]
        return None

    def del_header(self, header):
        """Remove a default header."""
        # Only the first matching header is removed.
        for index in xrange(len(self.addheaders)):
            if self.addheaders[index][0] == header:
                del self.addheaders[index]
                break

    def retrieve_unicode(self, url, size=-1):
        """Retrieves the given URL, and returns a unicode string,
        trying to guess the encoding of the data (assuming latin_1
        by default)"""
        encode = None
        try:
            if size != -1:
                # Ask the server for only the first `size` bytes.
                self.set_header('Range', 'bytes=0-%d' % size)
            uopener = self.open(url)
            kwds = {}
            if PY_VERSION > (2, 3) and not IN_GAE:
                # read(size=...) is not supported on old Pythons or GAE.
                kwds['size'] = size
            content = uopener.read(**kwds)
            self._last_url = uopener.url
            # Maybe the server is so nice to tell us the charset...
            server_encode = uopener.info().getparam('charset')
            # Otherwise, look at the content-type HTML meta tag.
            if server_encode is None and content:
                first_bytes = content[:512]
                begin_h = first_bytes.find('text/html; charset=')
                if begin_h != -1:
                    # 19 == len('text/html; charset=')
                    end_h = first_bytes[19+begin_h:].find('"')
                    if end_h != -1:
                        server_encode = first_bytes[19+begin_h:19+begin_h+end_h]
            if server_encode:
                try:
                    # Accept the declared charset only if codecs knows it.
                    if lookup(server_encode):
                        encode = server_encode
                except (LookupError, ValueError, TypeError):
                    pass
            uopener.close()
            if size != -1:
                self.del_header('Range')
            self.close()
        except IOError, e:
            if size != -1:
                # Ensure that the Range header is removed.
                self.del_header('Range')
            raise IMDbDataAccessError({'errcode': e.errno,
                                        'errmsg': str(e.strerror),
                                        'url': url,
                                        'proxy': self.get_proxy(),
                                        'exception type': 'IOError',
                                        'original exception': e})
        if encode is None:
            encode = 'latin_1'
            # The detection of the encoding is error prone...
            self._logger.warn('Unable to detect the encoding of the retrieved '
                        'page [%s]; falling back to default latin1.', encode)
        ##print unicode(content, encode, 'replace').encode('utf8')
        return unicode(content, encode, 'replace')

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """On 404, return an empty fake page; any other HTTP error is
        turned into an IMDbDataAccessError."""
        if errcode == 404:
            self._logger.warn('404 code returned for %s: %s (headers: %s)',
                                url, errmsg, headers)
            return _FakeURLOpener(url, headers)
        raise IMDbDataAccessError({'url': 'http:%s' % url,
                                    'errcode': errcode,
                                    'errmsg': errmsg,
                                    'headers': headers,
                                    'error type': 'http_error_default',
                                    'proxy': self.get_proxy()})

    def open_unknown(self, fullurl, data=None):
        # Unknown URL scheme: fail loudly instead of guessing.
        raise IMDbDataAccessError({'fullurl': fullurl,
                                    'data': str(data),
                                    'error type': 'open_unknown',
                                    'proxy': self.get_proxy()})

    def open_unknown_proxy(self, proxy, fullurl, data=None):
        # Unknown proxy scheme: fail loudly instead of guessing.
        raise IMDbDataAccessError({'proxy': str(proxy),
                                    'fullurl': fullurl,
                                    'error type': 'open_unknown_proxy',
                                    'data': str(data)})
class IMDbHTTPAccessSystem(IMDbBase):
"""The class used to access IMDb's data through the web."""
accessSystem = 'http'
_http_logger = logging.getLogger('imdbpy.parser.http')
def __init__(self, isThin=0, adultSearch=1, proxy=-1, oldParsers=False,
fallBackToNew=False, useModule=None, cookie_id=-1,
timeout=30, cookie_uu=None, *arguments, **keywords):
"""Initialize the access system."""
IMDbBase.__init__(self, *arguments, **keywords)
self.urlOpener = IMDbURLopener()
# When isThin is set, we're parsing the "maindetails" page
# of a movie (instead of the "combined" page) and movie/person
# references are not collected if no defaultModFunct is provided.
#
# NOTE: httpThin was removed since IMDbPY 4.8.
self.isThin = isThin
self._getRefs = True
self._mdparse = False
if isThin:
self._http_logger.warn('"httpThin" access system no longer ' +
'supported; "http" used automatically', exc_info=False)
self.isThin = 0
if self.accessSystem in ('httpThin', 'webThin', 'htmlThin'):
self.accessSystem = 'http'
self.set_timeout(timeout)
self.do_adult_search(adultSearch)
if cookie_id != -1:
if cookie_id is None:
self.del_cookies()
elif cookie_uu is not None:
self.set_cookies(cookie_id, cookie_uu)
if proxy != -1:
self.set_proxy(proxy)
if useModule is not None:
if not isinstance(useModule, (list, tuple)) and ',' in useModule:
useModule = useModule.split(',')
_def = {'_modFunct': self._defModFunct, '_as': self.accessSystem}
# Proxy objects.
self.smProxy = _ModuleProxy(searchMovieParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.spProxy = _ModuleProxy(searchPersonParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.scProxy = _ModuleProxy(searchCharacterParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.scompProxy = _ModuleProxy(searchCompanyParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.skProxy = _ModuleProxy(searchKeywordParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.mProxy = _ModuleProxy(movieParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.pProxy = _ModuleProxy(personParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.cProxy = _ModuleProxy(characterParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.compProxy = _ModuleProxy(companyParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.topBottomProxy = _ModuleProxy(topBottomParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
def _normalize_movieID(self, movieID):
"""Normalize the given movieID."""
try:
return '%07d' % int(movieID)
except ValueError, e:
raise IMDbParserError('invalid movieID "%s": %s' % (movieID, e))
def _normalize_personID(self, personID):
"""Normalize the given personID."""
try:
return '%07d' % int(personID)
except ValueError, e:
raise IMDbParserError('invalid personID "%s": %s' % (personID, e))
def _normalize_characterID(self, characterID):
"""Normalize the given characterID."""
try:
return '%07d' % int(characterID)
except ValueError, e:
raise IMDbParserError('invalid characterID "%s": %s' % \
(characterID, e))
def _normalize_companyID(self, companyID):
"""Normalize the given companyID."""
try:
return '%07d' % int(companyID)
except ValueError, e:
raise IMDbParserError('invalid companyID "%s": %s' % \
(companyID, e))
def get_imdbMovieID(self, movieID):
"""Translate a movieID in an imdbID; in this implementation
the movieID _is_ the imdbID.
"""
return movieID
def get_imdbPersonID(self, personID):
"""Translate a personID in an imdbID; in this implementation
the personID _is_ the imdbID.
"""
return personID
def get_imdbCharacterID(self, characterID):
"""Translate a characterID in an imdbID; in this implementation
the characterID _is_ the imdbID.
"""
return characterID
def get_imdbCompanyID(self, companyID):
"""Translate a companyID in an imdbID; in this implementation
the companyID _is_ the imdbID.
"""
return companyID
def get_proxy(self):
"""Return the used proxy or an empty string."""
return self.urlOpener.get_proxy()
def set_proxy(self, proxy):
"""Set the web proxy to use.
It should be a string like 'http://localhost:8080/'; if the
string is empty, no proxy will be used.
If set, the value of the environment variable HTTP_PROXY is
automatically used.
"""
self.urlOpener.set_proxy(proxy)
def set_timeout(self, timeout):
"""Set the default timeout, in seconds, of the connection."""
try:
timeout = int(timeout)
except Exception:
timeout = 0
if timeout <= 0:
timeout = None
socket.setdefaulttimeout(timeout)
def set_cookies(self, cookie_id, cookie_uu):
"""Set a cookie to access an IMDb's account."""
c_header = 'id=%s; uu=%s' % (cookie_id, cookie_uu)
self.urlOpener.set_header('Cookie', c_header)
def del_cookies(self):
"""Remove the used cookie."""
self.urlOpener.del_header('Cookie')
def do_adult_search(self, doAdult,
cookie_id=_cookie_id, cookie_uu=_cookie_uu):
"""If doAdult is true, 'adult' movies are included in the
search results; cookie_id and cookie_uu are optional
parameters to select a specific account (see your cookie
or cookies.txt file."""
if doAdult:
self.set_cookies(cookie_id, cookie_uu)
#c_header = 'id=%s; uu=%s' % (cookie_id, cookie_uu)
#self.urlOpener.set_header('Cookie', c_header)
else:
self.urlOpener.del_header('Cookie')
def _retrieve(self, url, size=-1, _noCookies=False):
"""Retrieve the given URL."""
##print url
_cookies = None
# XXX: quite obscene, but in some very limited
# cases (/ttXXXXXXX/epdate) if the cookies
# are set, a 500 error is returned.
if _noCookies:
_cookies = self.urlOpener.get_header('Cookie')
self.del_cookies()
self._http_logger.debug('fetching url %s (size: %d)', url, size)
try:
ret = self.urlOpener.retrieve_unicode(url, size=size)
finally:
if _noCookies and _cookies:
self.urlOpener.set_header('Cookie', _cookies)
return ret
def _get_search_content(self, kind, ton, results):
"""Retrieve the web page for a given search.
kind can be 'tt' (for titles), 'nm' (for names),
'char' (for characters) or 'co' (for companies).
ton is the title or the name to search.
results is the maximum number of results to be retrieved."""
if isinstance(ton, unicode):
try:
ton = ton.encode('iso8859-1')
except Exception, e:
try:
ton = ton.encode('utf-8')
except Exception, e:
pass
##params = 'q=%s&%s=on&mx=%s' % (quote_plus(ton), kind, str(results))
params = 'q=%s;s=%s;mx=%s' % (quote_plus(ton), kind, str(results))
if kind == 'ep':
params = params.replace('s=ep;', 's=tt;ttype=ep;', 1)
cont = self._retrieve(self.urls['find'] % params)
#print 'URL:', imdbURL_find % params
if cont.find('Your search returned more than') == -1 or \
cont.find("displayed the exact matches") == -1:
return cont
# The retrieved page contains no results, because too many
# titles or names contain the string we're looking for.
params = 'q=%s;ls=%s;lm=0' % (quote_plus(ton), kind)
size = 131072 + results * 512
return self._retrieve(self.urls['find'] % params, size=size)
def _search_movie(self, title, results):
# The URL of the query.
# XXX: To retrieve the complete results list:
# params = urllib.urlencode({'more': 'tt', 'q': title})
##params = urllib.urlencode({'tt': 'on','mx': str(results),'q': title})
##params = 'q=%s&tt=on&mx=%s' % (quote_plus(title), str(results))
##cont = self._retrieve(imdbURL_find % params)
cont = self._get_search_content('tt', title, results)
return self.smProxy.search_movie_parser.parse(cont, results=results)['data']
def _search_episode(self, title, results):
t_dict = analyze_title(title)
if t_dict['kind'] == 'episode':
title = t_dict['title']
cont = self._get_search_content('ep', title, results)
return self.smProxy.search_movie_parser.parse(cont, results=results)['data']
def get_movie_main(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'combined')
return self.mProxy.movie_parser.parse(cont, mdparse=self._mdparse)
def get_movie_full_credits(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'fullcredits')
return self.mProxy.movie_parser.parse(cont)
def get_movie_plot(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'plotsummary')
return self.mProxy.plot_parser.parse(cont, getRefs=self._getRefs)
def get_movie_awards(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'awards')
return self.mProxy.movie_awards_parser.parse(cont)
def get_movie_taglines(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'taglines')
return self.mProxy.taglines_parser.parse(cont)
def get_movie_keywords(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'keywords')
return self.mProxy.keywords_parser.parse(cont)
def get_movie_alternate_versions(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'alternateversions')
return self.mProxy.alternateversions_parser.parse(cont,
getRefs=self._getRefs)
def get_movie_crazy_credits(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'crazycredits')
return self.mProxy.crazycredits_parser.parse(cont,
getRefs=self._getRefs)
def get_movie_goofs(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'goofs')
return self.mProxy.goofs_parser.parse(cont, getRefs=self._getRefs)
def get_movie_quotes(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'quotes')
return self.mProxy.quotes_parser.parse(cont, getRefs=self._getRefs)
def get_movie_release_dates(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'releaseinfo')
ret = self.mProxy.releasedates_parser.parse(cont)
ret['info sets'] = ('release dates', 'akas')
return ret
get_movie_akas = get_movie_release_dates
get_movie_release_info = get_movie_release_dates
def get_movie_vote_details(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'ratings')
return self.mProxy.ratings_parser.parse(cont)
def get_movie_official_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'officialsites')
return self.mProxy.officialsites_parser.parse(cont)
def get_movie_trivia(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'trivia')
return self.mProxy.trivia_parser.parse(cont, getRefs=self._getRefs)
def get_movie_connections(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'movieconnections')
return self.mProxy.connections_parser.parse(cont)
def get_movie_technical(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'technical')
return self.mProxy.tech_parser.parse(cont)
def get_movie_business(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'business')
return self.mProxy.business_parser.parse(cont, getRefs=self._getRefs)
def get_movie_literature(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'literature')
return self.mProxy.literature_parser.parse(cont)
def get_movie_locations(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'locations')
return self.mProxy.locations_parser.parse(cont)
def get_movie_soundtrack(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'soundtrack')
return self.mProxy.soundtrack_parser.parse(cont)
def get_movie_dvd(self, movieID):
self._http_logger.warn('dvd information no longer available', exc_info=False)
return {}
def get_movie_recommendations(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'recommendations')
return self.mProxy.rec_parser.parse(cont)
def get_movie_external_reviews(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'externalreviews')
return self.mProxy.externalrev_parser.parse(cont)
def get_movie_newsgroup_reviews(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'newsgroupreviews')
return self.mProxy.newsgrouprev_parser.parse(cont)
def get_movie_misc_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'miscsites')
return self.mProxy.misclinks_parser.parse(cont)
def get_movie_sound_clips(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'soundsites')
return self.mProxy.soundclips_parser.parse(cont)
def get_movie_video_clips(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'videosites')
return self.mProxy.videoclips_parser.parse(cont)
def get_movie_photo_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'photosites')
return self.mProxy.photosites_parser.parse(cont)
def get_movie_news(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'news')
return self.mProxy.news_parser.parse(cont, getRefs=self._getRefs)
def get_movie_amazon_reviews(self, movieID):
self._http_logger.warn('amazon review no longer available', exc_info=False)
return {}
def get_movie_guests(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'epcast')
return self.mProxy.episodes_cast_parser.parse(cont)
get_movie_episodes_cast = get_movie_guests
def get_movie_merchandising_links(self, movieID):
self._http_logger.warn('merchandising links no longer available',
exc_info=False)
return {}
def _purge_seasons_data(self, data_d):
if '_current_season' in data_d['data']:
del data_d['data']['_current_season']
if '_seasons' in data_d['data']:
del data_d['data']['_seasons']
return data_d
def get_movie_episodes(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'episodes')
data_d = self.mProxy.season_episodes_parser.parse(cont)
if not data_d and 'data' in data_d:
return {}
_current_season = data_d['data'].get('_current_season', '')
_seasons = data_d['data'].get('_seasons') or []
data_d = self._purge_seasons_data(data_d)
data_d['data'].setdefault('episodes', {})
nr_eps = len(data_d['data']['episodes'].get(_current_season) or [])
for season in _seasons:
if season == _current_season:
continue
other_cont = self._retrieve(self.urls['movie_main'] % movieID + 'episodes?season=' + str(season))
other_d = self.mProxy.season_episodes_parser.parse(other_cont)
other_d = self._purge_seasons_data(other_d)
other_d['data'].setdefault('episodes', {})
if not (other_d and other_d['data'] and other_d['data']['episodes'][season]):
continue
nr_eps += len(other_d['data']['episodes'].get(season) or [])
data_d['data']['episodes'][season] = other_d['data']['episodes'][season]
data_d['data']['number of episodes'] = nr_eps
return data_d
def get_movie_episodes_rating(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'epdate', _noCookies=True)
data_d = self.mProxy.eprating_parser.parse(cont)
# set movie['episode of'].movieID for every episode.
if data_d.get('data', {}).has_key('episodes rating'):
for item in data_d['data']['episodes rating']:
episode = item['episode']
episode['episode of'].movieID = movieID
return data_d
def get_movie_faqs(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'faq')
return self.mProxy.movie_faqs_parser.parse(cont, getRefs=self._getRefs)
def get_movie_airing(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'tvschedule')
return self.mProxy.airing_parser.parse(cont)
get_movie_tv_schedule = get_movie_airing
def get_movie_synopsis(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'synopsis')
return self.mProxy.synopsis_parser.parse(cont)
def get_movie_parents_guide(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'parentalguide')
return self.mProxy.parentsguide_parser.parse(cont)
def _search_person(self, name, results):
# The URL of the query.
# XXX: To retrieve the complete results list:
# params = urllib.urlencode({'more': 'nm', 'q': name})
##params = urllib.urlencode({'nm': 'on', 'mx': str(results), 'q': name})
#params = 'q=%s&nm=on&mx=%s' % (quote_plus(name), str(results))
#cont = self._retrieve(imdbURL_find % params)
cont = self._get_search_content('nm', name, results)
return self.spProxy.search_person_parser.parse(cont, results=results)['data']
def get_person_main(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'maindetails')
ret = self.pProxy.maindetails_parser.parse(cont)
ret['info sets'] = ('main', 'filmography')
return ret
def get_person_filmography(self, personID):
return self.get_person_main(personID)
def get_person_biography(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'bio')
return self.pProxy.bio_parser.parse(cont, getRefs=self._getRefs)
def get_person_awards(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'awards')
return self.pProxy.person_awards_parser.parse(cont)
def get_person_other_works(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'otherworks')
return self.pProxy.otherworks_parser.parse(cont, getRefs=self._getRefs)
#def get_person_agent(self, personID):
# cont = self._retrieve(self.urls['person_main'] % personID + 'agent')
# return self.pProxy.agent_parser.parse(cont)
def get_person_publicity(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'publicity')
return self.pProxy.publicity_parser.parse(cont)
def get_person_official_sites(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'officialsites')
return self.pProxy.person_officialsites_parser.parse(cont)
def get_person_news(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'news')
return self.pProxy.news_parser.parse(cont)
def get_person_episodes(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmoseries')
return self.pProxy.person_series_parser.parse(cont)
def get_person_merchandising_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'forsale')
return self.pProxy.sales_parser.parse(cont)
def get_person_genres_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmogenre')
return self.pProxy.person_genres_parser.parse(cont)
def get_person_keywords_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmokey')
return self.pProxy.person_keywords_parser.parse(cont)
def _search_character(self, name, results):
cont = self._get_search_content('char', name, results)
return self.scProxy.search_character_parser.parse(cont, results=results)['data']
def get_character_main(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID)
ret = self.cProxy.character_main_parser.parse(cont)
ret['info sets'] = ('main', 'filmography')
return ret
get_character_filmography = get_character_main
def get_character_biography(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID + 'bio')
return self.cProxy.character_bio_parser.parse(cont,
getRefs=self._getRefs)
def get_character_episodes(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID +
'filmoseries')
return self.cProxy.character_series_parser.parse(cont)
def get_character_quotes(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID + 'quotes')
return self.cProxy.character_quotes_parser.parse(cont,
getRefs=self._getRefs)
def _search_company(self, name, results):
cont = self._get_search_content('co', name, results)
url = self.urlOpener._last_url
return self.scompProxy.search_company_parser.parse(cont, url=url,
results=results)['data']
def get_company_main(self, companyID):
cont = self._retrieve(self.urls['company_main'] % companyID)
ret = self.compProxy.company_main_parser.parse(cont)
return ret
def _search_keyword(self, keyword, results):
# XXX: the IMDb web server seems to have some serious problem with
# non-ascii keyword.
# E.g.: http://akas.imdb.com/keyword/fianc%E9/
# will return a 500 Internal Server Error: Redirect Recursion.
keyword = keyword.encode('utf8', 'ignore')
try:
cont = self._get_search_content('kw', keyword, results)
except IMDbDataAccessError:
self._http_logger.warn('unable to search for keyword %s', keyword,
exc_info=True)
return []
return self.skProxy.search_keyword_parser.parse(cont, results=results)['data']
def _get_keyword(self, keyword, results):
keyword = keyword.encode('utf8', 'ignore')
try:
cont = self._retrieve(self.urls['keyword_main'] % keyword)
except IMDbDataAccessError:
self._http_logger.warn('unable to get keyword %s', keyword,
exc_info=True)
return []
return self.skProxy.search_moviekeyword_parser.parse(cont, results=results)['data']
def _get_top_bottom_movies(self, kind):
if kind == 'top':
parser = self.topBottomProxy.top250_parser
url = self.urls['top250']
elif kind == 'bottom':
parser = self.topBottomProxy.bottom100_parser
url = self.urls['bottom100']
else:
return []
cont = self._retrieve(url)
return parser.parse(cont)['data']
| gpl-3.0 |
ekherit/JpsiKK | share/mkpbs.py | 1 | 2570 | #!/usr/bin/python
import os
#import stat
import sys
import re
import random
from optparse import OptionParser
from stat import *
#visitor callback for os.path.walk: accumulate directory entries
def proceed_create_file_list(filelist, directory, files):
    """Append every entry of `files`, joined with `directory`, to
    `filelist`, keeping the accumulated list sorted."""
    filelist.extend(os.path.join(directory, name) for name in files)
    filelist.sort()
#create the sorted list of all entries found under a directory
def create_file_list(directory):
    """Return a sorted list of the paths of every entry found,
    recursively, under `directory`.

    BUGFIX/PORT: the original used os.path.walk, which exists only in
    Python 2 (removed in Python 3); os.walk produces the same visits.
    Like the original visitor, both files AND sub-directories are
    included in the result.
    """
    result = []
    for dirpath, dirnames, filenames in os.walk(directory):
        # os.path.walk passed every directory entry (files and
        # sub-directories alike) to the callback; keep that behavior.
        for name in filenames + dirnames:
            result.append(os.path.join(dirpath, name))
    result.sort()
    return result
def filter_file_list(files, reg):
    """Return only the entries of `files` matching the regular
    expression `reg` (anchored at the start, as with re.match)."""
    pattern = re.compile(reg)
    return [name for name in files if pattern.match(name)]
def group_files(file_list, n):
    """Split `file_list` into consecutive chunks of at most `n` entries.

    Returns a list of lists.  BUGFIX: the original returned [[]] for an
    empty `file_list` (which would have generated an empty, useless job
    script) and misbehaved for n < 1; now an empty input yields [] and
    n < 1 is treated as 1.
    """
    if n < 1:
        n = 1
    # Slicing naturally produces chunks of size n, with a short tail.
    return [file_list[i:i + n] for i in range(0, len(file_list), n)]
# Command-line interface: job-name prefix, .cfg files per job, PBS queue.
options = OptionParser()
options.add_option("-p", "--job_prefix", dest="job_prefix", help="Prefix for the pbs job", default="test")
options.add_option("-n", "--runs_per_job",type="int", dest="run_number", default=1, help="Number of runs per job")
options.add_option("-q", "--queue",dest="queue", default="besq", help="Queue name")
(opt, args) = options.parse_args()
# First positional argument: the directory holding the .cfg run files.
dir=args[0]
absdir = os.path.abspath(dir)
# Collect every .cfg file and split them into groups of run_number files.
files = filter_file_list(create_file_list(dir),".+.cfg$")
files.sort()
groups = group_files(files,opt.run_number)
pbs_file_list = []
i=0
# Write one tcsh PBS job script per group of .cfg files.
for flist in groups:
    tcsh_file_name = "%s-%04d.tcsh" % (opt.job_prefix, i)
    pbs_name = "%s/%s" % (dir , tcsh_file_name )
    pbs_file = open(pbs_name, 'w')
    # PBS preamble: job name, log file, merged stdout/stderr, BOSS setup.
    s="""#!/bin/tcsh
#PBS -N """ + tcsh_file_name + """
#PBS -o """ + tcsh_file_name + """.log
#PBS -j oe
##PBS -q besq
source /ihepbatch/bes/nikolaev/bin/boss664
cd """ + absdir + """
"""
    # One boss.exe invocation per .cfg file, logging next to the .cfg.
    for f in flist:
        log = os.path.abspath(os.path.splitext(f)[0]+".log")
        s = s+ "boss.exe "+os.path.abspath(f)+" >& " + log + "\n"
    pbs_file.write(s)
    pbs_file_list.append(pbs_name)
    # Make the job script executable (rwx user, r-x group/other).
    os.chmod(pbs_name, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH)
    i=i+1
# Write a submit.sh that qsubs every generated job script to the queue.
submit_file_name = dir+"/"+"submit.sh"
submit_file = open(submit_file_name, 'w')
s = "#!/bin/bash\n"
for pbs_file in pbs_file_list:
    s = s + "qsub -q " + opt.queue + " " + os.path.abspath(pbs_file) + "\n"
submit_file.write(s)
os.chmod(submit_file_name, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH)
| gpl-3.0 |
Kalyzee/edx-platform | common/lib/xmodule/xmodule/tests/rendering/core.py | 208 | 3312 | """
This module is indended to provide a pluggable way to add assertions about
the rendered content of XBlocks.
For each view on the XBlock, this module defines a @singledispatch function
that can be used to test the contents of the rendered html.
The functions are of the form:
@singledispatch
def assert_student_view_valid_html(block, html):
'''
block: The block that rendered the HTML
html: An lxml.html parse of the HTML for this block
'''
...
assert foo
...
for child in children:
assert_xblock_html(child, child_html)
@singledispatch
def assert_student_view_invalid_html(block, html):
'''
block: The block that rendered the HTML
html: A string of unparsable html
'''
...
assert foo
...
for child in children:
assert_xblock_html(child, child_html)
...
A further extension would be to provide a companion set of functions that
resources that are provided to the Fragment
"""
import lxml.html
import lxml.etree
from singledispatch import singledispatch
@singledispatch
def assert_student_view_valid_html(block, html):
    """
    Asserts that the html generated by the `student_view` view is correct for
    the supplied block
    :param block: The :class:`XBlock` that generated the html
    :param html: The generated html as parsed by lxml.html
    """
    # Base case: any parseable html is accepted.  Block-type-specific checks
    # are plugged in via @assert_student_view_valid_html.register(<BlockType>).
    pass
@singledispatch
def assert_studio_view_valid_html(block, html):
    """
    Asserts that the html generated by the `studio_view` view is correct for
    the supplied block
    :param block: The :class:`XBlock` that generated the html
    :param html: The generated html as parsed by lxml.html
    """
    # Base case: any parseable html is accepted.  Block-type-specific checks
    # are plugged in via @assert_studio_view_valid_html.register(<BlockType>).
    pass
@singledispatch
def assert_student_view_invalid_html(block, html):
    """
    Asserts that the html generated by the `student_view` view is correct for
    the supplied block, given that html wasn't parsable
    :param block: The :class:`XBlock` that generated the html
    :param html: A string, not parseable as html
    """
    # Base case: unparsable output is a failure unless a block type registers
    # an overload that deliberately tolerates it.
    assert False, "student_view should produce valid html"
@singledispatch
def assert_studio_view_invalid_html(block, html):
    """
    Asserts that the html generated by the `studio_view` view is correct for
    the supplied block
    :param block: The :class:`XBlock` that generated the html
    :param html: A string, not parseable as html
    """
    # Base case: unparsable output is a failure unless a block type registers
    # an overload that deliberately tolerates it.
    assert False, "studio_view should produce valid html"
def assert_student_view(block, fragment):
    """
    Validate that `fragment` is correct output for `block`'s `student_view`,
    dispatching on whether the fragment content parses as an html fragment.
    """
    content = fragment.content
    try:
        parsed = lxml.html.fragment_fromstring(content)
    except lxml.etree.ParserError:
        assert_student_view_invalid_html(block, content)
    else:
        assert_student_view_valid_html(block, parsed)
def assert_studio_view(block, fragment):
    """
    Validate that `fragment` is correct output for `block`'s `studio_view`,
    dispatching on whether the fragment content parses as an html fragment.
    """
    content = fragment.content
    try:
        parsed = lxml.html.fragment_fromstring(content)
    except lxml.etree.ParserError:
        assert_studio_view_invalid_html(block, content)
    else:
        assert_studio_view_valid_html(block, parsed)
| agpl-3.0 |
mitsei/dlkit | dlkit/authz_adapter/authentication_process/managers.py | 1 | 4428 | """AuthZ Adapter implementations of authentication.process managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import sessions
from ..osid import managers as osid_managers
from ..osid.osid_errors import Unimplemented
from ..osid.osid_errors import Unimplemented, OperationFailed, Unsupported
from ..primitives import Id
from ..utilities import raise_null_argument
from dlkit.manager_impls.authentication_process import managers as authentication_process_managers
class AuthenticationProcessProfile(osid_managers.OsidProfile, authentication_process_managers.AuthenticationProcessProfile):
    """Adapts underlying AuthenticationProcessProfile methods with authorization checks."""

    def __init__(self):
        osid_managers.OsidProfile.__init__(self)

    def _get_hierarchy_session(self, proxy=None):
        # Best-effort lookup of the agency hierarchy session (with or without
        # a proxy); returns None when the provider does not implement it.
        if proxy is not None:
            try:
                return self._provider_manager.get_agency_hierarchy_session(proxy)
            except Unimplemented:
                return None
        try:
            return self._provider_manager.get_agency_hierarchy_session()
        except Unimplemented:
            return None

    def get_authentication_record_types(self):
        """Pass through to the provider's supported authentication record types."""
        # Implemented from azosid template for -
        # osid.resource.ResourceProfile.get_resource_record_types
        return self._provider_manager.get_authentication_record_types()

    authentication_record_types = property(fget=get_authentication_record_types)

    def get_authentication_input_record_types(self):
        """Pass through to the provider's supported authentication input record types."""
        # Implemented from azosid template for -
        # osid.resource.ResourceProfile.get_resource_record_types
        return self._provider_manager.get_authentication_input_record_types()

    authentication_input_record_types = property(fget=get_authentication_input_record_types)

    def get_challenge_record_types(self):
        """Pass through to the provider's supported challenge record types."""
        # Implemented from azosid template for -
        # osid.resource.ResourceProfile.get_resource_record_types
        return self._provider_manager.get_challenge_record_types()

    challenge_record_types = property(fget=get_challenge_record_types)

    def get_credential_types(self):
        """Pass through to the provider's supported credential types."""
        # Implemented from azosid template for -
        # osid.repository.RepositoryProfile.get_coordinate_types
        return self._provider_manager.get_credential_types()

    credential_types = property(fget=get_credential_types)

    def get_trust_types(self):
        """Pass through to the provider's supported trust types."""
        # Implemented from azosid template for -
        # osid.repository.RepositoryProfile.get_coordinate_types
        return self._provider_manager.get_trust_types()

    trust_types = property(fget=get_trust_types)
class AuthenticationProcessManager(osid_managers.OsidManager, AuthenticationProcessProfile, authentication_process_managers.AuthenticationProcessManager):
    """Adapts underlying AuthenticationProcessManager methods with authorization checks."""

    def __init__(self):
        AuthenticationProcessProfile.__init__(self)

    def initialize(self, runtime):
        """Read the configured provider implementation id and obtain its manager."""
        osid_managers.OsidManager.initialize(self, runtime)
        config = self._my_runtime.get_configuration()
        parameter_id = Id('parameter:authentication_processProviderImpl@authz_adapter')
        provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
        self._provider_manager = runtime.get_manager('AUTHENTICATION_PROCESS', provider_impl)
        # need to add version argument
class AuthenticationProcessProxyManager(osid_managers.OsidProxyManager, AuthenticationProcessProfile, authentication_process_managers.AuthenticationProcessProxyManager):
    """Adapts underlying AuthenticationProcessProxyManager methods with authorization checks."""

    def __init__(self):
        AuthenticationProcessProfile.__init__(self)

    def initialize(self, runtime):
        """Read the configured provider implementation id and obtain its proxy manager."""
        osid_managers.OsidProxyManager.initialize(self, runtime)
        config = self._my_runtime.get_configuration()
        parameter_id = Id('parameter:authentication_processProviderImpl@authz_adapter')
        provider_impl = config.get_value_by_parameter(parameter_id).get_string_value()
        self._provider_manager = runtime.get_proxy_manager('AUTHENTICATION_PROCESS', provider_impl)
        # need to add version argument
| mit |
anish/buildbot | worker/buildbot_worker/commands/fs.py | 1 | 10005 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import glob
import os
import shutil
import sys
from twisted.internet import defer
from twisted.internet import threads
from twisted.python import log
from twisted.python import runtime
from buildbot_worker import runprocess
from buildbot_worker.commands import base
from buildbot_worker.commands import utils
class MakeDirectory(base.Command):
    """Create a directory (and any missing parents) below the builder basedir."""

    header = "mkdir"
    # args['dir'] is relative to Builder directory, and is required.
    requiredArgs = ['dir']

    def start(self):
        target = os.path.join(self.builder.basedir, self.args['dir'])
        try:
            if not os.path.isdir(target):
                os.makedirs(target)
            self.sendStatus({'rc': 0})
        except OSError as e:
            log.msg("MakeDirectory {0} failed: {1}".format(target, e))
            self.sendStatus(
                {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, target)})
            self.sendStatus({'rc': e.errno})
class RemoveDirectory(base.Command):
    """Remove one directory (or a list of directories) below the builder basedir."""

    header = "rmdir"
    # args['dir'] is relative to Builder directory, and is required.
    requiredArgs = ['dir']

    def setup(self, args):
        self.logEnviron = args.get('logEnviron', True)

    @defer.inlineCallbacks
    def start(self):
        args = self.args
        dirnames = args['dir']
        self.timeout = args.get('timeout', 120)
        self.maxTime = args.get('maxTime', None)
        self.rc = 0
        if isinstance(dirnames, list):
            assert dirnames
            for dirname in dirnames:
                res = yield self.removeSingleDir(dirname)
                # Even if single removal of single file/dir consider it as
                # failure of whole command, but continue removing other files
                # Send 'rc' to master to handle failure cases
                if res != 0:
                    self.rc = res
        else:
            self.rc = yield self.removeSingleDir(dirnames)
        self.sendStatus({'rc': self.rc})

    def removeSingleDir(self, dirname):
        """Remove one directory; returns (a deferred for) the rc (0 = ok)."""
        # NOTE(review): self.dir is shared instance state read by
        # _clobber/_tryChmod for the directory currently being removed.
        self.dir = os.path.join(self.builder.basedir, dirname)
        if runtime.platformType != "posix":
            # Non-posix: do the recursive delete in a thread.
            d = threads.deferToThread(utils.rmdirRecursive, self.dir)

            def cb(_):
                return 0  # rc=0

            def eb(f):
                self.sendStatus(
                    {'header': 'exception from rmdirRecursive\n' + f.getTraceback()})
                return -1  # rc=-1
            d.addCallbacks(cb, eb)
        else:
            # Posix: shell out to 'rm -rf' (with a chmod-and-retry fallback).
            d = self._clobber(None)
        return d

    @defer.inlineCallbacks
    def _tryChmod(self, rc):
        """After a failed rm -rf: recursively chmod u+rwx and retry once."""
        assert isinstance(rc, int)
        if rc == 0:
            defer.returnValue(0)
            return  # pragma: no cover
        # Attempt a recursive chmod and re-try the rm -rf after.
        command = ["chmod", "-Rf", "u+rwx",
                   os.path.join(self.builder.basedir, self.dir)]
        if sys.platform.startswith('freebsd'):
            # Work around a broken 'chmod -R' on FreeBSD (it tries to recurse into a
            # directory for which it doesn't have permission, before changing that
            # permission) by running 'find' instead
            command = ["find", os.path.join(self.builder.basedir, self.dir),
                       '-exec', 'chmod', 'u+rwx', '{}', ';']
        c = runprocess.RunProcess(self.builder, command, self.builder.basedir,
                                  sendRC=0, timeout=self.timeout, maxTime=self.maxTime,
                                  logEnviron=self.logEnviron, usePTY=False)
        self.command = c
        rc = yield c.start()
        rc = yield self._clobber(rc, True)
        defer.returnValue(rc)

    @defer.inlineCallbacks
    def _clobber(self, dummy, chmodDone=False):
        """Run 'rm -rf' on self.dir; on failure, chmod and retry once."""
        command = ["rm", "-rf", self.dir]
        c = runprocess.RunProcess(self.builder, command, self.builder.basedir,
                                  sendRC=0, timeout=self.timeout, maxTime=self.maxTime,
                                  logEnviron=self.logEnviron, usePTY=False)
        self.command = c
        # sendRC=0 means the rm command will send stdout/stderr to the
        # master, but not the rc=0 when it finishes. That job is left to
        # _sendRC
        rc = yield c.start()
        # The rm -rf may fail if there is a left-over subdir with chmod 000
        # permissions. So if we get a failure, we attempt to chmod suitable
        # permissions and re-try the rm -rf.
        if not chmodDone:
            rc = yield self._tryChmod(rc)
        defer.returnValue(rc)
class CopyDirectory(base.Command):
    """Recursively copy one directory to another below the builder basedir."""

    header = "cpdir"
    # args['todir'] and args['fromdir'] are relative to Builder directory, and
    # are required.
    requiredArgs = ['todir', 'fromdir']

    def setup(self, args):
        self.logEnviron = args.get('logEnviron', True)

    def start(self):
        args = self.args
        fromdir = os.path.join(self.builder.basedir, self.args['fromdir'])
        todir = os.path.join(self.builder.basedir, self.args['todir'])
        self.timeout = args.get('timeout', 120)
        self.maxTime = args.get('maxTime', None)
        if runtime.platformType != "posix":
            # Non-posix: copy in a thread with shutil.copytree.
            d = threads.deferToThread(shutil.copytree, fromdir, todir)

            def cb(_):
                return 0  # rc=0

            def eb(f):
                self.sendStatus(
                    {'header': 'exception from copytree\n' + f.getTraceback()})
                return -1  # rc=-1
            d.addCallbacks(cb, eb)

            @d.addCallback
            def send_rc(rc):
                self.sendStatus({'rc': rc})
        else:
            # Posix: shell out to 'cp' so permissions/symlinks are preserved.
            if not os.path.exists(os.path.dirname(todir)):
                os.makedirs(os.path.dirname(todir))
            if os.path.exists(todir):
                # I don't think this happens, but just in case..
                log.msg(
                    "cp target '{0}' already exists -- cp will not do what you think!".format(todir))
            command = ['cp', '-R', '-P', '-p', '-v', fromdir, todir]
            c = runprocess.RunProcess(self.builder, command, self.builder.basedir,
                                      sendRC=False, timeout=self.timeout, maxTime=self.maxTime,
                                      logEnviron=self.logEnviron, usePTY=False)
            self.command = c
            d = c.start()
            d.addCallback(self._abandonOnFailure)
            d.addCallbacks(self._sendRC, self._checkAbandoned)
        return d
class StatFile(base.Command):
    """Send os.stat() results for a file below the builder basedir."""

    header = "stat"
    # args['file'] is relative to Builder directory, and is required.
    # Fix: this attribute was misspelled 'requireArgs', so unlike the other
    # commands in this file (which all use 'requiredArgs'), the presence of
    # 'file' was never actually enforced.
    requiredArgs = ['file']

    def start(self):
        filename = os.path.join(
            self.builder.basedir, self.args.get('workdir', ''), self.args['file'])
        try:
            stat = os.stat(filename)
            self.sendStatus({'stat': tuple(stat)})
            self.sendStatus({'rc': 0})
        except OSError as e:
            log.msg("StatFile {0} failed: {1}".format(filename, e))
            self.sendStatus(
                {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, filename)})
            self.sendStatus({'rc': e.errno})
class GlobPath(base.Command):
    """Expand a glob pattern below the builder basedir and send the matches."""

    header = "glob"
    # args['path'] is relative to Builder directory, and is required.
    requiredArgs = ['path']

    def start(self):
        pattern = os.path.join(self.builder.basedir, self.args['path'])
        try:
            # glob.glob() only accepts recursive=True on python 3.5+.
            if sys.version_info[:2] >= (3, 5):
                matches = glob.glob(pattern, recursive=True)
            else:
                matches = glob.glob(pattern)
            self.sendStatus({'files': matches})
            self.sendStatus({'rc': 0})
        except OSError as e:
            log.msg("GlobPath {0} failed: {1}".format(pattern, e))
            self.sendStatus(
                {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, pattern)})
            self.sendStatus({'rc': e.errno})
class ListDir(base.Command):
    """Send the directory listing of a directory below the builder basedir."""

    header = "listdir"
    # args['dir'] is relative to Builder directory, and is required.
    # Fix: this attribute was misspelled 'requireArgs', so unlike the other
    # commands in this file (which all use 'requiredArgs'), the presence of
    # 'dir' was never actually enforced.
    requiredArgs = ['dir']

    def start(self):
        dirname = os.path.join(self.builder.basedir, self.args['dir'])
        try:
            files = os.listdir(dirname)
            self.sendStatus({'files': files})
            self.sendStatus({'rc': 0})
        except OSError as e:
            log.msg("ListDir {0} failed: {1}".format(dirname, e))
            self.sendStatus(
                {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, dirname)})
            self.sendStatus({'rc': e.errno})
class RemoveFile(base.Command):
    """Delete a single file below the builder basedir and report the rc."""

    header = "rmfile"
    # args['path'] is relative to Builder directory, and is required.
    requiredArgs = ['path']

    def start(self):
        target = os.path.join(self.builder.basedir, self.args['path'])
        try:
            os.remove(target)
            self.sendStatus({'rc': 0})
        except OSError as e:
            log.msg("remove file {0} failed: {1}".format(target, e))
            self.sendStatus(
                {'header': '{0}: {1}: {2}'.format(self.header, e.strerror, target)})
            self.sendStatus({'rc': e.errno})
| gpl-2.0 |
braingineer/AI-games | pacman/multiagent/keyboardAgents.py | 46 | 3045 | # keyboardAgents.py
# -----------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
from game import Agent
from game import Directions
import random
class KeyboardAgent(Agent):
    """
    An agent controlled by the keyboard (WASD or the arrow keys).
    """
    WEST_KEY = 'a'
    EAST_KEY = 'd'
    NORTH_KEY = 'w'
    SOUTH_KEY = 's'
    STOP_KEY = 'q'

    def __init__(self, index=0):
        self.lastMove = Directions.STOP
        self.index = index
        self.keys = []

    def getAction(self, state):
        from graphicsUtils import keys_waiting
        from graphicsUtils import keys_pressed
        pressed = keys_waiting() + keys_pressed()
        if pressed != []:
            # Remember the most recent key events until new ones arrive.
            self.keys = pressed

        legal = state.getLegalActions(self.index)
        move = self.getMove(legal)

        if move == Directions.STOP:
            # No key applies: keep moving in the previous direction if possible.
            if self.lastMove in legal:
                move = self.lastMove

        if (self.STOP_KEY in self.keys) and Directions.STOP in legal:
            move = Directions.STOP

        if move not in legal:
            move = random.choice(legal)

        self.lastMove = move
        return move

    def getMove(self, legal):
        # Later entries win when several keys are held, preserving the
        # original precedence (south over north over east over west).
        bindings = (
            ((self.WEST_KEY, 'Left'), Directions.WEST),
            ((self.EAST_KEY, 'Right'), Directions.EAST),
            ((self.NORTH_KEY, 'Up'), Directions.NORTH),
            ((self.SOUTH_KEY, 'Down'), Directions.SOUTH),
        )
        move = Directions.STOP
        for key_options, direction in bindings:
            if direction in legal and any(k in self.keys for k in key_options):
                move = direction
        return move
class KeyboardAgent2(KeyboardAgent):
    """
    A second agent controlled by the keyboard (IJKL layout).
    """
    WEST_KEY = 'j'
    EAST_KEY = "l"
    NORTH_KEY = 'i'
    SOUTH_KEY = 'k'
    STOP_KEY = 'u'

    def getMove(self, legal):
        # Same precedence as the base class, but without arrow-key aliases.
        move = Directions.STOP
        for key, direction in ((self.WEST_KEY, Directions.WEST),
                               (self.EAST_KEY, Directions.EAST),
                               (self.NORTH_KEY, Directions.NORTH),
                               (self.SOUTH_KEY, Directions.SOUTH)):
            if key in self.keys and direction in legal:
                move = direction
        return move
| mit |
apanju/odoo | addons/account/test/test_parent_structure.py | 432 | 2108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# TODO: move this in a YAML test with !python tag
#
import xmlrpclib

# Connection settings for the OpenERP XML-RPC endpoint under test.
DB = 'training3'
USERID = 1
USERPASS = 'admin'

sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % ('localhost',8069))

# Fetch every account with its parent and nested-set (parent_left/right) bounds.
ids = sock.execute(DB, USERID, USERPASS, 'account.account', 'search', [], {})
account_lists = sock.execute(DB, USERID, USERPASS, 'account.account', 'read', ids, ['parent_id','parent_left','parent_right'])
accounts = dict(map(lambda x: (x['id'],x), account_lists))
for a in account_lists:
    if a['parent_id']:
        # A child's interval must lie strictly inside its parent's interval.
        assert a['parent_left'] > accounts[a['parent_id'][0]]['parent_left']
        assert a['parent_right'] < accounts[a['parent_id'][0]]['parent_right']
    assert a['parent_left'] < a['parent_right']
    for a2 in account_lists:
        # No two intervals may partially overlap (they must be nested or disjoint).
        assert not ((a2['parent_right']>a['parent_left']) and
            (a2['parent_left']<a['parent_left']) and
            (a2['parent_right']<a['parent_right']))
        if a2['parent_id']==a['id']:
            assert (a2['parent_left']>a['parent_left']) and (a2['parent_right']<a['parent_right'])

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ahuarte47/QGIS | tests/src/python/test_core_additions.py | 22 | 1500 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for core additions
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Denis Rouzaud'
__date__ = '15.5.2018'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
import os
from qgis.testing import unittest, start_app
from qgis.core import metaEnumFromValue, metaEnumFromType, QgsTolerance, QgsMapLayer
from qgis.PyQt import sip
start_app()
class TestCoreAdditions(unittest.TestCase):
    """Unit tests for the metaEnumFromValue/metaEnumFromType python additions."""

    def testMetaEnum(self):
        me = metaEnumFromValue(QgsTolerance.Pixels)
        self.assertIsNotNone(me)
        self.assertEqual(me.valueToKey(QgsTolerance.Pixels), 'Pixels')

        # Deliberately bound to a *new* name:
        # if using same variable twice (e.g. me = me2), this seg faults
        me2 = metaEnumFromValue(QgsTolerance.Pixels, QgsTolerance)
        self.assertIsNotNone(me)
        self.assertEqual(me2.valueToKey(QgsTolerance.Pixels), 'Pixels')

        # With raiseException=False an unresolvable value must not raise,
        # do not raise error
        self.assertIsNone(metaEnumFromValue(1, QgsTolerance, False))

        # do not provide an int
        with self.assertRaises(TypeError):
            metaEnumFromValue(1)

        # QgsMapLayer.LayerType is not a Q_ENUM
        with self.assertRaises(ValueError):
            metaEnumFromValue(QgsMapLayer.LayerType)


if __name__ == "__main__":
    unittest.main()
| gpl-2.0 |
mrow4a/smashbox | lib/test_nplustwo.py | 6 | 1327 | import os
import time
import tempfile
__doc__ = """ Add nfiles to a directory (two clients) and check consistency after synch.
"""
from smashbox.utilities import *
from smashbox.utilities.hash_files import *
nfiles = int(config.get('nplustwo_nfiles',10))
def adder(step):
    # Worker body shared by both clients: reset, sync, add nfiles hash files,
    # re-sync and verify the combined file count and integrity.
    # NOTE(review): helpers (reset_owncloud_account, run_ocsync, ...) come from
    # the smashbox.utilities star-imports at the top of the file.
    # if you don't want to cleanup server files from previous run then set oc_account_reset_procedure=keep option
    #
    # smash -o oc_account_reset_procedure=keep lib/test_nplustwo.py (or in etc/smashbox.conf)
    reset_owncloud_account()

    # cleanup all local files for the test
    reset_rundir()

    step(1,'Preparation')
    d = make_workdir()
    run_ocsync(d)
    k0 = count_files(d)

    step(2,'Add %s files and check if we still have k1+nfiles after resync'%nfiles)

    for i in range(nfiles):
        create_hashfile(d,size=config.hashfile_size)
    run_ocsync(d)

    step(3,'Get other files from server and check')
    run_ocsync(d)

    (ntot,k1,ncorruptions) = analyse_hashfiles(d)
    # Both workers add nfiles each, hence the expected delta of 2*nfiles.
    error_check(k1-k0==2*nfiles,'Expecting to have %d files more: see k1=%d k0=%d'%(nfiles,k1,k0))
    error_check(ncorruptions==0,'After synch %d corrupted files found'%(ncorruptions))
    logger.info('SUCCESS: %d files found',k1)
@add_worker
def worker0(step):
    # First concurrent sync client; both workers run the same add/verify steps.
    adder(step)
@add_worker
def worker1(step):
    # Second concurrent sync client; both workers run the same add/verify steps.
    adder(step)
| agpl-3.0 |
GeoNode/geonode-user-accounts | account/managers.py | 1 | 1031 | from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
    """Manager with helpers for creating and querying EmailAddress rows."""

    def add_email(self, user, email, **kwargs):
        """Get or create the EmailAddress for ``user``/``email``.

        If ``confirm=True`` is passed and the address is not yet verified,
        a confirmation message is sent.  Returns the EmailAddress instance.
        """
        confirm = kwargs.pop("confirm", False)
        # Bug fix: get_or_create() returns an (instance, created) tuple; the
        # previous code bound the tuple itself, so ``email_address.verified``
        # below raised AttributeError whenever confirm=True was used.
        email_address, _created = self.get_or_create(user=user, email=email, **kwargs)
        if confirm and not email_address.verified:
            email_address.send_confirmation()
        return email_address

    def get_primary(self, user):
        """Return the user's primary EmailAddress, or None if there is none."""
        try:
            return self.get(user=user, primary=True)
        except self.model.DoesNotExist:
            return None

    def get_users_for(self, email):
        """Return the users that have verified ownership of ``email``."""
        # this is a list rather than a generator because we probably want to
        # do a len() on it right away
        return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
    """Manager providing maintenance helpers for EmailConfirmation rows."""

    def delete_expired_confirmations(self):
        """Delete every stored confirmation whose key has expired."""
        for record in self.all():
            if record.key_expired():
                record.delete()
| mit |
hefen1/chromium | net/tools/testserver/testserver_base.py | 57 | 8501 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import BaseHTTPServer
import errno
import json
import optparse
import os
import re
import socket
import SocketServer
import struct
import sys
import warnings
import tlslite.errors
# Ignore deprecation warnings, they make our output more cluttered.
warnings.filterwarnings("ignore", category=DeprecationWarning)
if sys.platform == 'win32':
import msvcrt
# Using debug() seems to cause hangs on XP: see http://crbug.com/64515.
debug_output = sys.stderr
def debug(string):
  """Write one line to the shared debug stream and flush it immediately."""
  debug_output.write(string + "\n")
  debug_output.flush()
class Error(Exception):
  """Base error class for this module."""


class OptionError(Error):
  """Error raised for bad command line options."""
class FileMultiplexer(object):
  """File-like object that duplicates write()/flush() to two underlying streams."""

  def __init__(self, fd1, fd2):
    self.__fd1 = fd1
    self.__fd2 = fd2

  def __del__(self):
    # Close owned streams, but never the process-wide stdout/stderr.
    for fd in (self.__fd1, self.__fd2):
      if fd != sys.stdout and fd != sys.stderr:
        fd.close()

  def write(self, text):
    self.__fd1.write(text)
    self.__fd2.write(text)

  def flush(self):
    self.__fd1.flush()
    self.__fd2.flush()
class ClientRestrictingServerMixIn:
  """Implements verify_request to limit connections to our configured IP
  address."""

  def verify_request(self, _request, client_address):
    # Accept only connections originating from the address we bound to.
    expected_host = self.server_address[0]
    return client_address[0] == expected_host
class BrokenPipeHandlerMixIn:
  """Allows the server to deal with "broken pipe" errors (which happen if the
  browser quits with outstanding requests, like for the favicon). This mix-in
  requires the class to derive from SocketServer.BaseServer and not override its
  handle_error() method. """

  def handle_error(self, request, client_address):
    # Inspect the in-flight exception; swallow known client-disconnect cases.
    value = sys.exc_info()[1]
    if isinstance(value, tlslite.errors.TLSClosedConnectionError):
      print "testserver.py: Closed connection"
      return
    if isinstance(value, socket.error):
      err = value.args[0]
      if sys.platform in ('win32', 'cygwin'):
        # "An established connection was aborted by the software in your host."
        pipe_err = 10053
      else:
        pipe_err = errno.EPIPE
      if err == pipe_err:
        print "testserver.py: Broken pipe"
        return
      if err == errno.ECONNRESET:
        print "testserver.py: Connection reset by peer"
        return
    # Anything else is unexpected: fall through to the default handler.
    SocketServer.BaseServer.handle_error(self, request, client_address)
class StoppableHTTPServer(BaseHTTPServer.HTTPServer):
  """This is a specialization of BaseHTTPServer to allow it
  to be exited cleanly (by setting its "stop" member to True)."""

  def serve_forever(self):
    # Handle one request at a time; a request handler may set self.stop to
    # terminate the loop, after which the listening socket is closed.
    self.stop = False
    self.nonce_time = None
    while not self.stop:
      self.handle_request()
    self.socket.close()
def MultiplexerHack(std_fd, log_fd):
  """Creates a FileMultiplexer that will write to both specified files.

  When running on Windows XP bots, stdout and stderr will be invalid file
  handles, so log_fd will be returned directly. (This does not occur if you
  run the test suite directly from a console, but only if the output of the
  test executable is redirected.)
  """
  return log_fd if std_fd.fileno() <= 0 else FileMultiplexer(std_fd, log_fd)
class BasePageHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Request handler dispatching each HTTP method to a chain of handlers.

  A handler returns a true value once it has handled the request, which
  stops the dispatch loop for that method.
  """

  def __init__(self, request, client_address, socket_server,
               connect_handlers, get_handlers, head_handlers, post_handlers,
               put_handlers):
    self._connect_handlers = connect_handlers
    self._get_handlers = get_handlers
    self._head_handlers = head_handlers
    self._post_handlers = post_handlers
    self._put_handlers = put_handlers
    BaseHTTPServer.BaseHTTPRequestHandler.__init__(
        self, request, client_address, socket_server)

  def log_request(self, *args, **kwargs):
    # Disable request logging to declutter test log output.
    pass

  def _ShouldHandleRequest(self, handler_name):
    """Determines if the path can be handled by the handler.

    We consider a handler valid if the path begins with the
    handler name. It can optionally be followed by "?*", "/*".
    """
    pattern = re.compile('%s($|\?|/).*' % handler_name)
    return pattern.match(self.path)

  def do_CONNECT(self):
    for handler in self._connect_handlers:
      if handler():
        return

  def do_GET(self):
    for handler in self._get_handlers:
      if handler():
        return

  def do_HEAD(self):
    for handler in self._head_handlers:
      if handler():
        return

  def do_POST(self):
    for handler in self._post_handlers:
      if handler():
        return

  def do_PUT(self):
    for handler in self._put_handlers:
      if handler():
        return
class TestServerRunner(object):
  """Runs a test server and communicates with the controlling C++ test code.

  Subclasses should override the create_server method to create their server
  object, and the add_options method to add their own options.
  """

  def __init__(self):
    self.option_parser = optparse.OptionParser()
    self.add_options()

  def main(self):
    self.options, self.args = self.option_parser.parse_args()

    # Mirror stderr (and, when requested, stdout) into the log file.
    logfile = open(self.options.log_file, 'w')
    sys.stderr = MultiplexerHack(sys.stderr, logfile)
    if self.options.log_to_console:
      sys.stdout = MultiplexerHack(sys.stdout, logfile)
    else:
      sys.stdout = logfile

    server_data = {
        'host': self.options.host,
    }
    self.server = self.create_server(server_data)
    self._notify_startup_complete(server_data)
    self.run_server()

  def create_server(self, server_data):
    """Creates a server object and returns it.

    Must populate server_data['port'], and can set additional server_data
    elements if desired."""
    raise NotImplementedError()

  def run_server(self):
    try:
      self.server.serve_forever()
    except KeyboardInterrupt:
      print 'shutting down server'
      self.server.stop = True

  def add_options(self):
    # Base options shared by all test servers; subclasses extend this.
    self.option_parser.add_option('--startup-pipe', type='int',
                                  dest='startup_pipe',
                                  help='File handle of pipe to parent process')
    self.option_parser.add_option('--log-to-console', action='store_const',
                                  const=True, default=False,
                                  dest='log_to_console',
                                  help='Enables or disables sys.stdout logging '
                                  'to the console.')
    self.option_parser.add_option('--log-file', default='testserver.log',
                                  dest='log_file',
                                  help='The name of the server log file.')
    self.option_parser.add_option('--port', default=0, type='int',
                                  help='Port used by the server. If '
                                  'unspecified, the server will listen on an '
                                  'ephemeral port.')
    self.option_parser.add_option('--host', default='127.0.0.1',
                                  dest='host',
                                  help='Hostname or IP upon which the server '
                                  'will listen. Client connections will also '
                                  'only be allowed from this address.')
    self.option_parser.add_option('--data-dir', dest='data_dir',
                                  help='Directory from which to read the '
                                  'files.')

  def _notify_startup_complete(self, server_data):
    # Notify the parent that we've started. (BaseServer subclasses
    # bind their sockets on construction.)
    if self.options.startup_pipe is not None:
      server_data_json = json.dumps(server_data)
      server_data_len = len(server_data_json)
      print 'sending server_data: %s (%d bytes)' % (
          server_data_json, server_data_len)
      if sys.platform == 'win32':
        fd = msvcrt.open_osfhandle(self.options.startup_pipe, 0)
      else:
        fd = self.options.startup_pipe
      startup_pipe = os.fdopen(fd, "w")
      # First write the data length as an unsigned 4-byte value. This
      # is _not_ using network byte ordering since the other end of the
      # pipe is on the same machine.
      startup_pipe.write(struct.pack('=L', server_data_len))
      startup_pipe.write(server_data_json)
      startup_pipe.close()
| bsd-3-clause |
dajohnso/cfme_tests | cfme/tests/configure/test_access_control.py | 1 | 30390 | # -*- coding: utf-8 -*-
import fauxfactory
import pytest
import traceback
from cfme.configure.access_control import User, Group, Role, Tenant, Project
from utils import error
import cfme.fixtures.pytest_selenium as sel
from cfme import test_requirements
from cfme.base.credential import Credential
from cfme.automate.explorer import AutomateExplorer # NOQA
from cfme.base import Server
from cfme.control.explorer import ControlExplorer # NOQA
from cfme.exceptions import OptionNotAvailable
from cfme.common.provider import base_types
from cfme.infrastructure import virtual_machines as vms
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.services.myservice import MyService
from cfme.web_ui import flash, Table, InfoBlock, toolbar as tb
from cfme.configure import tasks
from fixtures.provider import setup_one_or_skip
from utils.appliance.implementations.ui import navigate_to
from utils.blockers import BZ
from utils.log import logger
from utils.providers import ProviderFilter
from utils.update import update
from utils import version
# Table of user records shown on the Access Control "All" pages.
records_table = Table("//div[@id='main_div']//table")
# Pre-existing default group used when a test does not create its own.
usergrp = Group(description='EvmGroup-user')
# Table listing groups on the Groups "All" page (same locator as records_table).
group_table = Table("//div[@id='main_div']//table")
# Every test in this module exercises role-based access control (RBAC).
pytestmark = test_requirements.rbac
@pytest.fixture(scope='module')
def a_provider(request):
    """Set up (or reuse) one VMware infrastructure provider for this module."""
    prov_filter = ProviderFilter(classes=[VMwareProvider])
    return setup_one_or_skip(request, filters=[prov_filter])
def new_credential():
    """Build a Credential with a randomized principal and a fixed secret."""
    principal = 'uid{}'.format(fauxfactory.gen_alphanumeric())
    return Credential(principal=principal, secret='redhat')
def new_user(group=usergrp):
    """Build (but do not create) a User with a random name and credential.

    ``group`` defaults to the module-level EvmGroup-user group.
    """
    username = 'user{}'.format(fauxfactory.gen_alphanumeric())
    return User(
        name=username,
        credential=new_credential(),
        email='xyz@redhat.com',
        group=group,
        cost_center='Workload',
        value_assign='Database')
def new_group(role='EvmRole-approver'):
    """Build (but do not create) a Group with a random description."""
    description = 'grp{}'.format(fauxfactory.gen_alphanumeric())
    return Group(description=description, role=role)
def new_role():
    """Build (but do not create) a Role with a random name and no VM restriction."""
    role_name = 'rol{}'.format(fauxfactory.gen_alphanumeric())
    return Role(name=role_name, vm_restriction='None')
def get_tag():
    """Read the 'My Company Tags' text from the Smart Management info block."""
    return InfoBlock('Smart Management', 'My Company Tags').text
@pytest.fixture(scope='function')
def check_item_visibility(tag):
    """Return a checker verifying tag-based visibility of an item.

    The returned callable tags ``item`` with ``tag``, asserts the restricted
    user can see it, then removes the tag and asserts the item is no longer
    visible to that user.
    """
    def _check_item_visibility(item, user_restricted):
        category_name = ' '.join((tag.category.display_name, '*'))
        item.edit_tags(category_name, tag.display_name)
        # While logged in as the restricted user the tagged item is visible.
        with user_restricted:
            assert item.exists
        item.remove_tag(category_name, tag.display_name)
        # After the tag is removed, the item must disappear for that user.
        with user_restricted:
            assert not item.exists
    return _check_item_visibility
# User test cases
@pytest.mark.tier(2)
def test_user_crud():
    """CRUD lifecycle for a user: create, update name, copy, delete."""
    user = new_user()
    user.create()
    with update(user):
        user.name = user.name + "edited"
    copied_user = user.copy()
    copied_user.delete()
    user.delete()
# @pytest.mark.meta(blockers=[1035399])  # work around instead of skip
@pytest.mark.tier(2)
def test_user_login():
    """A freshly created user can log in and reach the dashboard."""
    user = new_user()
    user.create()
    try:
        with user:
            navigate_to(Server, 'Dashboard')
    finally:
        # Always restore the admin session, even if login failed.
        user.appliance.server.login_admin()
@pytest.mark.tier(3)
def test_user_duplicate_name(appliance):
    """Creating a second user with an existing userid must fail."""
    region = appliance.server_region
    nu = new_user()
    nu.create()
    # The error wording changed in 5.8 to mention the region.
    msg = version.pick({
        version.LOWEST: "Userid has already been taken",
        '5.8': "Userid is not unique within region {}".format(region)
    })
    with error.expected(msg):
        nu.create()


# Pre-existing read-only group reused by the validation tests below.
group_user = Group("EvmGroup-user")
@pytest.mark.tier(3)
def test_username_required_error_validation():
    """Creating a user with an empty name must be rejected."""
    user = User(
        name="",
        credential=new_credential(),
        email='xyz@redhat.com',
        group=group_user)
    with error.expected("Name can't be blank"):
        user.create()
@pytest.mark.tier(3)
def test_userid_required_error_validation():
    """Creating a user with an empty userid (principal) must be rejected."""
    user = User(
        name='user' + fauxfactory.gen_alphanumeric(),
        credential=Credential(principal='', secret='redhat'),
        email='xyz@redhat.com',
        group=group_user)
    with error.expected("Userid can't be blank"):
        user.create()
@pytest.mark.tier(3)
def test_user_password_required_error_validation():
    """Creating a user without a password must be rejected."""
    user = User(
        name='user' + fauxfactory.gen_alphanumeric(),
        credential=Credential(principal='uid' + fauxfactory.gen_alphanumeric(), secret=None),
        email='xyz@redhat.com',
        group=group_user)
    # The field name in the error message changed in 5.5.
    if version.current_version() < "5.5":
        check = "Password_digest can't be blank"
    else:
        check = "Password can't be blank"
    with error.expected(check):
        user.create()
@pytest.mark.tier(3)
def test_user_group_error_validation():
    """Creating a user without any group must be rejected."""
    user = User(
        name='user' + fauxfactory.gen_alphanumeric(),
        credential=new_credential(),
        email='xyz@redhat.com',
        group='')
    with error.expected("A User must be assigned to a Group"):
        user.create()
@pytest.mark.tier(3)
def test_user_email_error_validation():
    """Creating a user with a malformed email address must be rejected."""
    user = User(
        name='user' + fauxfactory.gen_alphanumeric(),
        credential=new_credential(),
        email='xyzdhat.com',  # deliberately missing the '@'
        group=group_user)
    with error.expected("Email must be a valid email address"):
        user.create()
@pytest.mark.tier(2)
def test_user_edit_tag():
    """A tag assigned to a user shows up under Smart Management."""
    user = new_user()
    user.create()
    user.edit_tags("Cost Center *", "Cost Center 001")
    assert get_tag() == "Cost Center: Cost Center 001", "User edit tag failed"
    user.delete()
@pytest.mark.tier(3)
def test_user_remove_tag():
    """Removing a previously assigned tag from a user takes effect."""
    user = new_user()
    user.create()
    user.edit_tags("Department", "Engineering")
    user.remove_tag("Department", "Engineering")
    # Re-open Details so the info block reflects the removal.
    navigate_to(user, 'Details')
    assert get_tag() != "Department: Engineering", "Remove User tag failed"
    user.delete()
@pytest.mark.tier(3)
def test_delete_default_user():
    """Test for deleting default user Administrator.

    Steps:
        * Login as Administrator user
        * Try deleting the user
    """
    user = User(name='Administrator')
    navigate_to(User, 'All')
    # The column header was renamed in 5.4.
    column = version.pick({version.LOWEST: "Name",
                           "5.4": "Full Name"})
    row = records_table.find_row_by_cells({column: user.name})
    sel.check(sel.element(".//input[@type='checkbox']", root=row[0]))
    tb.select('Configuration', 'Delete selected Users', invokes_alert=True)
    sel.handle_alert()
    flash.assert_message_match('Default EVM User "{}" cannot be deleted' .format(user.name))
@pytest.mark.tier(3)
@pytest.mark.meta(automates=[BZ(1090877)])
@pytest.mark.meta(blockers=[BZ(1408479)], forced_streams=["5.7", "upstream"])
@pytest.mark.uncollectif(lambda: version.current_version() >= "5.7")
def test_current_user_login_delete(request):
    """Test for deleting current user login.

    Steps:
        * Login as Admin user
        * Create a new user
        * Login with the new user
        * Try deleting the user
    """
    group_user = Group("EvmGroup-super_administrator")
    user = User(
        name='user' + fauxfactory.gen_alphanumeric(),
        credential=new_credential(),
        email='xyz@redhat.com',
        group=group_user)
    user.create()
    request.addfinalizer(user.delete)
    # BUG FIX: pass the bound method itself. The original called
    # login_admin() immediately and registered its return value (None)
    # as the finalizer, so no admin re-login happened at teardown.
    request.addfinalizer(user.appliance.server.login_admin)
    with user:
        if version.current_version() >= '5.7':
            navigate_to(user, 'Details')
            menu_item = ('Configuration', 'Delete this User')
            # From 5.7 the item is present but disabled for the current user.
            assert tb.exists(*menu_item) and tb.is_greyed(*menu_item), "Delete User is not dimmed"
        else:
            with error.expected("Current EVM User \"{}\" cannot be deleted".format(user.name)):
                user.delete()
@pytest.mark.tier(3)
def test_tagvis_user(user_restricted, check_item_visibility):
    """ Tests if group honour tag visibility feature
    Prerequirement:
        Catalog, tag, role, group and restricted user should be created

    Steps:
        1. As admin add tag to group
        2. Login as restricted user, group is visible for user
        3. As admin remove tag from group
        4. Login as restricted user, group is not visible for user
    """
    # NOTE(review): the restricted user is both the tagged item and the
    # viewer here -- confirm this is intentional (the docstring mentions
    # a group).
    check_item_visibility(user_restricted, user_restricted)
@pytest.mark.tier(2)
# Group test cases
def test_group_crud():
    """CRUD lifecycle for a group: create, update description, delete."""
    group = new_group()
    group.create()
    with update(group):
        group.description = group.description + "edited"
    group.delete()
@pytest.mark.tier(2)
def test_group_crud_with_tag(a_provider, category, tag):
    """Test for verifying group create with tag defined

    Steps:
        * Login as Admin user
        * Navigate to add group page
        * Fill all fields
        * Set tag
        * Save group
    """
    group = Group(
        description='grp{}'.format(fauxfactory.gen_alphanumeric()),
        role='EvmRole-approver',
        tag=[category.display_name, tag.display_name],
        host_cluster=[a_provider.data['name']],
        vm_template=[a_provider.data['name'], a_provider.data['datacenters'][0],
                     'Discovered virtual machine']
    )
    group.create()
    # Re-assign tag/host/template on update to exercise the edit path too.
    with update(group):
        group.tag = [tag.category.display_name, tag.display_name]
        group.host_cluster = [a_provider.data['name']]
        group.vm_template = [a_provider.data['name'], a_provider.data['datacenters'][0],
                             'Discovered virtual machine']
    group.delete()
@pytest.mark.tier(3)
def test_group_duplicate_name(appliance):
    """Creating a second group with the same description must fail."""
    region = appliance.server_region
    group = new_group()
    group.create()
    # The error wording changed in 5.8 to mention the region.
    msg = version.pick({
        version.LOWEST: "Description has already been taken",
        '5.8': "Description is not unique within region {}".format(region)
    })
    with error.expected(msg):
        group.create()
@pytest.mark.tier(2)
def test_group_edit_tag():
    """A tag assigned to a group shows up under Smart Management."""
    group = new_group()
    group.create()
    group.edit_tags("Cost Center *", "Cost Center 001")
    assert get_tag() == "Cost Center: Cost Center 001", "Group edit tag failed"
    group.delete()
@pytest.mark.tier(2)
def test_group_remove_tag():
    """Removing a previously assigned tag from a group takes effect."""
    group = new_group()
    group.create()
    navigate_to(group, 'Edit')
    group.edit_tags("Department", "Engineering")
    group.remove_tag("Department", "Engineering")
    assert get_tag() != "Department: Engineering", "Remove Group tag failed"
    group.delete()
@pytest.mark.tier(3)
def test_group_description_required_error_validation():
    """Creating a group with no description must be rejected."""
    error_text = "Description can't be blank"
    group = Group(description=None, role='EvmRole-approver')
    with error.expected(error_text):
        group.create()
    # Clear the error flash so it does not leak into the next test.
    flash.dismiss()
@pytest.mark.tier(3)
def test_delete_default_group():
    """Built-in (read only) groups cannot be deleted."""
    flash_msg = "EVM Group \"{}\": Error during delete: A read only group cannot be deleted."
    group = Group(description='EvmGroup-administrator')
    view = navigate_to(Group, 'All')
    row = group_table.find_row_by_cells({'Name': group.description})
    sel.check(sel.element(".//input[@type='checkbox']", root=row[0]))
    view.configuration.item_select('Delete selected Groups', handle_alert=True)
    view.flash.assert_message(flash_msg.format(group.description))
@pytest.mark.tier(3)
def test_delete_group_with_assigned_user():
    """A group that still has users assigned cannot be deleted."""
    # The error wording differs between 5.5 and 5.6.
    flash_msg = version.pick({
        '5.6': ("EVM Group \"{}\": Error during delete: Still has users assigned"),
        '5.5': ("EVM Group \"{}\": Error during \'destroy\': Still has users assigned")})
    group = new_group()
    group.create()
    user = new_user(group=group)
    user.create()
    with error.expected(flash_msg.format(group.description)):
        group.delete()
@pytest.mark.tier(3)
def test_edit_default_group():
    """Built-in (read only) groups cannot be edited."""
    flash_msg = 'Read Only EVM Group "{}" can not be edited'
    group = Group(description='EvmGroup-approver')
    navigate_to(Group, 'All')
    row = group_table.find_row_by_cells({'Name': group.description})
    sel.check(sel.element(".//input[@type='checkbox']", root=row[0]))
    tb.select('Configuration', 'Edit the selected Group')
    flash.assert_message_match(flash_msg.format(group.description))
@pytest.mark.tier(3)
def test_edit_sequence_usergroups(request):
    """Test for editing the sequence of user groups for LDAP lookup.

    Steps:
        * Login as Administrator user
        * create a new group
        * Edit the sequence of the new group
        * Verify the changed sequence
    """
    group = new_group()
    group.create()
    request.addfinalizer(group.delete)
    view = navigate_to(Group, 'All')
    row = view.table.row(name=group.description)
    original_sequence = row.sequence.text
    group.set_group_order(group.description)
    # Re-read the row after reordering to pick up the new sequence value.
    row = view.table.row(name=group.description)
    changed_sequence = row.sequence.text
    assert original_sequence != changed_sequence, "Edit Sequence Failed"
@pytest.mark.tier(3)
def test_tagvis_group(user_restricted, group_with_tag, check_item_visibility):
    """ Tests if group honour tag visibility feature
    Prerequirement:
        Catalog, tag, role, group and restricted user should be created

    Steps:
        1. As admin add tag to group
        2. Login as restricted user, group is visible for user
        3. As admin remove tag from group
        4. Login as restricted user, group is not visible for user
    """
    # The fixture tags/untags the group and asserts visibility both ways.
    check_item_visibility(group_with_tag, user_restricted)
# Role test cases
@pytest.mark.tier(2)
def test_role_crud():
    """CRUD lifecycle for a role: create, update name, copy, delete."""
    role = new_role()
    role.create()
    with update(role):
        role.name = role.name + "edited"
    copied_role = role.copy()
    copied_role.delete()
    role.delete()
@pytest.mark.tier(3)
def test_rolename_required_error_validation():
    """Creating a role with no name must be rejected."""
    role = Role(
        name=None,
        vm_restriction='Only User Owned')
    with error.expected("Name can't be blank"):
        role.create()
@pytest.mark.tier(3)
def test_rolename_duplicate_validation():
    """Creating a second role with an existing name must fail."""
    role = new_role()
    role.create()
    with error.expected("Name has already been taken"):
        role.create()
@pytest.mark.tier(3)
def test_delete_default_roles():
    """Built-in roles cannot be deleted."""
    # The error wording differs between 5.5 and 5.6.
    flash_msg = version.pick({
        '5.6': ("Role \"{}\": Error during delete: Cannot delete record "
                "because of dependent entitlements"),
        '5.5': ("Role \"{}\": Error during \'destroy\': Cannot delete record "
                "because of dependent miq_groups")})
    role = Role(name='EvmRole-approver')
    with error.expected(flash_msg.format(role.name)):
        role.delete()
@pytest.mark.tier(3)
def test_edit_default_roles():
    """Built-in (read only) roles cannot be edited."""
    role = Role(name='EvmRole-auditor')
    navigate_to(role, 'Edit')
    flash.assert_message_match("Read Only Role \"{}\" can not be edited" .format(role.name))
@pytest.mark.tier(3)
def test_delete_roles_with_assigned_group():
    """A role that is still referenced by a group cannot be deleted."""
    # The error wording differs between 5.5 and 5.6.
    flash_msg = version.pick({
        '5.6': ("Role \"{}\": Error during delete: Cannot delete record "
                "because of dependent entitlements"),
        '5.5': ("Role \"{}\": Error during \'destroy\': Cannot delete record "
                "because of dependent miq_groups")})
    role = new_role()
    role.create()
    group = new_group(role=role.name)
    group.create()
    with error.expected(flash_msg.format(role.name)):
        role.delete()
@pytest.mark.tier(3)
def test_assign_user_to_new_group():
    """A user can be created inside a brand-new group backed by a new role."""
    role = new_role()  # call function to get role
    role.create()
    group = new_group(role=role.name)
    group.create()
    user = new_user(group=group)
    user.create()
def _test_vm_provision():
    """Assert the current user can reach the 'Provision VMs' button."""
    logger.info("Checking for provision access")
    navigate_to(vms.Vm, 'VMsOnly')
    vms.lcl_btn("Provision VMs")
def _test_vm_power_on():
    """Ensures power button is shown for a VM"""
    logger.info("Checking for power button")
    # Any VM will do; only the visibility of the power option matters.
    vm_name = vms.Vm.get_first_vm_title()
    logger.debug("VM " + vm_name + " selected")
    if not vms.is_pwr_option_visible(vm_name, option=vms.Vm.POWER_ON):
        raise OptionNotAvailable("Power button does not exist")
def _test_vm_removal():
    """Assert the current user may remove a VM (cancelled; nothing is deleted)."""
    logger.info("Testing for VM removal permission")
    vm_name = vms.get_first_vm()
    logger.debug("VM " + vm_name + " selected")
    # cancel=True: we only need the action to be offered, not executed.
    vms.remove(vm_name, cancel=True)
@pytest.mark.tier(3)
@pytest.mark.parametrize(
    'product_features, action',
    [(
        {version.LOWEST: [['Everything', 'Infrastructure', 'Virtual Machines', 'Accordions'],
                          ['Everything', 'Access Rules for all Virtual Machines',
                           'VM Access Rules', 'Modify', 'Provision VMs']],
         '5.6': [['Everything', 'Compute', 'Infrastructure', 'Virtual Machines', 'Accordions'],
                 ['Everything', 'Access Rules for all Virtual Machines',
                  'VM Access Rules', 'Modify', 'Provision VMs']]},
        _test_vm_provision)])
def test_permission_edit(appliance, request, product_features, action):
    """
    Ensures that changes in permissions are enforced on next login.

    Grants ``product_features`` to a fresh role/group/user, verifies
    ``action`` succeeds, then revokes the features and verifies the same
    action now fails.
    """
    product_features = version.pick(product_features)
    # BUG FIX: pass the bound method itself. The original called
    # login_admin() immediately and registered None as the finalizer.
    request.addfinalizer(appliance.server.login_admin)
    role_name = fauxfactory.gen_alphanumeric()
    role = Role(name=role_name,
                vm_restriction=None,
                product_features=[(['Everything'], False)] +  # role_features
                                 [(k, True) for k in product_features])
    role.create()
    group = new_group(role=role.name)
    group.create()
    user = new_user(group=group)
    user.create()
    with user:
        try:
            action()
        except Exception:
            pytest.fail('Incorrect permissions set')
    appliance.server.login_admin()
    # Flip the grants: everything on, the tested features off.
    role.update({'product_features': [(['Everything'], True)] +
                 [(k, False) for k in product_features]
                 })
    with user:
        try:
            with error.expected(Exception):
                action()
        except error.UnexpectedSuccessException:
            # BUG FIX: pytest.Fails does not exist; pytest.fail is the API.
            pytest.fail('Permissions have not been updated')
def _mk_role(name=None, vm_restriction=None, product_features=None):
"""Create a thunk that returns a Role object to be used for perm
testing. name=None will generate a random name
"""
name = name or fauxfactory.gen_alphanumeric()
return lambda: Role(name=name,
vm_restriction=vm_restriction,
product_features=product_features)
def _go_to(cls, dest='All'):
"""Create a thunk that navigates to the given destination"""
return lambda: navigate_to(cls, dest)
cat_name = "Settings"
@pytest.mark.tier(3)
@pytest.mark.parametrize(
    'role,allowed_actions,disallowed_actions',
    [[_mk_role(product_features=[[['Everything'], False],  # minimal permission
                                 [['Everything', cat_name, 'Tasks'], True]]),
      {'tasks': lambda: sel.click(tasks.buttons.default)},  # can only access one thing
      {
          'my services': _go_to(MyService),
          'chargeback': _go_to(Server, 'Chargeback'),
          'clouds providers': _go_to(base_types()['cloud']),
          'infrastructure providers': _go_to(base_types()['infra']),
          'control explorer': _go_to(Server, 'ControlExplorer'),
          'automate explorer': _go_to(Server, 'AutomateExplorer')}],
     [_mk_role(product_features=[[['Everything'], True]]),  # full permissions
      {
          'my services': _go_to(MyService),
          'chargeback': _go_to(Server, 'Chargeback'),
          'clouds providers': _go_to(base_types()['cloud']),
          'infrastructure providers': _go_to(base_types()['infra']),
          'control explorer': _go_to(Server, 'ControlExplorer'),
          'automate explorer': _go_to(Server, 'AutomateExplorer')},
      {}]])
@pytest.mark.meta(blockers=[1262759])
def test_permissions(appliance, role, allowed_actions, disallowed_actions):
    """Check that a role permits ``allowed_actions`` and blocks
    ``disallowed_actions`` for a user created with that role.

    Failures from all actions are collected and reported together at the end.
    """
    # create a user and role
    role = role()  # call function to get role
    role.create()
    group = new_group(role=role.name)
    group.create()
    user = new_user(group=group)
    user.create()
    fails = {}
    try:
        with user:
            appliance.server.login_admin()
            for name, action_thunk in allowed_actions.items():
                try:
                    action_thunk()
                except Exception:
                    fails[name] = "{}: {}".format(name, traceback.format_exc())
            for name, action_thunk in disallowed_actions.items():
                try:
                    with error.expected(Exception):
                        action_thunk()
                except error.UnexpectedSuccessException:
                    fails[name] = "{}: {}".format(name, traceback.format_exc())
        # Aggregate every collected failure into a single report.
        if fails:
            message = ''
            for failure in fails.values():
                message = "{}\n\n{}".format(message, failure)
            raise Exception(message)
    finally:
        # Always restore the admin session.
        appliance.server.login_admin()
def single_task_permission_test(appliance, product_features, actions):
    """Tests that action succeeds when product_features are enabled, and
    fail when everything but product_features are enabled"""
    # Positive pass: grant only the given features; actions must succeed.
    test_permissions(appliance, _mk_role(name=fauxfactory.gen_alphanumeric(),
                                         product_features=[(['Everything'], False)] +
                                         [(f, True) for f in product_features]),
                     actions,
                     {})
    # Negative pass: grant everything except the features; actions must fail.
    test_permissions(appliance, _mk_role(name=fauxfactory.gen_alphanumeric(),
                                         product_features=[(['Everything'], True)] +
                                         [(f, False) for f in product_features]),
                     {},
                     actions)
@pytest.mark.tier(3)
@pytest.mark.meta(blockers=[1262764])
def test_permissions_role_crud(appliance):
    """Role CRUD works iff the Configuration/Catalogs features are granted."""
    single_task_permission_test(appliance,
                                [['Everything', cat_name, 'Configuration'],
                                 ['Everything', 'Services', 'Catalogs Explorer']],
                                {'Role CRUD': test_role_crud})
@pytest.mark.tier(3)
def test_permissions_vm_provisioning(appliance):
    """VM provisioning works iff the VM access-rule features are granted."""
    # The feature tree gained a 'Compute' level in 5.6.
    features = version.pick({
        version.LOWEST: [
            ['Everything', 'Infrastructure', 'Virtual Machines', 'Accordions'],
            ['Everything', 'Access Rules for all Virtual Machines', 'VM Access Rules', 'Modify',
             'Provision VMs']
        ],
        '5.6': [
            ['Everything', 'Compute', 'Infrastructure', 'Virtual Machines', 'Accordions'],
            ['Everything', 'Access Rules for all Virtual Machines', 'VM Access Rules', 'Modify',
             'Provision VMs']
        ]})
    single_task_permission_test(
        appliance,
        features,
        {'Provision VM': _test_vm_provision}
    )
# This test is disabled until it has been rewritten
# def test_permissions_vm_power_on_access(appliance):
# # Ensure VMs exist
# if not vms.get_number_of_vms():
# logger.debug("Setting up providers")
# infra_provider
# logger.debug("Providers setup")
# single_task_permission_test(
# appliance,
# [
# ['Infrastructure', 'Virtual Machines', 'Accordions'],
# ['Infrastructure', 'Virtual Machines', 'VM Access Rules', 'Operate', 'Power On']
# ],
# {'VM Power On': _test_vm_power_on}
# )
# This test is disabled until it has been rewritten
# def test_permissions_vm_remove(appliance):
# # Ensure VMs exist
# if not vms.get_number_of_vms():
# logger.debug("Setting up providers")
# setup_infrastructure_providers()
# logger.debug("Providers setup")
# single_task_permission_test(
# appliance,
# [
# ['Infrastructure', 'Virtual Machines', 'Accordions'],
# ['Infrastructure', 'Virtual Machines', 'VM Access Rules', 'Modify', 'Remove']
# ],
# {'Remove VM': _test_vm_removal}
# )
# commenting this out, there is validation around the 'no group selected'and we have a test for it
# @pytest.mark.meta(blockers=[1154112])
# def test_user_add_button_should_be_disabled_without_group(soft_assert):
# from cfme.web_ui import fill, form_buttons
# navigate_to(User, 'Add')
# pw = fauxfactory.gen_alphanumeric()
# fill(User.user_form, {
# "name_txt": fauxfactory.gen_alphanumeric(),
# "userid_txt": fauxfactory.gen_alphanumeric(),
# "password_txt": pw,
# "password_verify_txt": pw,
# "email_txt": "test@test.test"
# })
# assert not sel.is_displayed(form_buttons.add), "The Add button should not be displayed!"
@pytest.mark.tier(2)
def test_user_change_password(appliance, request):
    """A user can log in before and after the admin changes its password."""
    user = User(
        name="user {}".format(fauxfactory.gen_alphanumeric()),
        credential=Credential(
            principal="user_principal_{}".format(fauxfactory.gen_alphanumeric()),
            secret="very_secret",
            verify_secret="very_secret"
        ),
        email="test@test.test",
        group=usergrp,
    )
    user.create()
    request.addfinalizer(user.delete)
    # BUG FIX: pass the bound method itself. The original called
    # login_admin() immediately and registered None as the finalizer.
    request.addfinalizer(appliance.server.login_admin)
    with user:
        appliance.server.logout()
        appliance.server.login(user)
        assert appliance.server.current_full_name() == user.name
    appliance.server.login_admin()
    # Change the password (same principal, new secret).
    with update(user):
        user.credential = Credential(
            principal=user.credential.principal,
            secret="another_very_secret",
            verify_secret="another_very_secret",
        )
    with user:
        appliance.server.logout()
        appliance.server.login(user)
        assert appliance.server.current_full_name() == user.name
# Tenant/Project test cases
@pytest.mark.tier(3)
def test_superadmin_tenant_crud(request):
    """Verify CRUD operations for CFME tenants.

    Prerequisities:
        * This test is not depending on any other test and can be executed against fresh appliance.

    Steps:
        * Create tenant
        * Update description of tenant
        * Update name of tenant
        * Delete tenant
    """
    tenant = Tenant(
        name='tenant1' + fauxfactory.gen_alphanumeric(),
        description='tenant1 description')

    @request.addfinalizer
    def _delete_tenant():
        # Safety net in case an assertion fires before the final delete.
        if tenant.exists:
            tenant.delete()

    tenant.create()
    with update(tenant):
        tenant.description = tenant.description + "edited"
    with update(tenant):
        tenant.name = tenant.name + "edited"
    tenant.delete()
@pytest.mark.tier(3)
@pytest.mark.meta(blockers=[BZ(1387088, forced_streams=['5.7', 'upstream'])])
def test_superadmin_tenant_project_crud(request):
    """Verify CRUD operations for CFME projects.

    Prerequisities:
        * This test is not depending on any other test and can be executed against fresh appliance.

    Steps:
        * Create tenant
        * Create project as child to tenant
        * Update description of project
        * Update name of project
        * Delete project
        * Delete tenant
    """
    tenant = Tenant(
        name='tenant1' + fauxfactory.gen_alphanumeric(),
        description='tenant1 description')
    project = Project(
        name='project1' + fauxfactory.gen_alphanumeric(),
        description='project1 description',
        parent_tenant=tenant)

    @request.addfinalizer
    def _delete_tenant_and_project():
        # Delete the child project before its parent tenant.
        for item in [project, tenant]:
            if item.exists:
                item.delete()

    tenant.create()
    project.create()
    with update(project):
        project.description = project.description + "edited"
    with update(project):
        project.name = project.name + "edited"
    project.delete()
    tenant.delete()
@pytest.mark.tier(3)
@pytest.mark.parametrize('number_of_childrens', [5])
def test_superadmin_child_tenant_crud(request, number_of_childrens):
    """Test CRUD operations for CFME child tenants, where several levels of tenants are created.

    Prerequisities:
        * This test is not depending on any other test and can be executed against fresh appliance.

    Steps:
        * Create 5 tenants where the next tenant is always child to the previous one
        * Update description of tenant(N-1)_* in the tree
        * Update name of tenant(N-1)_*
        * Delete all created tenants in reversed order
    """
    tenant = None
    tenant_list = []

    @request.addfinalizer
    def _delete_tenants():
        # reversed because we need to go from the last one
        for tenant in reversed(tenant_list):
            if tenant.exists:
                tenant.delete()

    # Build the chain: each new tenant is a child of the previous one.
    for i in range(1, number_of_childrens + 1):
        new_tenant = Tenant(
            name="tenant{}_{}".format(i, fauxfactory.gen_alpha(4)),
            description=fauxfactory.gen_alphanumeric(16),
            parent_tenant=tenant)
        tenant_list.append(new_tenant)
        new_tenant.create()
        tenant = new_tenant
    # Edit the parent of the deepest tenant (level N-1).
    tenant_update = tenant.parent_tenant
    with update(tenant_update):
        tenant_update.description = tenant_update.description + "edited"
    with update(tenant_update):
        tenant_update.name = tenant_update.name + "edited"
    # Delete children before parents.
    for tenant_item in reversed(tenant_list):
        tenant_item.delete()
        assert not tenant_item.exists
@pytest.mark.tier(3)
@pytest.mark.parametrize("object_type", [Tenant, Project])
def test_tenant_unique_tenant_project_name_on_parent_level(request, object_type):
    """Tenant or Project has always unique name on parent level. Same name cannot be used twice.

    Prerequisities:
        * This test is not depending on any other test and can be executed against fresh appliance.

    Steps:
        * Create tenant or project
        * Create another tenant or project with the same name
        * Creation will fail because object with the same name exists
        * Delete created objects
    """
    name_of_tenant = object_type.__name__ + fauxfactory.gen_alphanumeric()
    tenant_description = object_type.__name__ + 'description'
    tenant = object_type(
        name=name_of_tenant,
        description=tenant_description)
    # Deliberately the same name, to trigger the uniqueness validation.
    tenant2 = object_type(
        name=name_of_tenant,
        description=tenant_description)

    @request.addfinalizer
    def _delete_tenant():
        if tenant.exists:
            tenant.delete()
        if tenant2.exists:
            tenant2.delete()

    tenant.create()
    with error.expected("Validation failed: Name should be unique per parent"):
        tenant2.create()
    tenant.delete()
| gpl-2.0 |
analyseuc3m/ANALYSE-v1 | common/djangoapps/third_party_auth/middleware.py | 169 | 1053 | """Middleware classes for third_party_auth."""
from social.apps.django_app.middleware import SocialAuthExceptionMiddleware
from . import pipeline
class ExceptionMiddleware(SocialAuthExceptionMiddleware):
    """Custom middleware that handles conditional redirection."""

    def get_redirect_uri(self, request, exception):
        """Pick the error-redirect target for a social-auth exception."""
        # Start from the parent's default, which falls back to django
        # settings's SOCIAL_AUTH_LOGIN_ERROR_URL.
        redirect_uri = super(ExceptionMiddleware, self).get_redirect_uri(
            request, exception)

        # Safe because it's already been validated by
        # pipeline.parse_query_params. If that pipeline step ever moves later
        # in the pipeline stack, we'd need to validate this value because it
        # would be an injection point for attacker data.
        auth_entry = request.session.get(pipeline.AUTH_ENTRY_KEY)

        # Prefer the dispatch URL registered for this auth entry point.
        if auth_entry and auth_entry in pipeline.AUTH_DISPATCH_URLS:
            redirect_uri = pipeline.AUTH_DISPATCH_URLS[auth_entry]
        return redirect_uri
| agpl-3.0 |
Frank-Wu/RamCloud | bindings/python/txramcloud.py | 19 | 34439 | # Copyright (c) 2010 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Implements client-side transactions for RAMCloud.
This module buys you fully ACID transactions, but you'll pay for it:
In non-transactional operations:
- Blind (unconditional) operations are now about twice as slow as operations
that depend on a specific version number.
In transactional operations:
- Client clocks must be roughly synchronized for good performance, though clock
skew of a few seconds is tolerated.
- Performance is on the order of six times as slow for small transactions and
twice as slow for large transactions, as compared to non-transactional
operations.
- Having an object in the read-set of a transaction bumps its version number.
- No support for server-assigned object IDs.
Internals
=========
Glossary
--------
- A I{seed} object is one that is taking part in a transaction but does not
otherwise exist.
- A I{masked} object has a header that points to the transaction ID that
it is a part of. Masks are mutually exclusive: an object is masked by 0 or 1
transactions at any given time.
- A I{tombstone} object sits in place of a transaction intent object to block
that transaction intent from being written.
- The I{coordinator} is the client that is attempting to commit the
transaction.
Algorithm Overview
------------------
Assuming initially object ID 1 contains A, 2 contains B, and 3 contains C.
If the transaction is successful, this is the process:
1. Coordinator reserves object ID for T and uses this as the transaction ID.
2. Coordinator adds masks to objects 1, 2, 3 which all point to T (T does not
yet exist).
3. Coordinator creates T with the transaction intent (write A' at 1 over
version V1, write B' at 2 over version V2, write C' at 3 over version V3).
4. Coordinator writes back A' into 1, B' into 2, C' into 3, unmasking the
objects.
5. Coordinator deletes T.
If some other client wants to abort T before the coordinator reaches step 3, the
other client may create a tombstone at T's object ID. This blocks the create in
step 3 and the coordinator will be forced to abort. However, if the coordinator
has already reached step 3, the other client's tombstone will be blocked by T
and the transaction is no longer abortable.
If some other client finds an object masked by a committed transaction object,
it can do the write-back (step 4) on behalf of the coordinator, even racing the
coordinator.
If some other client finds an object masked by a tombstone, it can remove the mask.
If some other client finds an object masked by a missing transaction object, it
can either wait for a transaction object to appear, or it can create a tombstone
and then remove the mask.
Consequences
------------
- 1:A, 2:B, 3:C appear an extra time in the log (when they are masked).
- A', B', C' appear an extra time in the log (in T).
- It'd be hard to do this safely if we need to consider access control. (We
don't with workspaces.)
"""
import struct
import time
import cPickle as pickle
import retries
import ramcloud
from oidres import OIDRes
# Local aliases so unit tests can substitute fakes for these classes.
RAMCloud = ramcloud.RAMCloud
RetryStrategy = retries.FuzzyExponentialBackoff

# Bumped when the packed object layout changes incompatibly (see unpack).
TXRAMCLOUD_VERSION = 1
# Magic marker stored at the start of every TxRAMCloud-managed object.
TXRAMCLOUD_HEADER = "txramcloud"
# Object ID (max uint64) holding the transaction-ID reservation counter.
TXID_RES_OID = 2**64 - 1
def serialize(data):
    """Pickle a Python object (protocol 2) and return the byte string."""
    serialized = pickle.dumps(data, 2)
    return serialized
def unserialize(serialized):
    """Inverse of serialize(): rebuild the Python object from its pickle."""
    obj = pickle.loads(serialized)
    return obj
# Binary layout of the fixed 32-byte TxRAMCloud object header:
#   14s  magic string (TXRAMCLOUD_HEADER, NUL-padded)
#   B    format version (TXRAMCLOUD_VERSION)
#   B    seed flag: non-zero when there is no underlying object data
#   Q    transaction id (0 means "not part of a transaction")
#   d    transaction timeout, seconds since the UNIX epoch (0 if none)
HeaderFmt = struct.Struct("14sBBQd")


def unpack(blob):
    """Split a binary blob into its txid, timeout, and data.

    L{pack} is the inverse of L{unpack}.

    @param blob: binary blob as produced by L{pack}
    @type blob: C{str}

    @return: see L{pack} parameters
    @rtype: (txid, timeout, data)

    @raise TxRAMCloud.InconsistencyError: if the blob is shorter than the
        header, has a bad magic string, or is a seed without a txid
    @raise TxRAMCloud.OutdatedClient: if the format version differs from
        TXRAMCLOUD_VERSION
    """
    if len(blob) < 32:
        raise TxRAMCloud.InconsistencyError("Object too small")
    header, version, seed, txid, timeout = HeaderFmt.unpack(blob[:32])
    # The 14s field is NUL-padded; strip the padding before comparing.
    if header.rstrip('\0') != TXRAMCLOUD_HEADER:
        raise TxRAMCloud.InconsistencyError("Object header not set to " +
                                            "TXRAMCLOUD_HEADER")
    if version != TXRAMCLOUD_VERSION:
        raise TxRAMCloud.OutdatedClient("Encountered object with version %d" %
                                        version)
    if seed:
        # A seed is a transaction placeholder with no underlying data,
        # so a txid of 0 would be inconsistent.
        if txid == 0:
            raise TxRAMCloud.InconsistencyError("Seed without transaction")
        data = None
    else:
        data = blob[32:]
    return (txid, timeout, data)
def pack(txid, timeout, data):
    """Pack a txid, timeout, and data into a binary blob.

    L{unpack} is the inverse of L{pack}.

    @param txid: transaction id, or 0 for no transaction
    @type txid: C{int}
    @param timeout: when the transaction expires (in seconds after the UNIX
        epoch), or 0 for no transaction
    @type timeout: C{float}
    @param data: the underlying object's contents, or L{None} if there is no
        underlying object (then txid must be set)
    @type data: C{str} or C{None}
    @return: binary blob
    @rtype: C{str}
    """
    is_seed = data is None
    if is_seed:
        # Seeds exist only as transaction placeholders, so they must
        # always carry a transaction id.
        assert txid != 0
    header = HeaderFmt.pack(TXRAMCLOUD_HEADER, TXRAMCLOUD_VERSION,
                            is_seed, txid, timeout)
    if is_seed:
        return header
    return header + data
class TxRAMCloud(RAMCloud):
    """Implements client-side transactions for RAMCloud.

    This module assumes all version numbers passed in refer to objects that are
    clean. Effectively, that means the user's application code shouldn't
    fabricate version numbers.

    Internal Details
    ================
    - L{read_rr} will only return once the object read is not masked.
      Thus, all version numbers returned from L{read_rr} are "safe".
    - L{write_rr} will never overwrite an object that's in a transaction.
    - L{delete_rr} will never delete an object that's in a transaction.
    - L{ramcloud.RAMCloud} will never handle raw user data; it will always be
      packed with L{pack}.
    """

    class InconsistencyError(Exception):
        """Raised when a malformed TxRAMCloud object is discovered."""
        pass

    class OutdatedClient(Exception):
        """Raised if this client needs to be updated."""
        pass

    class TransactionAborted(Exception):
        """Raised if the minitransaction was aborted.

        This would be raised if the minitransaction was aborted either due to
        reject rules or by another client.

        This exception should be considered abstract and should never be
        raised. Use L{TransactionRejected} and L{TransactionExpired}, its
        subclasses, instead.
        """
        pass

    class TransactionRejected(TransactionAborted):
        """Raised if the minitransaction was aborted due to reject rules.

        @ivar reasons: A mapping from (table_id, oid) to L{NoObjectError},
                       L{ObjectExistsError}, or L{VersionError}. Some
                       non-empty subset of the objects of the transaction
                       whose operation's reject rules have not been met
                       will be present.
        @type reasons: C{dict}
        """
        def __init__(self):
            TxRAMCloud.TransactionAborted.__init__(self)
            self.reasons = {}

    class TransactionExpired(TransactionAborted):
        """Raised if the minitransaction was aborted by another client."""
        pass

    def __init__(self, tx_table):
        """
        @param tx_table: a table reserved for this module's use
        @type tx_table: C{int}
        """
        if RAMCloud == ramcloud.RAMCloud:
            RAMCloud.__init__(self)
        else:
            # unit tests are messing around in here!
            pass
        self.txid_res = OIDRes(rc=self, table=tx_table, oid=TXID_RES_OID)
        self.tx_table = tx_table

    def __del__(self):
        # Bug fix: this previously called RAMCloud.__init__(self) -- an
        # apparent copy-paste from __init__ -- which would re-run the parent
        # constructor during teardown. Delegate to the parent destructor
        # instead, if one exists.
        if RAMCloud == ramcloud.RAMCloud:
            parent_del = getattr(RAMCloud, '__del__', None)
            if parent_del is not None:
                parent_del(self)
        else:
            # unit tests are messing around in here!
            pass

    def insert(self, table_id, data):
        """Insert a new object, wrapping the user's data in a clean header."""
        blob = pack(0, 0, data)
        return RAMCloud.insert(self, table_id, blob)

    def _write_tombstone(self, txid):
        """Force a transaction to abort.

        Writes a tombstone at txid if no object exists there.

        @param txid: The ID of the transaction to abort.
        @type txid: C{int}
        @return: The version number of the tombstone written, or C{None} if an
                 object already exists at txid.
        @rtype: C{int} or C{None}
        """
        rr = ramcloud.RejectRules(object_exists=True)
        blob = serialize(Tombstone())
        try:
            return RAMCloud.write_rr(self, self.tx_table, txid, blob, rr)
        except ramcloud.ObjectExistsError:
            return None

    def _clean(self, table_id, key, txid, timeout):
        """Clean a mask off of an object.

        This method won't necessarily do anything productive every time it's
        called, so it should be called inside a retry loop.

        @param table_id: The table containing the masked object.
        @type table_id: C{int}
        @param key: The object ID of the masked object.
        @type key: C{int}
        @param txid: The transaction which is masking the object.
        @type txid: C{int}
        @param timeout: The time at which the mask expires.
        @type timeout: C{int}
        """
        rr = ramcloud.RejectRules(object_doesnt_exist=True)
        try:
            mtdata, mtversion = RAMCloud.read_rr(self, self.tx_table, txid, rr)
        except ramcloud.NoObjectError:
            # txid doesn't exist, so wait or write tombstone
            if time.time() > timeout:
                # try to write a tombstone
                if self._write_tombstone(txid) is not None:
                    self._unmask_object(table_id, key, txid)
            # caller will retry if the object still needs cleaning
        else:
            try:
                mt = unserialize(mtdata)
            except pickle.BadPickleGet:
                raise self.InconsistencyError("Not a pickled object")
            if type(mt) == Tombstone:
                # The transaction was aborted; roll the object back.
                self._unmask_object(table_id, key, txid)
            elif type(mt) == MiniTransaction:
                # The transaction committed; roll the object forward.
                self._finish_mt(mt, txid, mtversion)
            else:
                raise self.InconsistencyError("Not a MiniTransaction or " +
                                              "Tombstone")

    def _read_rr(self, table_id, key, user_reject_rules):
        # We can't reject for object_exists in RAMCloud.read_rr because of seed
        # objects, so we handle it later.
        reject_rules = ramcloud.RejectRules(object_doesnt_exist=True,
                        version_eq_given=user_reject_rules.version_eq_given,
                        version_gt_given=user_reject_rules.version_gt_given,
                        given_version=user_reject_rules.given_version)
        start = time.time()
        for retry in RetryStrategy():
            blob, version = RAMCloud.read_rr(self, table_id, key, reject_rules)
            txid, timeout, data = unpack(blob)
            if txid:
                # there's plenty of room for optimizing round trips here
                timeout = min(start + 1, timeout)
                self._clean(table_id, key, txid, timeout)
                retry.later()
            else:
                # not masked
                if user_reject_rules.object_exists:
                    raise ramcloud.ObjectExistsError()
                else:
                    return data, version

    def read_rr(self, table_id, key, reject_rules):
        # Yep, this appears to be redundant. The names of the arguments are
        # part of the API inherited from ramcloud.RAMCloud, so we relay to a
        # private method that can name its arguments as it pleases.
        return self._read_rr(table_id, key, reject_rules)

    def _delete_unsafe(self, table_id, key, user_reject_rules):
        # - Throws L{ramcloud.NoObjectError} if user_reject_rules specifies
        #   object_doesnt_exist and there is no object to be deleted.
        # - Doesn't throw L{ramcloud.ObjectExists}.
        # - Throws L{ramcloud.VersionError} if user_reject_rules specifies
        #   version_eq_given and the version read matches the version given
        #   or is older than the version given.
        assert not user_reject_rules.object_exists
        assert not user_reject_rules.version_gt_given
        for retry in RetryStrategy():
            try:
                # self.read_rr() makes sure the object is not masked
                version = self.read_rr(table_id, key, user_reject_rules)[1]
            except ramcloud.VersionError:
                raise
            except ramcloud.NoObjectError:
                # If there was no object, we're done.
                if user_reject_rules.object_doesnt_exist:
                    raise
                else:
                    return
            # We can stop worrying about the user's given version now:
            # If user_reject_rules.version_eq_given, there is a given version
            # that we must reject. But if we've made it this far, the version
            # we read is greater than user_reject_rules.given_version.
            # However, we still have to worry about NoObjectError iff
            # user_reject_rules.doesnt_exist.

            # There was an object when we called read. We're definitely good if
            # we delete the exact version we read. If there is no object to
            # delete now, we want our delete rejected iff user_reject_rules has
            # object_doesnt_exist.
            reject_rules = ramcloud.RejectRules(
                    object_doesnt_exist=user_reject_rules.object_doesnt_exist,
                    version_gt_given=True, given_version=version)
            try:
                return RAMCloud.delete_rr(self, table_id, key, reject_rules)
            except ramcloud.NoObjectError:
                if user_reject_rules.object_doesnt_exist:
                    raise
                else:
                    return
            except ramcloud.VersionError:
                retry.later()

    def delete_rr(self, table_id, key, reject_rules):
        if reject_rules.object_exists or reject_rules.version_gt_given:
            # these cases are safe
            return RAMCloud.delete_rr(self, table_id, key, reject_rules)
        else:
            return self._delete_unsafe(table_id, key, reject_rules)

    def _update_unsafe(self, table_id, key, blob, user_reject_rules):
        # - Throws L{ramcloud.NoObjectError} if there is no object to be
        #   updated.
        # - Doesn't throw L{ramcloud.ObjectExists}.
        # - Throws L{ramcloud.VersionError} if user_reject_rules specifies
        #   version_eq_given and the version read matches the version given
        #   or is older than the version given.
        assert user_reject_rules.object_doesnt_exist
        assert not user_reject_rules.object_exists
        assert not user_reject_rules.version_gt_given
        for retry in RetryStrategy():
            # self.read_rr() makes sure the object is not masked
            try:
                version = self.read_rr(table_id, key, user_reject_rules)[1]
            except (ramcloud.NoObjectError, ramcloud.VersionError):
                raise
            # We can stop worrying about the user's given version now
            # but still have to worry about NoObjectError.
            reject_rules = ramcloud.RejectRules.exactly(version)
            try:
                return RAMCloud.write_rr(self, table_id, key, blob,
                                         reject_rules)
            except ramcloud.NoObjectError:
                raise
            except ramcloud.VersionError:
                retry.later()

    def _write_unsafe(self, table_id, key, blob, user_reject_rules):
        assert not user_reject_rules.object_doesnt_exist
        assert not user_reject_rules.object_exists
        assert not user_reject_rules.version_gt_given
        for retry in RetryStrategy():
            try:
                # self.read_rr() makes sure the object is not masked
                version = self.read_rr(table_id, key, user_reject_rules)[1]
            except ramcloud.NoObjectError:
                # If there was no object, we demand that there be no object.
                reject_rules = ramcloud.RejectRules(object_exists=True)
            else:
                # If there was an object, we demand that there be either no
                # object or this particular version.
                reject_rules = ramcloud.RejectRules(version_gt_given=True,
                                                    given_version=version)
            try:
                return RAMCloud.write_rr(self, table_id, key, blob,
                                         reject_rules)
            except (ramcloud.ObjectExistsError, ramcloud.VersionError):
                retry.later()

    def write_rr(self, table_id, key, data, reject_rules):
        blob = pack(0, 0, data)
        if reject_rules.object_exists or reject_rules.version_gt_given:
            # these cases are safe
            return RAMCloud.write_rr(self, table_id, key, blob, reject_rules)
        else:
            if reject_rules.object_doesnt_exist:
                return self._update_unsafe(table_id, key, blob, reject_rules)
            else:
                return self._write_unsafe(table_id, key, blob, reject_rules)

    # begin coordinator:

    def _unmask_object(self, table_id, key, txid):
        """Ensure an object is not masked by a particular transaction.

        This operation is idempotent.

        @param table_id: The table containing the possibly masked object.
        @type table_id: C{int}
        @param key: The object ID of the possibly masked object.
        @type key: C{int}
        @param txid: The transaction which may be masking the object.
        @type txid: C{int}
        """
        for retry in RetryStrategy():
            rr_read = ramcloud.RejectRules(object_doesnt_exist=True)
            try:
                blob, version = RAMCloud.read_rr(self, table_id, key, rr_read)
            except ramcloud.NoObjectError:
                # the object doesn't exist, so it's most certainly not masked
                return
            otxid, otimeout, data = unpack(blob)
            if otxid == 0 or otxid != txid:
                # the object is already not masked by this transaction
                return
            rr_change = ramcloud.RejectRules(version_gt_given=True,
                                             given_version=version)
            if data is None:
                # seed object
                try:
                    RAMCloud.delete_rr(self, table_id, key, rr_change)
                except ramcloud.VersionError:
                    retry.later()
            else:
                # full object
                blob = pack(0, 0, data)
                try:
                    RAMCloud.write_rr(self, table_id, key, blob, rr_change)
                except ramcloud.VersionError:
                    retry.later()

    def _unmask_objects(self, objects, txid):
        """Ensure several objects are not masked by a particular transaction.

        This operation is idempotent.

        @param objects: A list of (table_id, key) tuples of possibly masked
                        objects.
        @type objects: C{list}
        @param txid: The transaction which may be masking the object.
        @type txid: C{int}
        """
        for (table_id, key) in objects:
            self._unmask_object(table_id, key, txid)

    def _mask_object(self, table_id, key, txid, timeout, user_reject_rules):
        """Mask an object by a particular transaction.

        If the object is already masked, this method will wait for it to
        become unmasked.

        @warning: Unlike L{_unmask_object}, this operation is B{not}
                  idempotent.
        @param table_id: The table containing the object to mask.
        @type table_id: C{int}
        @param key: The object ID to mask.
        @type key: C{int}
        @param txid: The transaction which will mask the object.
        @type txid: C{int}
        @param timeout: The time at which to expire the mask.
        @type timeout: C{int}
        @param user_reject_rules: The reasons for which to abort masking the
                                  object.
        @type user_reject_rules: L{ramcloud.RejectRules}
        @raise Exception: If the object could not be masked because of
                          C{user_reject_rules}. Specifically, this is one of:
                          L{ramcloud.NoObjectError},
                          L{ramcloud.ObjectExistsError}, or
                          L{ramcloud.VersionError}.
                          Additionally, the exception will have the attributes
                          C{table} and C{oid} set to parameter C{table_id} and
                          C{key}.
        @return: The new version of the newly masked object.
        @rtype: C{int}
        """
        for retry in RetryStrategy():
            # self.read_rr() requires object_doesnt_exist to be set, but we can
            # still use the user's other reject rules.
            rr_read = ramcloud.RejectRules(
                        object_doesnt_exist=True,
                        object_exists=user_reject_rules.object_exists,
                        version_eq_given=user_reject_rules.version_eq_given,
                        version_gt_given=user_reject_rules.version_gt_given,
                        given_version=user_reject_rules.given_version)
            try:
                data, version = self.read_rr(table_id, key, rr_read)
            except (ramcloud.ObjectExistsError, ramcloud.VersionError) as e:
                # The user asked for a reject
                e.table = table_id
                e.oid = key
                raise
            except ramcloud.NoObjectError as e:
                if user_reject_rules.object_doesnt_exist:
                    # The user asked for a reject
                    e.table = table_id
                    e.oid = key
                    raise
                else:
                    # The user didn't ask for a reject, but the object doesn't
                    # exist. We need to create a seed and must fail if an
                    # object is already present.
                    rr_write = ramcloud.RejectRules(object_exists=True)
                    blob = pack(txid, timeout, data=None)
            else:
                # There was an object when we called read. It has no mask, and
                # it isn't a seed. We need to write over the exact version we
                # read.
                rr_write = ramcloud.RejectRules.exactly(version)
                blob = pack(txid, timeout, data)
            # Now, blob and rr_write are set and we can attempt a write.
            try:
                return RAMCloud.write_rr(self, table_id, key, blob, rr_write)
            except (ramcloud.ObjectExistsError, ramcloud.NoObjectError,
                    ramcloud.VersionError):
                retry.later()

    def _mask_objects(self, objects, mt, txid, timeout):
        """Mask the objects in a transaction.

        If any exceptions are raised, this method will try to unmask all the
        objects that it has masked before raising it to the caller.

        @warning: This method masks the objects in the order given in
                  C{objects}. If the object is already masked, this method
                  will wait for it to become unmasked. The caller is in charge
                  of ensuring there won't be deadlock.
        @warning: Unlike L{_unmask_objects}, this operation is B{not}
                  idempotent.
        @param objects: a list of (table, key) pairs of objects to mask
        @type objects: list
        @param mt: The transaction with which to mask C{objects}.
        @type mt: L{MiniTransaction}
        @param txid: The transaction ID.
        @type txid: C{int}
        @param timeout: The time at which to expire the mask.
        @type timeout: C{int}
        @raise Exception: If some object could not be masked because of
                          C{user_reject_rules}. See L{_mask_object}.
        @return: A mapping from (table_id, oid) to the version number of the
                 object with the mask added.
        @rtype: C{dict}
        """
        masked_versions = {}
        try:
            for (table_id, key) in objects:
                op = mt[(table_id, key)]
                masked_version = self._mask_object(table_id, key, txid,
                                                   timeout, op.reject_rules)
                masked_versions[(table_id, key)] = masked_version
        except:
            # the order to _unmask_objects shouldn't matter
            self._unmask_objects(masked_versions.keys(), txid)
            raise
        return masked_versions

    def _write_mt(self, mt, txid):
        """Write out the minitransaction intent to RAMCloud.

        Uses the table identified by C{txramcloud.tx_table} and the
        transaction id stored in C{mt}.

        @param mt: the minitransaction
        @type mt: L{MiniTransaction}
        @param txid: the object ID at which to write the minitransaction
        @type txid: C{int}
        @return: the version of the object written
        @rtype: L{int}
        @raise TransactionExpired: A tombstone is in the way.
        """
        rr = ramcloud.RejectRules(object_exists=True)
        try:
            return RAMCloud.write_rr(self, self.tx_table, txid,
                                     serialize(mt), rr)
        except ramcloud.ObjectExistsError:
            # looks like a tombstone is here
            raise self.TransactionExpired()

    def _delete_mt(self, txid, version):
        """Ensure a minitransaction intent has been deleted from RAMCloud.

        @precondition: No references exist to C{txid} in the RAMCloud.
        @param txid: the transaction id of the minitransaction to delete
        @type txid: C{int}
        @param version: the version number of the mt when it was read
        @type version: C{int}
        """
        rr = ramcloud.RejectRules(version_gt_given=True, given_version=version)
        try:
            RAMCloud.delete_rr(self, self.tx_table, txid, rr)
        except ramcloud.VersionError:
            raise self.InconsistencyError("txid has been modified")

    def _delete_tombstone(self, txid):
        """Ensure a tombstone has been deleted from RAMCloud.

        @warning: No one should ever commit a transaction under txid following
                  this operation.
        @param txid: the transaction id of the tombstone to delete
        @type txid: C{int}
        """
        rr = ramcloud.RejectRules()
        RAMCloud.delete_rr(self, self.tx_table, txid, rr)

    def _apply_op(self, table_id, key, txid, op):
        """Apply the operation to its masked object.

        @param table_id: The table containing the possibly masked object.
        @type table_id: C{int}
        @param key: The object ID of the possibly masked object.
        @type key: C{int}
        @param op: the operation to apply
        @type op: L{MTOperation}
        """
        read_rr = ramcloud.RejectRules(object_doesnt_exist=True)
        try:
            blob, version = RAMCloud.read_rr(self, table_id, key, read_rr)
        except ramcloud.NoObjectError:
            # does not exist, so not masked by txid
            return
        otxid, otimeout, odata = unpack(blob)
        if otxid == 0 or otxid != txid:
            # not masked by txid
            return
        # So, txid is indeed masking the object we read.
        if type(op) == MTOperation:
            # no op
            # remove mask / delete seed
            data = odata
        elif type(op) == MTWrite:
            data = op.data
        elif type(op) == MTDelete:
            data = None
        else:
            raise TxRAMCloud.InconsistencyError("Unknown type in MT: %s" %
                                                type(op))
        # Now, (data is None) determines whether we need to delete or update
        # the object at version.
        update_rr = ramcloud.RejectRules.exactly(version)
        if data is None:
            # delete the object at version
            try:
                RAMCloud.delete_rr(self, table_id, key, update_rr)
            except (ramcloud.NoObjectError, ramcloud.VersionError):
                # we must be racing another client to clean this up
                return
        else:
            # update the object at version with data
            blob = pack(0, 0, data)
            try:
                RAMCloud.write_rr(self, table_id, key, blob, update_rr)
            except (ramcloud.NoObjectError, ramcloud.VersionError):
                # we must be racing another client to clean this up
                return

    def _apply_mt(self, mt, txid):
        """Apply the minitransaction's operations to its masked objects.

        @param mt: the minitransaction to apply
        @type mt: L{MiniTransaction}
        @param txid: the transaction ID which masks the objects in the
                     transaction
        @type txid: C{int}
        """
        for ((table_id, key), op) in mt.items():
            self._apply_op(table_id, key, txid, op)

    def _finish_mt(self, mt, txid, version):
        """Complete/clean up a minitransaction after the point of no return.

        @postcondition: No object will be masked by the minitransaction and
                        the mt will not exist in the system.
        @param mt: the minitransaction to clean up
        @type mt: L{MiniTransaction}
        @param txid: the transaction ID which masks the objects
        @type txid: C{int}
        @param version: the version number of mt when it was read
        @type version: C{int}
        """
        self._apply_mt(mt, txid)
        self._delete_mt(txid, version)

    def mt_commit(self, mt):
        """Execute and commit a prepared minitransaction.

        @param mt: the prepared minitransaction to be executed
        @type mt: L{MiniTransaction}
        @raise TransactionRejected: The minitransaction aborted due to reject
                                    rules.
        @raise TransactionExpired: The minitransaction was aborted by another
                                   client.
        @return: A mapping from (table_id, oid) to version numbers. There will
                 be one entry for each object in the minitransaction:
                 - Those objects that were deleted will have None as the
                   version number.
                 - Those objects that were written will have their new version
                   number.
                 - Those objects that had no operation applied (unfortunately)
                   also changed version numbers, and their new version is
                   returned here.
        @rtype: C{dict}
        """
        # reserve a new transaction ID. 0 is not valid.
        txid = 0
        while txid == 0:
            txid = self.txid_res.next()
        timeout = time.time() + 30

        # mask objects (in sorted order to guarantee no deadlock)
        objects = sorted(mt.keys())
        try:
            masked_versions = self._mask_objects(objects, mt, txid, timeout)
        except (ramcloud.NoObjectError, ramcloud.ObjectExistsError,
                ramcloud.VersionError) as e:
            a = self.TransactionRejected()
            a.reasons[(e.table, e.oid)] = e
            raise a
        assert len(masked_versions) == len(objects)

        # TODO: optimization: add masked_versions to mt intent, then use them
        # in _unmask_objects and _finish_mt to save the RAMCloud.read_rr call
        # in _unmask_object.

        # write out minitransaction intent
        try:
            version = self._write_mt(mt, txid)
        except:
            self._unmask_objects(objects, txid)
            self._delete_tombstone(txid)
            raise

        # no turning back now
        self._finish_mt(mt, txid, version)

        # Assume unmasked version is 1 greater than masked version,
        # unless the operation was a delete.
        unmasked_versions = {}
        for ((t, o), v) in masked_versions.items():
            if type(mt[(t, o)]) == MTDelete:
                unmasked_versions[(t, o)] = None
            else:
                assert type(mt[(t, o)]) in [MTOperation, MTWrite]
                unmasked_versions[(t, o)] = v + 1
        return unmasked_versions
# struct
class Tombstone(object):
    # Marker written at a txid slot to force that transaction to abort;
    # see _write_tombstone() and _clean().
    pass
# struct
class MiniTransaction(dict):
    # Maps (table_id, oid) -> MTOperation, describing the intent of one
    # transaction; serialized into tx_table by _write_mt().
    pass
# struct
class MTOperation(object):
    # Base operation: leaves the object's data unchanged, but its
    # reject_rules must still hold for the transaction to commit.
    def __init__(self, reject_rules):
        self.reject_rules = reject_rules
# struct
class MTWrite(MTOperation):
    # Write operation: replaces the object's contents with `data` at commit.
    def __init__(self, data, reject_rules):
        MTOperation.__init__(self, reject_rules)
        self.data = data
# struct
class MTDelete(MTOperation):
    # Delete operation: the masked object is removed at commit.
    pass
def main():
    """Smoke test: exercise TxRAMCloud against a live RAMCloud server."""
    global r
    r = TxRAMCloud(7)
    print "Client: 0x%x" % r.client
    r.connect()
    r.ping()
    r.create_table("test")
    print "Created table 'test'",
    table = r.get_table_id("test")
    print "with id %s" % table
    r.create(table, 0, "Hello, World, from Python")
    print "Inserted to table"
    value, got_version = r.read(table, 0)
    print value
    key = r.insert(table, "test")
    print "Inserted value and got back key %d" % key
    r.update(table, key, "test")
    # Embedded NUL byte checks that the value path is binary-safe.
    bs = "binary\00safe?"
    oid = r.insert(table, bs)
    assert r.read(table, oid)[0] == bs
    r.drop_table("test")

if __name__ == '__main__':
    main()
| isc |
summer-liu/events_cache_scripts | report/runDataAnalysis03.py | 1 | 1722 | # _*_ coding:utf-8 _*_
from dataFunctions import *
import newUserThatDay as td
import newUserNextWeek as nw
import scene as sc
import timeAnalysis as ta
from bson.objectid import ObjectId
# calculate weekly top 10 topics
def weeklyTopicsEnterTop10(startDate, endDate, limit=10):
    """Return the ids of the most-entered topics in [startDate, endDate).

    Counts "enterTopic" events per topicId and returns the ids of the
    `limit` most frequent ones, in descending order of count.

    @param startDate: inclusive lower bound on serverTime (UTC datetime)
    @param endDate: exclusive upper bound on serverTime (UTC datetime)
    @param limit: how many topic ids to return (default 10, preserving the
                  original behavior)
    @return: list of topicId values
    """
    pipeline = [
        {
            "$match": {
                "eventKey": "enterTopic",
                "serverTime": {
                    "$gte": startDate,
                    "$lt": endDate
                }
            }
        },
        {
            "$group": {
                "_id": "$eventValue.topicId",
                "count": {"$sum": 1}
            }
        },
        {
            "$sort": {
                "count": DESCENDING
            }
        },
        # Truncate on the server instead of pulling every group back to the
        # client and slicing in Python (the previous list(...)[: 10]).
        {
            "$limit": limit
        }
    ]
    return [t['_id'] for t in events.aggregate(pipeline)]
# Wall-clock timer for the whole report run.
s = time.time()

# Reporting window: one week starting 2016-01-03, expressed in local time.
START_DATE = datetime.datetime(2016, 1, 3, 0)
END_DATE = datetime.datetime(2016, 1, 10, 0)
# Event serverTime values are stored in UTC; shift the local (UTC+8) window.
START_DATE_UTC = START_DATE - datetime.timedelta(hours=8)
END_DATE_UTC = END_DATE - datetime.timedelta(hours=8)

topicIds = weeklyTopicsEnterTop10(START_DATE_UTC, END_DATE_UTC)

# Resolve each top topic id to its display name for the reports below.
topic_list = []
for t in topicIds:
    to = topics.find_one({"_id": ObjectId(t)}, {'name': 1})
    topic_list.append({"_id": str(to['_id']), "name": to['name']})

print "---------- that day ----------"
td.data_by_day(START_DATE_UTC, END_DATE_UTC)
print "---------- next week ----------"
nw.next_week(START_DATE_UTC, END_DATE_UTC)
print "---------- scene ----------"
sc.print_topic_scene(topic_list, START_DATE_UTC, END_DATE_UTC)
print "--------- time analysis ----------"
ta.print_time_analysis(topic_list, START_DATE_UTC, END_DATE_UTC)

e = time.time()
print 'Total time: ', (e-s)/ 60, 'min'
| mit |
Novasoft-India/OperERP-AM-Motors | openerp/addons/portal/portal.py | 14 | 1362 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class portal(osv.osv):
    """ A portal is simply a group of users with the flag 'is_portal' set to True.
        The flag 'is_portal' makes a user group usable as a portal.
    """
    # Extends res.groups in place rather than creating a new model.
    _inherit = 'res.groups'
    _columns = {
        # Marks this group as usable as a portal.
        'is_portal': fields.boolean('Portal', help="If checked, this group is usable as a portal."),
    }
| agpl-3.0 |
infobloxopen/infoblox-netmri | infoblox_netmri/api/broker/v2_8_0/netscreen_address_broker.py | 16 | 62435 | from ..broker import Broker
class NetscreenAddressBroker(Broker):
controller = "netscreen_addresses"
def index(self, **kwargs):
"""Lists the available netscreen addresses. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier of each device from which netscreen address table entry was found.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier of each device from which netscreen address table entry was found.
:type DeviceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressID: The internal NetMRI identifier of netscreen address.
:type NSAddressID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressID: The internal NetMRI identifier of netscreen address.
:type NSAddressID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the netscreen addresses as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of netscreen address methods. The listed methods will be called on each netscreen address returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` NSAddressID
:param sort: The data field(s) to use for sorting the output. Default is NSAddressID. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each NetscreenAddress. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return netscreen_addresses: An array of the NetscreenAddress objects that match the specified input criteria.
:rtype netscreen_addresses: Array of NetscreenAddress
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
def show(self, **kwargs):
"""Shows the details for the specified netscreen address.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param NSAddressID: The internal NetMRI identifier of netscreen address.
:type NSAddressID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of netscreen address methods. The listed methods will be called on each netscreen address returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return netscreen_address: The netscreen address identified by the specified NSAddressID.
:rtype netscreen_address: NetscreenAddress
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
def search(self, **kwargs):
"""Lists the available netscreen addresses matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record.
:type DataSourceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier of each device from which netscreen address table entry was found.
:type DeviceID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: The internal NetMRI identifier of each device from which netscreen address table entry was found.
:type DeviceID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressChangedCols: The fields that changed between this revision of the record and the previous revision.
:type NSAddressChangedCols: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressChangedCols: The fields that changed between this revision of the record and the previous revision.
:type NSAddressChangedCols: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressDomainDotted: The management Domain IP address of the netscreen address is dotted(or colon delimited for IPv6) format.
:type NSAddressDomainDotted: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressDomainDotted: The management Domain IP address of the netscreen address is dotted(or colon delimited for IPv6) format.
:type NSAddressDomainDotted: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressDomainNumeric: The numerical value of Domain IP address in the netscreen address.
:type NSAddressDomainNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressDomainNumeric: The numerical value of Domain IP address in the netscreen address.
:type NSAddressDomainNumeric: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressEndTime: The ending effective time of this record, or empty if still in effect.
:type NSAddressEndTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressEndTime: The ending effective time of this record, or empty if still in effect.
:type NSAddressEndTime: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressID: The internal NetMRI identifier of netscreen address.
:type NSAddressID: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressID: The internal NetMRI identifier of netscreen address.
:type NSAddressID: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressIndex: The current index value of the local interface for the netscreen address.
:type NSAddressIndex: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressIndex: The current index value of the local interface for the netscreen address.
:type NSAddressIndex: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressIpDotted: The management IP address of the netscreen address is dotted(or colon delimited for IPv6) format.
:type NSAddressIpDotted: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressIpDotted: The management IP address of the netscreen address is dotted(or colon delimited for IPv6) format.
:type NSAddressIpDotted: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressIpNumeric: The numerical value of the remote IP address in the netscreen address.
:type NSAddressIpNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressIpNumeric: The numerical value of the remote IP address in the netscreen address.
:type NSAddressIpNumeric: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressName: The name of the netscreen address.
:type NSAddressName: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressName: The name of the netscreen address.
:type NSAddressName: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressNetmaskDotted: The management netmask IP address of netscreen address is dotted (or colon delimited for IPv6) format.
:type NSAddressNetmaskDotted: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressNetmaskDotted: The management netmask IP address of netscreen address is dotted (or colon delimited for IPv6) format.
:type NSAddressNetmaskDotted: Array of String
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressNetmaskNumeric: The numerical value of netmask in the netscreen address.
:type NSAddressNetmaskNumeric: Integer
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressNetmaskNumeric: The numerical value of netmask in the netscreen address.
:type NSAddressNetmaskNumeric: Array of Integer
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressStartTime: The starting effective time of this record.
:type NSAddressStartTime: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressStartTime: The starting effective time of this record.
:type NSAddressStartTime: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressTimeStamp: The date and time of netscreen address was calculated or collected.
:type NSAddressTimeStamp: DateTime
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressTimeStamp: The date and time of netscreen address was calculated or collected.
:type NSAddressTimeStamp: Array of DateTime
| ``api version min:`` 2.4
| ``api version max:`` 2.4
| ``required:`` False
| ``default:`` None
:param NSAddressZone: The zone of the netscreen address.
:type NSAddressZone: String
| ``api version min:`` 2.5
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param NSAddressZone: The zone of the netscreen address.
:type NSAddressZone: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the netscreen addresses as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of netscreen address methods. The listed methods will be called on each netscreen address returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` NSAddressID
:param sort: The data field(s) to use for sorting the output. Default is NSAddressID. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each NetscreenAddress. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against netscreen addresses, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: DataSourceID, DeviceID, NSAddressChangedCols, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressEndTime, NSAddressID, NSAddressIndex, NSAddressIpDotted, NSAddressIpNumeric, NSAddressName, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, NSAddressStartTime, NSAddressTimeStamp, NSAddressZone.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return netscreen_addresses: An array of the NetscreenAddress objects that match the specified input criteria.
:rtype netscreen_addresses: Array of NetscreenAddress
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
def find(self, **kwargs):
"""Lists the available netscreen addresses matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: DataSourceID, DeviceID, NSAddressChangedCols, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressEndTime, NSAddressID, NSAddressIndex, NSAddressIpDotted, NSAddressIpNumeric, NSAddressName, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, NSAddressStartTime, NSAddressTimeStamp, NSAddressZone.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DataSourceID: The operator to apply to the field DataSourceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DataSourceID: The internal NetMRI identifier for the collector NetMRI that collected this data record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DataSourceID: If op_DataSourceID is specified, the field named in this input will be compared to the value in DataSourceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DataSourceID must be specified if op_DataSourceID is specified.
:type val_f_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DataSourceID: If op_DataSourceID is specified, this value will be compared to the value in DataSourceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DataSourceID must be specified if op_DataSourceID is specified.
:type val_c_DataSourceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_DeviceID: The operator to apply to the field DeviceID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. DeviceID: The internal NetMRI identifier of each device from which netscreen address table entry was found. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_DeviceID: If op_DeviceID is specified, the field named in this input will be compared to the value in DeviceID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_DeviceID must be specified if op_DeviceID is specified.
:type val_f_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_DeviceID: If op_DeviceID is specified, this value will be compared to the value in DeviceID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_DeviceID must be specified if op_DeviceID is specified.
:type val_c_DeviceID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressChangedCols: The operator to apply to the field NSAddressChangedCols. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressChangedCols: The fields that changed between this revision of the record and the previous revision. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressChangedCols: If op_NSAddressChangedCols is specified, the field named in this input will be compared to the value in NSAddressChangedCols using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressChangedCols must be specified if op_NSAddressChangedCols is specified.
:type val_f_NSAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressChangedCols: If op_NSAddressChangedCols is specified, this value will be compared to the value in NSAddressChangedCols using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressChangedCols must be specified if op_NSAddressChangedCols is specified.
:type val_c_NSAddressChangedCols: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressDomainDotted: The operator to apply to the field NSAddressDomainDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressDomainDotted: The management Domain IP address of the netscreen address is dotted(or colon delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressDomainDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressDomainDotted: If op_NSAddressDomainDotted is specified, the field named in this input will be compared to the value in NSAddressDomainDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressDomainDotted must be specified if op_NSAddressDomainDotted is specified.
:type val_f_NSAddressDomainDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressDomainDotted: If op_NSAddressDomainDotted is specified, this value will be compared to the value in NSAddressDomainDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressDomainDotted must be specified if op_NSAddressDomainDotted is specified.
:type val_c_NSAddressDomainDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressDomainNumeric: The operator to apply to the field NSAddressDomainNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressDomainNumeric: The numerical value of Domain IP address in the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressDomainNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressDomainNumeric: If op_NSAddressDomainNumeric is specified, the field named in this input will be compared to the value in NSAddressDomainNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressDomainNumeric must be specified if op_NSAddressDomainNumeric is specified.
:type val_f_NSAddressDomainNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressDomainNumeric: If op_NSAddressDomainNumeric is specified, this value will be compared to the value in NSAddressDomainNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressDomainNumeric must be specified if op_NSAddressDomainNumeric is specified.
:type val_c_NSAddressDomainNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressEndTime: The operator to apply to the field NSAddressEndTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressEndTime: The ending effective time of this record, or empty if still in effect. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressEndTime: If op_NSAddressEndTime is specified, the field named in this input will be compared to the value in NSAddressEndTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressEndTime must be specified if op_NSAddressEndTime is specified.
:type val_f_NSAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressEndTime: If op_NSAddressEndTime is specified, this value will be compared to the value in NSAddressEndTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressEndTime must be specified if op_NSAddressEndTime is specified.
:type val_c_NSAddressEndTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressID: The operator to apply to the field NSAddressID. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressID: The internal NetMRI identifier of netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressID: If op_NSAddressID is specified, the field named in this input will be compared to the value in NSAddressID using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressID must be specified if op_NSAddressID is specified.
:type val_f_NSAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressID: If op_NSAddressID is specified, this value will be compared to the value in NSAddressID using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressID must be specified if op_NSAddressID is specified.
:type val_c_NSAddressID: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressIndex: The operator to apply to the field NSAddressIndex. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressIndex: The current index value of the local interface for the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressIndex: If op_NSAddressIndex is specified, the field named in this input will be compared to the value in NSAddressIndex using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressIndex must be specified if op_NSAddressIndex is specified.
:type val_f_NSAddressIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressIndex: If op_NSAddressIndex is specified, this value will be compared to the value in NSAddressIndex using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressIndex must be specified if op_NSAddressIndex is specified.
:type val_c_NSAddressIndex: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressIpDotted: The operator to apply to the field NSAddressIpDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressIpDotted: The management IP address of the netscreen address is dotted(or colon delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressIpDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressIpDotted: If op_NSAddressIpDotted is specified, the field named in this input will be compared to the value in NSAddressIpDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressIpDotted must be specified if op_NSAddressIpDotted is specified.
:type val_f_NSAddressIpDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressIpDotted: If op_NSAddressIpDotted is specified, this value will be compared to the value in NSAddressIpDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressIpDotted must be specified if op_NSAddressIpDotted is specified.
:type val_c_NSAddressIpDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressIpNumeric: The operator to apply to the field NSAddressIpNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressIpNumeric: The numerical value of the remote IP address in the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressIpNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressIpNumeric: If op_NSAddressIpNumeric is specified, the field named in this input will be compared to the value in NSAddressIpNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressIpNumeric must be specified if op_NSAddressIpNumeric is specified.
:type val_f_NSAddressIpNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressIpNumeric: If op_NSAddressIpNumeric is specified, this value will be compared to the value in NSAddressIpNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressIpNumeric must be specified if op_NSAddressIpNumeric is specified.
:type val_c_NSAddressIpNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressName: The operator to apply to the field NSAddressName. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressName: The name of the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressName: If op_NSAddressName is specified, the field named in this input will be compared to the value in NSAddressName using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressName must be specified if op_NSAddressName is specified.
:type val_f_NSAddressName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressName: If op_NSAddressName is specified, this value will be compared to the value in NSAddressName using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressName must be specified if op_NSAddressName is specified.
:type val_c_NSAddressName: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressNetmaskDotted: The operator to apply to the field NSAddressNetmaskDotted. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressNetmaskDotted: The management netmask IP address of netscreen address is dotted (or colon delimited for IPv6) format. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressNetmaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressNetmaskDotted: If op_NSAddressNetmaskDotted is specified, the field named in this input will be compared to the value in NSAddressNetmaskDotted using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressNetmaskDotted must be specified if op_NSAddressNetmaskDotted is specified.
:type val_f_NSAddressNetmaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressNetmaskDotted: If op_NSAddressNetmaskDotted is specified, this value will be compared to the value in NSAddressNetmaskDotted using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressNetmaskDotted must be specified if op_NSAddressNetmaskDotted is specified.
:type val_c_NSAddressNetmaskDotted: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressNetmaskNumeric: The operator to apply to the field NSAddressNetmaskNumeric. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressNetmaskNumeric: The numerical value of netmask in the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressNetmaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressNetmaskNumeric: If op_NSAddressNetmaskNumeric is specified, the field named in this input will be compared to the value in NSAddressNetmaskNumeric using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressNetmaskNumeric must be specified if op_NSAddressNetmaskNumeric is specified.
:type val_f_NSAddressNetmaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressNetmaskNumeric: If op_NSAddressNetmaskNumeric is specified, this value will be compared to the value in NSAddressNetmaskNumeric using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressNetmaskNumeric must be specified if op_NSAddressNetmaskNumeric is specified.
:type val_c_NSAddressNetmaskNumeric: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressStartTime: The operator to apply to the field NSAddressStartTime. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressStartTime: The starting effective time of this record. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressStartTime: If op_NSAddressStartTime is specified, the field named in this input will be compared to the value in NSAddressStartTime using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressStartTime must be specified if op_NSAddressStartTime is specified.
:type val_f_NSAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressStartTime: If op_NSAddressStartTime is specified, this value will be compared to the value in NSAddressStartTime using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressStartTime must be specified if op_NSAddressStartTime is specified.
:type val_c_NSAddressStartTime: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressTimeStamp: The operator to apply to the field NSAddressTimeStamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressTimeStamp: The date and time of netscreen address was calculated or collected. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressTimeStamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressTimeStamp: If op_NSAddressTimeStamp is specified, the field named in this input will be compared to the value in NSAddressTimeStamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressTimeStamp must be specified if op_NSAddressTimeStamp is specified.
:type val_f_NSAddressTimeStamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressTimeStamp: If op_NSAddressTimeStamp is specified, this value will be compared to the value in NSAddressTimeStamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressTimeStamp must be specified if op_NSAddressTimeStamp is specified.
:type val_c_NSAddressTimeStamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_NSAddressZone: The operator to apply to the field NSAddressZone. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. NSAddressZone: The zone of the netscreen address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_NSAddressZone: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_NSAddressZone: If op_NSAddressZone is specified, the field named in this input will be compared to the value in NSAddressZone using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_NSAddressZone must be specified if op_NSAddressZone is specified.
:type val_f_NSAddressZone: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_NSAddressZone: If op_NSAddressZone is specified, this value will be compared to the value in NSAddressZone using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_NSAddressZone must be specified if op_NSAddressZone is specified.
:type val_c_NSAddressZone: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceGroupID: The internal NetMRI identifier of the device groups to which to limit the results.
:type DeviceGroupID: Array of Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: The data returned will represent the netscreen addresses as of this date and time. If omitted, the result will indicate the most recently collected data.
:type timestamp: DateTime
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of netscreen address methods. The listed methods will be called on each netscreen address returned and included in the output. Available methods are: device.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: device.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` NSAddressID
:param sort: The data field(s) to use for sorting the output. Default is NSAddressID. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each NetscreenAddress. Valid values are NSAddressID, DeviceID, NSAddressStartTime, NSAddressEndTime, NSAddressTimeStamp, NSAddressChangedCols, NSAddressIndex, NSAddressName, NSAddressZone, NSAddressIpDotted, NSAddressIpNumeric, NSAddressDomainDotted, NSAddressDomainNumeric, NSAddressNetmaskDotted, NSAddressNetmaskNumeric, DataSourceID. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return netscreen_addresses: An array of the NetscreenAddress objects that match the specified input criteria.
:rtype netscreen_addresses: Array of NetscreenAddress
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
def data_source(self, **kwargs):
    """The collector NetMRI that collected this data record.

    **Inputs**

    :param NSAddressID: The internal NetMRI identifier of netscreen address.
    :type NSAddressID: Integer
        | ``required:`` True

    **Outputs**

    :return: The collector NetMRI that collected this data record.
    :rtype: DataSource
    """
    # Resolve the fully-qualified API method name, then issue the request.
    method_name = self._get_method_fullname("data_source")
    return self.api_request(method_name, kwargs)
def device(self, **kwargs):
    """The device from which this data was collected.

    **Inputs**

    :param NSAddressID: The internal NetMRI identifier of netscreen address.
    :type NSAddressID: Integer
        | ``required:`` True

    **Outputs**

    :return: The device from which this data was collected.
    :rtype: Device
    """
    # Resolve the fully-qualified API method name, then issue the request.
    method_name = self._get_method_fullname("device")
    return self.api_request(method_name, kwargs)
def infradevice(self, **kwargs):
    """The device from which this data was collected.

    **Inputs**

    :param NSAddressID: The internal NetMRI identifier of netscreen address.
    :type NSAddressID: Integer
        | ``required:`` True

    **Outputs**

    :return: The device from which this data was collected.
    :rtype: InfraDevice
    """
    # Resolve the fully-qualified API method name, then issue the request.
    method_name = self._get_method_fullname("infradevice")
    return self.api_request(method_name, kwargs)
| apache-2.0 |
jramcast/ml_weather | example8/preprocessing.py | 2 | 3314 | """
Module for preprocessing data before
feeding it into the classfier
"""
import string
import re
from nltk.stem import SnowballStemmer
from nltk.corpus import stopwords
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import MinMaxScaler, Imputer
from sklearn.feature_extraction import DictVectorizer
from textblob import TextBlob
class SentimentExtractor(BaseEstimator, TransformerMixin):
    """
    Extracts sentiment features (polarity and subjectivity) from tweets.

    Fix: the original fitted a fresh DictVectorizer/Imputer inside every
    ``transform`` call, so the feature layout and imputation statistics were
    recomputed per batch (train/predict inconsistency).  The vectorizer and
    imputer are now fitted once in ``fit``, mirroring TempExtractor and
    WindExtractor.  ``fit`` must be called before ``transform``.
    """

    def __init__(self):
        pass

    def _features(self, tweets):
        # One dict of TextBlob sentiment scores per tweet.
        # NOTE: 'sent_subjetivity' key spelling kept for backward compatibility.
        return [{
            'sent_polarity': TextBlob(tweet).sentiment.polarity,
            'sent_subjetivity': TextBlob(tweet).sentiment.subjectivity
        } for tweet in tweets]

    def fit(self, tweets, y=None):
        """Fit the feature vectorizer and imputer on the training tweets."""
        samples = self._features(tweets)
        self.vectorizer = DictVectorizer()
        self.imputer = Imputer()
        self.imputer.fit(self.vectorizer.fit_transform(samples).toarray())
        return self

    def transform(self, tweets, y_train=None):
        """Return a scaled (0..1) matrix of sentiment features for *tweets*."""
        samples = self._features(tweets)
        vectorized = self.vectorizer.transform(samples).toarray()
        vectorized = self.imputer.transform(vectorized)
        # MinMaxScaler is refit per batch, consistent with the sibling extractors.
        vectorized_scaled = MinMaxScaler().fit_transform(vectorized)
        return vectorized_scaled
class TempExtractor(BaseEstimator, TransformerMixin):
    """
    Extracts the weather temperature (in Celsius) mentioned in a tweet.
    """

    def fit(self, tweets, y=None):
        """Fit the imputer used to fill in tweets with no temperature."""
        self.imputer = Imputer()
        temperatures = [[self.get_temperature(tweet)] for tweet in tweets]
        self.imputer.fit(temperatures)
        return self

    def transform(self, tweets, y_train=None):
        """Return a scaled (0..1) single-column matrix of temperatures."""
        temperatures = [[self.get_temperature(tweet)] for tweet in tweets]
        vectorized = self.imputer.transform(temperatures)
        vectorized_scaled = MinMaxScaler().fit_transform(vectorized)
        return vectorized_scaled

    def get_temperature(self, tweet):
        """Return the temperature in Celsius, or None when absent/implausible.

        Fix: the fractional part is now ``(\\.\\d+)?`` instead of ``(\\.\\d)?``,
        so "98.65 F" parses as 98.65 rather than accidentally matching the
        trailing "65" as a whole number.
        """
        match = re.search(r'(\d+(\.\d+)?)\s*F', tweet, re.IGNORECASE)
        if match:
            fahrenheit = float(match.group(1))
            celsius = (fahrenheit - 32) / 1.8
            # Discard values that cannot be real air temperatures.
            if -100 < celsius < 100:
                return celsius
        return None
class WindExtractor(BaseEstimator, TransformerMixin):
    """
    Extracts the wind speed (in km/h) mentioned in a tweet.
    """

    def fit(self, tweets, y=None):
        """Fit the imputer used to fill in tweets with no wind speed."""
        self.imputer = Imputer()
        winds = [[self.get_wind(tweet)] for tweet in tweets]
        self.imputer.fit(winds)
        return self

    def transform(self, tweets, y_train=None):
        """Return a scaled (0..1) single-column matrix of wind speeds."""
        winds = [[self.get_wind(tweet)] for tweet in tweets]
        vectorized = self.imputer.transform(winds)
        vectorized_scaled = MinMaxScaler().fit_transform(vectorized)
        return vectorized_scaled

    def get_wind(self, tweet):
        """Return the wind speed in km/h, or None when absent/implausible.

        Fix: the fractional part is now ``(\\.\\d+)?`` instead of ``(\\.\\d)?``,
        so "12.75 mph" parses as 12.75 rather than accidentally matching the
        trailing "75" as a whole number.
        """
        match = re.search(r'(\d+(\.\d+)?)\s*mph', tweet, re.IGNORECASE)
        if match:
            mph = float(match.group(1))
            kph = mph * 1.60934
            # Discard values that cannot be real wind speeds.
            if 0 <= kph < 500:
                return kph
        return None
stopwords_list = stopwords.words('english')
def stem_tokens(tokens, stemmer):
    """Stem each token with the given stemmer.

    Fix: the ``stemmer`` argument was previously shadowed by a freshly
    created ``SnowballStemmer('english')`` and therefore silently ignored.
    The caller-supplied stemmer is now honoured; passing ``None`` keeps the
    old behaviour of defaulting to an English Snowball stemmer.

    :param tokens: iterable of word tokens.
    :param stemmer: any object with a ``stem(word)`` method, or None.
    :return: list of stemmed tokens.
    """
    if stemmer is None:
        stemmer = SnowballStemmer('english')
    return [stemmer.stem(token) for token in tokens]
def tokenize(text):
    """Strip punctuation from *text* and return its lemmatized word tokens."""
    # Characters to discard: ASCII punctuation plus Spanish inverted marks.
    discard = set(string.punctuation)
    discard.update(['¿', '¡'])
    cleaned = ''.join(ch for ch in text if ch not in discard)
    blob = TextBlob(cleaned)
    return [word.lemmatize() for word in blob.words]
| apache-2.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/operations/backup_policies_operations.py | 2 | 5063 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class BackupPoliciesOperations(object):
    """BackupPoliciesOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client Api Version. Constant value: "2016-12-01".
    """

    def __init__(self, client, config, serializer, deserializer):
        # Auto-generated operations group: thin wrapper around the shared
        # service client; serializer/deserializer build URLs and models.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Pinned API version used for every request issued by this group.
        self.api_version = "2016-12-01"

        self.config = config

    def list(
            self, vault_name, resource_group_name, filter=None, custom_headers=None, raw=False, **operation_config):
        """Lists of backup policies associated with Recovery Services Vault. API
        provides pagination parameters to fetch scoped results.

        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the
         recovery services vault is present.
        :type resource_group_name: str
        :param filter: OData filter options.
        :type filter: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of :class:`ProtectionPolicyResource
         <azure.mgmt.recoveryservicesbackup.models.ProtectionPolicyResource>`
        :rtype: :class:`ProtectionPolicyResourcePaged
         <azure.mgmt.recoveryservicesbackup.models.ProtectionPolicyResourcePaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Closure handed to the paged collection below; it is called once per
        # page, first with next_link=None and then with the server-provided
        # continuation link.
        def internal_paging(next_link=None, raw=False):

            if not next_link:
                # Construct URL
                url = '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupPolicies'
                path_format_arguments = {
                    'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)

                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
                if filter is not None:
                    query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')

            else:
                # Continuation links already embed the query string.
                url = next_link
                query_parameters = {}

            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)

            # Anything other than 200 is surfaced as a CloudError carrying the
            # service request id for diagnostics.
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp

            return response

        # Deserialize response
        deserialized = models.ProtectionPolicyResourcePaged(internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = models.ProtectionPolicyResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized
| mit |
Krienas/allianceauth | allianceauth/authentication/migrations/0016_ownershiprecord.py | 4 | 1637 | # Generated by Django 2.0.4 on 2018-04-14 18:28
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
def create_initial_records(apps, schema_editor):
    """Seed OwnershipRecord with a row for every existing CharacterOwnership."""
    OwnershipRecord = apps.get_model('authentication', 'OwnershipRecord')
    CharacterOwnership = apps.get_model('authentication', 'CharacterOwnership')
    records = [
        OwnershipRecord(
            user=ownership.user,
            character=ownership.character,
            owner_hash=ownership.owner_hash,
        )
        for ownership in CharacterOwnership.objects.all()
    ]
    OwnershipRecord.objects.bulk_create(records)
class Migration(migrations.Migration):
    # Adds the OwnershipRecord audit model and backfills it from the
    # CharacterOwnership rows that already exist (create_initial_records).

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('eveonline', '0009_on_delete'),
        ('authentication', '0015_user_profiles'),
    ]

    operations = [
        migrations.CreateModel(
            name='OwnershipRecord',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Indexed so lookups by EVE SSO owner hash stay fast.
                ('owner_hash', models.CharField(db_index=True, max_length=28)),
                ('created', models.DateTimeField(auto_now=True)),
                ('character', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ownership_records', to='eveonline.EveCharacter')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ownership_records', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
        # Reverse is a no-op: dropping the table removes the backfilled rows.
        migrations.RunPython(create_initial_records, migrations.RunPython.noop)
    ]
| gpl-2.0 |
nikolas/edx-platform | cms/djangoapps/contentstore/views/tests/utils.py | 198 | 2922 | """
Utilities for view tests.
"""
import json
from contentstore.tests.utils import CourseTestCase
from contentstore.views.helpers import xblock_studio_url
from xmodule.modulestore.tests.factories import ItemFactory
class StudioPageTestCase(CourseTestCase):
    """
    Base class for all tests of Studio pages.
    """

    def setUp(self):
        super(StudioPageTestCase, self).setUp()
        # A minimal course outline: one chapter containing one sequential.
        self.chapter = ItemFactory.create(
            parent_location=self.course.location, category='chapter', display_name="Week 1")
        self.sequential = ItemFactory.create(
            parent_location=self.chapter.location, category='sequential', display_name="Lesson 1")

    def get_page_html(self, xblock):
        """
        Returns the HTML for the page representing the xblock.
        """
        page_url = xblock_studio_url(xblock)
        self.assertIsNotNone(page_url)
        response = self.client.get_html(page_url)
        self.assertEqual(response.status_code, 200)
        return response.content

    def get_preview_html(self, xblock, view_name):
        """
        Returns the HTML for the xblock when shown within a unit or container page.
        """
        preview_url = '/xblock/{usage_key}/{view_name}'.format(usage_key=xblock.location, view_name=view_name)
        response = self.client.get_json(preview_url)
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)['html']

    def validate_preview_html(self, xblock, view_name, can_add=True):
        """
        Verify that the specified xblock's preview has the expected HTML elements.
        """
        html = self.get_preview_html(xblock, view_name)
        self.validate_html_for_add_buttons(html, can_add)

        # Drag handles must be rendered regardless of publish state.
        self.assertIn(
            '<span data-tooltip="Drag to reorder" class="drag-handle action"></span>', html)

        # Edit, delete and duplicate action buttons must always be rendered.
        for button_html in (
            '<a href="#" class="edit-button action-button">',
            '<a href="#" data-tooltip="Delete" class="delete-button action-button">',
            '<a href="#" data-tooltip="Duplicate" class="duplicate-button action-button">',
        ):
            self.assertIn(button_html, html)

    def validate_html_for_add_buttons(self, html, can_add=True):
        """
        Validate that the specified HTML has the appropriate add actions for the current publish state.
        """
        add_button_html = '<div class="add-xblock-component new-component-item adding"></div>'
        if can_add:
            self.assertIn(add_button_html, html)
        else:
            self.assertNotIn(add_button_html, html)
| agpl-3.0 |
jmesteve/openerp | openerp/addons/base/ir/ir_sequence.py | 43 | 14753 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
import openerp
from openerp.osv import osv
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class ir_sequence_type(openerp.osv.osv.osv):
    """Named sequence types; their ``code`` is referenced by ir.sequence."""
    _name = 'ir.sequence.type'
    _order = 'name'
    _columns = {
        'name': openerp.osv.fields.char('Name', size=64, required=True),
        'code': openerp.osv.fields.char('Code', size=32, required=True),
    }

    # A type's code must be unique: it is the lookup key used by consumers.
    _sql_constraints = [
        ('code_unique', 'unique(code)', '`code` must be unique.'),
    ]
def _code_get(self, cr, uid, context=None):
    """Return [(code, name)] pairs of all sequence types.

    Used as the selection callable for ir.sequence's ``code`` field.
    """
    cr.execute('select code, name from ir_sequence_type')
    return cr.fetchall()
class ir_sequence(openerp.osv.osv.osv):
""" Sequence model.
The sequence model allows to define and use so-called sequence objects.
Such objects are used to generate unique identifiers in a transaction-safe
way.
"""
_name = 'ir.sequence'
_order = 'name'
def _get_number_next_actual(self, cr, user, ids, field_name, arg, context=None):
    '''Return number from ir_sequence row when no_gap implementation,
    and number from postgres sequence when standard implementation.'''
    res = dict.fromkeys(ids)
    for element in self.browse(cr, user, ids, context=context):
        if element.implementation != 'standard':
            # no_gap sequences store the counter directly on the row.
            res[element.id] = element.number_next
        else:
            # get number from postgres sequence. Cannot use
            # currval, because that might give an error when
            # not having used nextval before.
            # Sequence name is derived from the integer record id, so the
            # string interpolation cannot inject arbitrary SQL.
            statement = (
                "SELECT last_value, increment_by, is_called"
                " FROM ir_sequence_%03d"
                % element.id)
            cr.execute(statement)
            (last_value, increment_by, is_called) = cr.fetchone()
            if is_called:
                # nextval has been used at least once: the next value is ahead.
                res[element.id] = last_value + increment_by
            else:
                # Freshly created/restarted sequence: last_value IS the next value.
                res[element.id] = last_value
    return res
def _set_number_next_actual(self, cr, uid, id, name, value, args=None, context=None):
    """Inverse of _get_number_next_actual: persist the next number (0 when falsy)."""
    vals = {'number_next': value or 0}
    return self.write(cr, uid, id, vals, context=context)
# Model fields. 'standard' sequences are backed by a real PostgreSQL
# sequence (fast, gaps possible); 'no_gap' sequences lock the row instead.
_columns = {
    'name': openerp.osv.fields.char('Name', size=64, required=True),
    'code': openerp.osv.fields.selection(_code_get, 'Code', size=64),
    'implementation': openerp.osv.fields.selection( # TODO update the view
        [('standard', 'Standard'), ('no_gap', 'No gap')],
        'Implementation', required=True,
        help="Two sequence object implementations are offered: Standard "
        "and 'No gap'. The later is slower than the former but forbids any"
        " gap in the sequence (while they are possible in the former)."),
    'active': openerp.osv.fields.boolean('Active'),
    'prefix': openerp.osv.fields.char('Prefix', size=64, help="Prefix value of the record for the sequence"),
    'suffix': openerp.osv.fields.char('Suffix', size=64, help="Suffix value of the record for the sequence"),
    'number_next': openerp.osv.fields.integer('Next Number', required=True, help="Next number of this sequence"),
    # Functional field: reads the live value from the PostgreSQL sequence
    # for 'standard' implementation (see _get_number_next_actual).
    'number_next_actual': openerp.osv.fields.function(_get_number_next_actual, fnct_inv=_set_number_next_actual, type='integer', required=True, string='Next Number', help='Next number that will be used. This number can be incremented frequently so the displayed value might already be obsolete'),
    'number_increment': openerp.osv.fields.integer('Increment Number', required=True, help="The next number of the sequence will be incremented by this number"),
    'padding' : openerp.osv.fields.integer('Number Padding', required=True, help="OpenERP will automatically adds some '0' on the left of the 'Next Number' to get the required padding size."),
    'company_id': openerp.osv.fields.many2one('res.company', 'Company'),
}

_defaults = {
    'implementation': 'standard',
    'active': True,
    'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.sequence', context=c),
    'number_increment': 1,
    'number_next': 1,
    'number_next_actual': 1,
    'padding' : 0,
}
def init(self, cr):
    # NOTE: deliberately disabled by the early return below — the unique
    # index on (code, company_id) is not created yet.  Everything after the
    # return is dead code kept as a reference for the intended constraint.
    return # Don't do the following index yet.
    # CONSTRAINT/UNIQUE INDEX on (code, company_id)
    # /!\ The unique constraint 'unique_name_company_id' is not sufficient, because SQL92
    # only support field names in constraint definitions, and we need a function here:
    # we need to special-case company_id to treat all NULL company_id as equal, otherwise
    # we would allow duplicate (code, NULL) ir_sequences.
    cr.execute("""
        SELECT indexname FROM pg_indexes WHERE indexname =
        'ir_sequence_unique_code_company_id_idx'""")
    if not cr.fetchone():
        cr.execute("""
            CREATE UNIQUE INDEX ir_sequence_unique_code_company_id_idx
            ON ir_sequence (code, (COALESCE(company_id,-1)))""")
def _create_sequence(self, cr, id, number_increment, number_next):
    """ Create a PostreSQL sequence.

    There is no access rights check.

    :param id: integer record id; the backing sequence is named
        ``ir_sequence_%03d`` after it (the isinstance assert below keeps
        the name interpolation SQL-injection safe).
    :param number_increment: step for nextval; must not be zero.
    :param number_next: starting value of the sequence.
    """
    if number_increment == 0:
        raise osv.except_osv(_('Warning!'),_("Increment number must not be zero."))
    assert isinstance(id, (int, long))
    # Increment/start are passed as bound parameters, not interpolated.
    sql = "CREATE SEQUENCE ir_sequence_%03d INCREMENT BY %%s START WITH %%s" % id
    cr.execute(sql, (number_increment, number_next))
def _drop_sequence(self, cr, ids):
    """ Drop the PostreSQL sequence if it exists.

    There is no access rights check.

    :param ids: a single record id or a list/tuple of them; each must be an
        integer (asserted) so the sequence-name interpolation is safe.
    """
    ids = ids if isinstance(ids, (list, tuple)) else [ids]
    assert all(isinstance(i, (int, long)) for i in ids), \
        "Only ids in (int, long) allowed."
    names = ','.join('ir_sequence_%03d' % i for i in ids)

    # RESTRICT is the default; it prevents dropping the sequence if an
    # object depends on it.
    cr.execute("DROP SEQUENCE IF EXISTS %s RESTRICT " % names)
def _alter_sequence(self, cr, id, number_increment, number_next=None):
    """ Alter a PostreSQL sequence.

    There is no access rights check.

    :param id: integer record id of the sequence (asserted, so the name
        interpolation below is safe).
    :param number_increment: new increment; must not be zero.
    :param number_next: if given, also RESTART the sequence at this value.
    """
    if number_increment == 0:
        raise osv.except_osv(_('Warning!'),_("Increment number must not be zero."))
    assert isinstance(id, (int, long))
    seq_name = 'ir_sequence_%03d' % (id,)
    cr.execute("SELECT relname FROM pg_class WHERE relkind = %s AND relname=%s", ('S', seq_name))
    if not cr.fetchone():
        # sequence is not created yet, we're inside create() so ignore it, will be set later
        return
    statement = "ALTER SEQUENCE %s INCREMENT BY %d" % (seq_name, number_increment)
    if number_next is not None:
        statement += " RESTART WITH %d" % (number_next, )
    cr.execute(statement)
def create(self, cr, uid, values, context=None):
    """ Create a sequence, in implementation == standard a fast gaps-allowed PostgreSQL sequence is used.
    """
    values = self._add_missing_default_values(cr, uid, values, context)
    # Stash the new record id back into values so the sequence name and
    # the return value below can both use it.
    values['id'] = super(ir_sequence, self).create(cr, uid, values, context)
    if values['implementation'] == 'standard':
        # Only 'standard' sequences have a real PostgreSQL sequence behind them.
        self._create_sequence(cr, values['id'], values['number_increment'], values['number_next'])
    return values['id']
def unlink(self, cr, uid, ids, context=None):
    """Delete the sequence records, then drop their backing PostgreSQL sequences."""
    super(ir_sequence, self).unlink(cr, uid, ids, context)
    # _drop_sequence is a no-op for ids that never had a real sequence
    # (DROP ... IF EXISTS).
    self._drop_sequence(cr, ids)
    return True
    def write(self, cr, uid, ids, values, context=None):
        # Keep the backing PostgreSQL sequences in sync with the records
        # when implementation / increment / next-number change.
        if not isinstance(ids, (list, tuple)):
            ids = [ids]
        new_implementation = values.get('implementation')
        # Snapshot the previous state before super().write overwrites it.
        rows = self.read(cr, uid, ids, ['implementation', 'number_increment', 'number_next'], context)
        super(ir_sequence, self).write(cr, uid, ids, values, context)
        for row in rows:
            # 4 cases: we test the previous impl. against the new one.
            i = values.get('number_increment', row['number_increment'])
            n = values.get('number_next', row['number_next'])
            if row['implementation'] == 'standard':
                if new_implementation in ('standard', None):
                    # Implementation has NOT changed.
                    # Only change sequence if really requested.
                    if row['number_next'] != n:
                        self._alter_sequence(cr, row['id'], i, n)
                    else:
                        # Just in case only increment changed
                        self._alter_sequence(cr, row['id'], i)
                else:
                    # standard -> no_gap: the backing sequence is obsolete.
                    self._drop_sequence(cr, row['id'])
            else:
                if new_implementation in ('no_gap', None):
                    # no_gap -> no_gap: the counter lives in the table row,
                    # nothing to synchronize.
                    pass
                else:
                    # no_gap -> standard: create the backing sequence.
                    self._create_sequence(cr, row['id'], i, n)
        return True
def _interpolate(self, s, d):
if s:
return s % d
return ''
def _interpolation_dict(self):
t = time.localtime() # Actually, the server is always in UTC.
return {
'year': time.strftime('%Y', t),
'month': time.strftime('%m', t),
'day': time.strftime('%d', t),
'y': time.strftime('%y', t),
'doy': time.strftime('%j', t),
'woy': time.strftime('%W', t),
'weekday': time.strftime('%w', t),
'h24': time.strftime('%H', t),
'h12': time.strftime('%I', t),
'min': time.strftime('%M', t),
'sec': time.strftime('%S', t),
}
    def _next(self, cr, uid, seq_ids, context=None):
        """ Return the next value of the best-matching sequence in
        ``seq_ids``, rendered as prefix + padded number + suffix.

        A sequence attached to the company in ``context['force_company']``
        (falling back to the user's own company) is preferred; otherwise
        the first sequence read is used. Returns False when ``seq_ids``
        is empty.
        """
        if not seq_ids:
            return False
        if context is None:
            context = {}
        force_company = context.get('force_company')
        if not force_company:
            force_company = self.pool.get('res.users').browse(cr, uid, uid).company_id.id
        sequences = self.read(cr, uid, seq_ids, ['name','company_id','implementation','number_next','prefix','suffix','padding'])
        preferred_sequences = [s for s in sequences if s['company_id'] and s['company_id'][0] == force_company ]
        seq = preferred_sequences[0] if preferred_sequences else sequences[0]
        if seq['implementation'] == 'standard':
            # id is an integer, so the %03d interpolation is injection-safe.
            cr.execute("SELECT nextval('ir_sequence_%03d')" % seq['id'])
            # NOTE(review): fetchone() returns a 1-tuple here; the final
            # '%0Nd' formatting below still works because %-formatting
            # unpacks a tuple argument -- confirm before refactoring.
            seq['number_next'] = cr.fetchone()
        else:
            # 'no_gap': lock the row (NOWAIT fails fast on contention), then
            # bump the stored counter; the value formatted below is the
            # pre-increment number_next obtained by the read() above.
            cr.execute("SELECT number_next FROM ir_sequence WHERE id=%s FOR UPDATE NOWAIT", (seq['id'],))
            cr.execute("UPDATE ir_sequence SET number_next=number_next+number_increment WHERE id=%s ", (seq['id'],))
        d = self._interpolation_dict()
        try:
            interpolated_prefix = self._interpolate(seq['prefix'], d)
            interpolated_suffix = self._interpolate(seq['suffix'], d)
        except ValueError:
            raise osv.except_osv(_('Warning'), _('Invalid prefix or suffix for sequence \'%s\'') % (seq.get('name')))
        return interpolated_prefix + '%%0%sd' % seq['padding'] % seq['number_next'] + interpolated_suffix
def next_by_id(self, cr, uid, sequence_id, context=None):
""" Draw an interpolated string using the specified sequence."""
self.check_access_rights(cr, uid, 'read')
company_ids = self.pool.get('res.company').search(cr, uid, [], context=context) + [False]
ids = self.search(cr, uid, ['&',('id','=', sequence_id),('company_id','in',company_ids)])
return self._next(cr, uid, ids, context)
def next_by_code(self, cr, uid, sequence_code, context=None):
""" Draw an interpolated string using a sequence with the requested code.
If several sequences with the correct code are available to the user
(multi-company cases), the one from the user's current company will
be used.
:param dict context: context dictionary may contain a
``force_company`` key with the ID of the company to
use instead of the user's current company for the
sequence selection. A matching sequence for that
specific company will get higher priority.
"""
self.check_access_rights(cr, uid, 'read')
company_ids = self.pool.get('res.company').search(cr, uid, [], context=context) + [False]
ids = self.search(cr, uid, ['&', ('code', '=', sequence_code), ('company_id', 'in', company_ids)])
return self._next(cr, uid, ids, context)
def get_id(self, cr, uid, sequence_code_or_id, code_or_id='id', context=None):
""" Draw an interpolated string using the specified sequence.
The sequence to use is specified by the ``sequence_code_or_id``
argument, which can be a code or an id (as controlled by the
``code_or_id`` argument. This method is deprecated.
"""
# TODO: bump up to warning after 6.1 release
_logger.debug("ir_sequence.get() and ir_sequence.get_id() are deprecated. "
"Please use ir_sequence.next_by_code() or ir_sequence.next_by_id().")
if code_or_id == 'id':
return self.next_by_id(cr, uid, sequence_code_or_id, context)
else:
return self.next_by_code(cr, uid, sequence_code_or_id, context)
    def get(self, cr, uid, code, context=None):
        """ Draw an interpolated string using the sequence with the given
        code. Deprecated; use :meth:`next_by_code` instead.
        """
        return self.get_id(cr, uid, code, 'code', context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
asm-products/movie-database-service | ani/lib/python2.7/site-packages/django/core/management/commands/dbshell.py | 329 | 1243 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import connections, DEFAULT_DB_ALIAS
class Command(BaseCommand):
    # Management command that opens the interactive command-line client
    # (psql, mysql, sqlite3, ...) for the selected database connection.
    help = ("Runs the command-line client for specified database, or the "
            "default database if none is provided.")
    option_list = BaseCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
                'open a shell. Defaults to the "default" database.'),
    )
    # No model validation is needed just to spawn a database shell.
    requires_model_validation = False
    def handle(self, **options):
        # Look up the connection for the requested alias and delegate to the
        # backend-specific client wrapper.
        connection = connections[options.get('database')]
        try:
            connection.client.runshell()
        except OSError:
            # Note that we're assuming OSError means that the client program
            # isn't installed. There's a possibility OSError would be raised
            # for some other reason, in which case this error message would be
            # inaccurate. Still, this message catches the common case.
            raise CommandError('You appear not to have the %r program installed or on your path.' % \
                connection.client.executable_name)
| agpl-3.0 |
mark-ignacio/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/read_checksum_from_png_unittest.py | 124 | 3570 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import unittest2 as unittest
from webkitpy.common import read_checksum_from_png
class ReadChecksumFromPngTest(unittest.TestCase):
    """Tests for read_checksum_from_png.read_checksum on raw PNG byte streams."""
    def test_read_checksum(self):
        """A tEXt 'checksum' chunk yields its value; a PNG without one yields None."""
        # Test a file with the comment.
        filehandle = StringIO.StringIO('''\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x03 \x00\x00\x02X\x08\x02\x00\x00\x00\x15\x14\x15'\x00\x00\x00)tEXtchecksum\x003c4134fe2739880353f91c5b84cadbaaC\xb8?\xec\x00\x00\x16\xfeIDATx\x9c\xed\xdd[\x8cU\xe5\xc1\xff\xf15T\x18\x0ea,)\xa6\x80XZ<\x10\n\xd6H\xc4V\x88}\xb5\xa9\xd6r\xd5\x0bki0\xa6\xb5ih\xd2\xde\x98PHz\xd1\x02=\\q#\x01\x8b\xa5rJ\x8b\x88i\xacM\xc5h\x8cbMk(\x1ez@!\x0c\xd5\xd2\xc2\xb44\x1c\x848\x1dF(\xeb\x7f\xb1\xff\xd9\xef~g\xd6\xde3\xe0o\x10\xec\xe7sa6{\xd6z\xd6\xb3\xd7\xf3\xa8_7\xdbM[Y\x96\x05\x00\x009\xc3\xde\xeb\t\x00\x00\xbc\xdf\x08,\x00\x800\x81\x05\x00\x10&\xb0\x00\x00\xc2\x04\x16\x00@\x98\xc0\x02\x00\x08\x13X\x00\x00a\x02\x0b\x00 Lx01\x00\x84\t,\x00\x800\x81\x05\x00\x10\xd64\xb0\xda\x9a\xdb\xb6m\xdb\xb4i\xd3\xfa\x9fr\xf3\xcd7\x0f\xe5T\x07\xe5\xd4\xa9''')
        checksum = read_checksum_from_png.read_checksum(filehandle)
        self.assertEqual('3c4134fe2739880353f91c5b84cadbaa', checksum)
        # Test a file without the comment.
        filehandle = StringIO.StringIO('''\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x03 \x00\x00\x02X\x08\x02\x00\x00\x00\x15\x14\x15'\x00\x00\x16\xfeIDATx\x9c\xed\xdd[\x8cU\xe5\xc1\xff\xf15T\x18\x0ea,)\xa6\x80XZ<\x10\n\xd6H\xc4V\x88}\xb5\xa9\xd6r\xd5\x0bki0\xa6\xb5ih\xd2\xde\x98PHz\xd1\x02=\\q#\x01\x8b\xa5rJ\x8b\x88i\xacM\xc5h\x8cbMk(\x1ez@!\x0c\xd5\xd2\xc2\xb44\x1c\x848\x1dF(\xeb\x7f\xb1\xff\xd9\xef~g\xd6\xde3\xe0o\x10\xec\xe7sa6{\xd6z\xd6\xb3\xd7\xf3\xa8_7\xdbM[Y\x96\x05\x00\x009\xc3\xde\xeb\t\x00\x00\xbc\xdf\x08,\x00\x800\x81\x05\x00\x10&\xb0\x00\x00\xc2\x04\x16\x00@\x98\xc0\x02\x00\x08\x13X\x00\x00a\x02\x0b\x00 Lx01\x00\x84\t,\x00\x800\x81\x05\x00\x10\xd64\xb0\xda\x9a\xdb\xb6m\xdb\xb4i\xd3\xfa\x9fr\xf3\xcd7\x0f\xe5T\x07\xe5\xd4\xa9S\x8b\x17/\x1e?~\xfc\xf8\xf1\xe3\xef\xbf\xff\xfe\xf7z:M5\xbb\x87\x17\xcbUZ\x8f|V\xd7\xbd\x10\xb6\xcd{b\x88\xf6j\xb3\x9b?\x14\x9b\xa1>\xe6\xf9\xd9\xcf\x00\x17\x93''')
        checksum = read_checksum_from_png.read_checksum(filehandle)
        self.assertIsNone(checksum)
| bsd-3-clause |
wasade/American-Gut | tests/test_diversity_analysis.py | 5 | 38742 | #!/usr/bin/env python
from __future__ import division
from unittest import TestCase, main
import numpy as np
import numpy.testing as npt
import skbio
from os import rmdir
from os.path import realpath, dirname, join as pjoin, exists
from pandas import Series, DataFrame, Index
from pandas.util.testing import assert_index_equal, assert_frame_equal
from americangut.diversity_analysis import (pad_index,
check_dir,
post_hoc_pandas,
multiple_correct_post_hoc,
get_distance_vectors,
segment_colormap,
_get_bar_height,
_get_p_value,
_correct_p_value,
split_taxa,
get_ratio_heatmap)
# Standard module metadata for the American Gut project.
__author__ = "Justine Debelius"
__copyright__ = "Copyright 2014"
__credits__ = ["Justine Debelius"]
__license__ = "BSD"
__version__ = "unversioned"
__maintainer__ = "Justine Debelius"
__email__ = "Justine.Debelius@colorado.edu"
# Determines the location of the reference files
TEST_DIR = dirname(realpath(__file__))
class DiversityAnalysisTest(TestCase):
def setUp(self):
# Sets up lists for the data frame
self.ids = ['000001181.5654', '000001096.8485', '000001348.2238',
'000001239.2471', '000001925.5603', '000001098.6354',
'000001577.8059', '000001778.097' , '000001969.1967',
'000001423.7093', '000001180.1049', '000001212.5887',
'000001984.9281', '000001025.9349', '000001464.5884',
'000001800.6787', '000001629.5398', '000001473.443',
'000001351.1149', '000001223.1658', '000001884.0338',
'000001431.6762', '000001436.0807', '000001726.2609',
'000001717.784' , '000001290.9612', '000001806.4843',
'000001490.0658', '000001719.4572', '000001244.6229',
'000001092.3014', '000001315.8661', '000001525.8659',
'000001864.7889', '000001816.9' , '000001916.7858',
'000001261.3164', '000001593.2364', '000001817.3052',
'000001879.8596', '000001509.217' , '000001658.4638',
'000001741.9117', '000001940.457' , '000001620.315' ,
'000001706.6473', '000001287.1914', '000001370.8878',
'000001943.0664', '000001187.2735', '000001065.4497',
'000001598.6903', '000001254.2929', '000001526.143' ,
'000001980.8969', '000001147.6823', '000001745.3174',
'000001978.6417', '000001547.4582', '000001649.7564',
'000001752.3511', '000001231.5535', '000001875.7213',
'000001247.5567', '000001412.7777', '000001364.1045',
'000001124.3191', '000001654.0339', '000001795.4842',
'000001069.8469', '000001149.2945', '000001858.8903',
'000001667.8228', '000001648.5881', '000001775.0501',
'000001023.1689', '000001001.0859', '000001129.0853',
'000001992.9674', '000001174.3727', '000001126.3446',
'000001553.099' , '000001700.7898', '000001345.5369',
'000001821.4033', '000001921.0702', '000001368.0382',
'000001589.0756', '000001428.6135', '000001417.7107',
'000001050.2949', '000001549.0374', '000001169.7575',
'000001827.0751', '000001974.5358', '000001081.3137',
'000001452.7866', '000001194.8171', '000001781.3765',
'000001676.7693', '000001536.9816', '000001123.9341',
'000001950.0472', '000001386.1622', '000001789.8068',
'000001434.209', '000001156.782' , '000001630.8111',
'000001930.9789', '000001136.2997', '000001901.1578',
'000001358.6365', '000001834.4873', '000001175.739' ,
'000001565.3199', '000001532.5022', '000001844.4434',
'000001374.6652', '000001066.9395', '000001277.3526',
'000001765.7054', '000001195.7903', '000001403.1857',
'000001267.8034', '000001463.8063', '000001567.256' ,
'000001986.3291', '000001912.5336', '000001179.8083',
'000001539.4475', '000001702.7498', '000001362.2036',
'000001605.3957', '000001966.5905', '000001690.2717',
'000001796.78' , '000001965.9646', '000001570.6394',
'000001344.0749', '000001505.094' , '000001500.3763',
'000001887.334' , '000001896.9071', '000001061.5473',
'000001210.8434', '000001762.6421', '000001389.9375',
'000001747.7094', '000001275.7608', '000001100.6327',
'000001832.2851', '000001627.4754', '000001811.8183',
'000001202.8991', '000001163.3137', '000001196.7148',
'000001318.8771', '000001155.3022', '000001724.2977',
'000001737.328' , '000001289.1381', '000001480.495',
'000001797.7651', '000001117.9836', '000001108.0792',
'000001060.2191', '000001379.0706', '000001513.9224',
'000001731.9258', '000001563.7487', '000001988.1656',
'000001594.7285', '000001909.1042', '000001920.0818',
'000001999.9644', '000001133.9942', '000001608.1459',
'000001784.159' , '000001543.759' , '000001669.3403',
'000001545.3456', '000001177.5607', '000001387.8614',
'000001086.4642', '000001514.2136', '000001329.4163',
'000001757.7272', '000001574.9939', '000001750.1329',
'000001682.8423', '000001331.238' , '000001330.6685',
'000001556.6615', '000001575.4633', '000001754.591' ,
'000001456.5672', '000001707.2857', '000001164.864' ,
'000001466.7766', '000001383.5692', '000001911.8425',
'000001880.6072', '000001278.4999', '000001671.8068',
'000001301.3063', '000001071.2867', '000001192.7655',
'000001954.0541', '000001041.0466', '000001862.7417',
'000001587.4996', '000001242.6044', '000001040.399' ,
'000001744.3975', '000001189.5132', '000001885.0033',
'000001193.7964', '000001204.533' , '000001279.8583',
'000001488.2298', '000001971.1838', '000001492.0943',
'000001722.285' , '000001947.5481', '000001054.343' ,
'000001227.5756', '000001603.0731', '000001948.0095',
'000001393.6518', '000001661.6287', '000001829.9104',
'000001342.3216', '000001341.7147', '000001994.1765',
'000001400.0325', '000001324.5159', '000001355.789' ,
'000001538.6368', '000001121.0767', '000001377.1835',
'000001831.3158', '000001968.0205', '000001003.7916',
'000001502.0367', '000001729.5203', '000001284.1266',
'000001252.1786', '000001533.2349', '000001198.741' ,
'000001483.1918', '000001528.3996', '000001304.2649',
'000001281.7718', '000001441.8902', '000001203.4813',
'000001657.915' , '000001668.1396', '000001560.6021',
'000001213.1081', '000001894.5208', '000001791.9156',
'000001371.9864', '000001631.1904', '000001635.3301',
'000001541.2899', '000001748.311' , '000001326.0745',
'000001736.2491', '000001028.1898', '000001997.5772',
'000001764.9201', '000001664.4968', '000001031.0638',
'000001457.8448', '000001335.8157', '000001053.361' ,
'000001372.2917', '000001847.3652', '000001746.7838',
'000001173.0655', '000001653.9771', '000001104.8455',
'000001642.548' , '000001866.4881', '000001381.5643',
'000001673.6333', '000001839.2794', '000001855.195' ,
'000001698.1673', '000001813.0695', '000001153.6346',
'000001354.0321', '000001035.5915', '000001469.6652',
'000001422.9333', '000001148.4367', '000001551.0986',
'000001047.9434', '000001160.0422', '000001621.3736']
self.raw_ids = ['1181.5654', '1096.8485', '1348.2238', '1239.2471',
'1925.5603', '1098.6354', '1577.8059', '1778.097',
'1969.1967', '1423.7093', '1180.1049', '1212.5887',
'1984.9281', '1025.9349', '1464.5884', '1800.6787',
'1629.5398', '1473.443', '1351.1149', '1223.1658',
'1884.0338', '1431.6762', '1436.0807', '1726.2609',
'1717.784', '1290.9612', '1806.4843', '1490.0658',
'1719.4572', '1244.6229', '1092.3014', '1315.8661',
'1525.8659', '1864.7889', '1816.9', '1916.7858',
'1261.3164', '1593.2364', '1817.3052', '1879.8596',
'1509.217', '1658.4638', '1741.9117', '1940.457',
'1620.315', '1706.6473', '1287.1914', '1370.8878',
'1943.0664', '1187.2735', '1065.4497', '1598.6903',
'1254.2929', '1526.143', '1980.8969', '1147.6823',
'1745.3174', '1978.6417', '1547.4582', '1649.7564',
'1752.3511', '1231.5535', '1875.7213', '1247.5567',
'1412.7777', '1364.1045', '1124.3191', '1654.0339',
'1795.4842', '1069.8469', '1149.2945', '1858.8903',
'1667.8228', '1648.5881', '1775.0501', '1023.1689',
'1001.0859', '1129.0853', '1992.9674', '1174.3727',
'1126.3446', '1553.099', '1700.7898', '1345.5369',
'1821.4033', '1921.0702', '1368.0382', '1589.0756',
'1428.6135', '1417.7107', '1050.2949', '1549.0374',
'1169.7575', '1827.0751', '1974.5358', '1081.3137',
'1452.7866', '1194.8171', '1781.3765', '1676.7693',
'1536.9816', '1123.9341', '1950.0472', '1386.1622',
'1789.8068', '1434.209', '1156.782', '1630.8111',
'1930.9789', '1136.2997', '1901.1578', '1358.6365',
'1834.4873', '1175.739', '1565.3199', '1532.5022',
'1844.4434', '1374.6652', '1066.9395', '1277.3526',
'1765.7054', '1195.7903', '1403.1857', '1267.8034',
'1463.8063', '1567.256', '1986.3291', '1912.5336',
'1179.8083', '1539.4475', '1702.7498', '1362.2036',
'1605.3957', '1966.5905', '1690.2717', '1796.78',
'1965.9646', '1570.6394', '1344.0749', '1505.094',
'1500.3763', '1887.334', '1896.9071', '1061.5473',
'1210.8434', '1762.6421', '1389.9375', '1747.7094',
'1275.7608', '1100.6327', '1832.2851', '1627.4754',
'1811.8183', '1202.8991', '1163.3137', '1196.7148',
'1318.8771', '1155.3022', '1724.2977', '1737.328',
'1289.1381', '1480.495', '1797.7651', '1117.9836',
'1108.0792', '1060.2191', '1379.0706', '1513.9224',
'1731.9258', '1563.7487', '1988.1656', '1594.7285',
'1909.1042', '1920.0818', '1999.9644', '1133.9942',
'1608.1459', '1784.159', '1543.759', '1669.3403',
'1545.3456', '1177.5607', '1387.8614', '1086.4642',
'1514.2136', '1329.4163', '1757.7272', '1574.9939',
'1750.1329', '1682.8423', '1331.238', '1330.6685',
'1556.6615', '1575.4633', '1754.591', '1456.5672',
'1707.2857', '1164.864', '1466.7766', '1383.5692',
'1911.8425', '1880.6072', '1278.4999', '1671.8068',
'1301.3063', '1071.2867', '1192.7655', '1954.0541',
'1041.0466', '1862.7417', '1587.4996', '1242.6044',
'1040.399', '1744.3975', '1189.5132', '1885.0033',
'1193.7964', '1204.533', '1279.8583', '1488.2298',
'1971.1838', '1492.0943', '1722.285', '1947.5481',
'1054.343', '1227.5756', '1603.0731', '1948.0095',
'1393.6518', '1661.6287', '1829.9104', '1342.3216',
'1341.7147', '1994.1765', '1400.0325', '1324.5159',
'1355.789', '1538.6368', '1121.0767', '1377.1835',
'1831.3158', '1968.0205', '1003.7916', '1502.0367',
'1729.5203', '1284.1266', '1252.1786', '1533.2349',
'1198.741', '1483.1918', '1528.3996', '1304.2649',
'1281.7718', '1441.8902', '1203.4813', '1657.915',
'1668.1396', '1560.6021', '1213.1081', '1894.5208',
'1791.9156', '1371.9864', '1631.1904', '1635.3301',
'1541.2899', '1748.311', '1326.0745', '1736.2491',
'1028.1898', '1997.5772', '1764.9201', '1664.4968',
'1031.0638', '1457.8448', '1335.8157', '1053.361',
'1372.2917', '1847.3652', '1746.7838', '1173.0655',
'1653.9771', '1104.8455', '1642.548', '1866.4881',
'1381.5643', '1673.6333', '1839.2794', '1855.195',
'1698.1673', '1813.0695', '1153.6346', '1354.0321',
'1035.5915', '1469.6652', '1422.9333', '1148.4367',
'1551.0986', '1047.9434', '1160.0422', '1621.3736']
self.website = ['twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'twitter', 'twitter', 'twitter', 'twitter', 'twitter',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'facebook', 'facebook', 'facebook', 'facebook',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit',
'reddit', 'reddit', 'reddit', 'reddit', 'reddit']
self.time = np.array([43.75502506, 32.09982846, 66.44821015,
54.67751100, 74.43663107, 64.91509381,
101.03624273, 42.50120543, 35.92898678,
50.84800153, 46.32394154, 55.82813196,
63.90361272, 77.13825762, 78.76436441,
53.64704526, 64.75223193, 58.39207272,
52.44353642, 60.38707826, 56.51714085,
55.72374379, 59.52585080, 62.99625025,
40.04902494, 89.02585909, 63.23240605,
47.06553888, 73.00190315, 83.80903794,
43.41851989, 25.83410322, 68.21623464,
50.43442676, 49.98389215, 40.24409163,
73.12600309, 59.26529974, 61.66301113,
82.24776146, 69.88472085, 55.33333433,
40.29625976, 68.09510810, 66.85545440,
66.44002527, 72.37790419, 72.81679314,
55.09080142, 48.37538346, 47.60326036,
51.52223083, 56.51417473, 83.04863572,
52.14761947, 81.71073287, 40.88456188,
61.76308339, 75.31540245, 64.41482716,
52.36763551, 64.48863043, 42.46265519,
76.41626766, 73.35103300, 60.13966132,
55.09395578, 72.26945197, 64.14173225,
59.39558958, 54.92166432, 56.15937888,
35.82839971, 80.22338349, 52.03277136,
30.46794613, 58.48158453, 51.08064303,
67.56882508, 64.67001088, 70.31701029,
69.69418892, 45.40860831, 68.72559847,
57.18659048, 79.66512776, 54.12521925,
81.23543425, 79.58214820, 34.09101162,
34.07926981, 53.68661297, 84.73351889,
76.98667389, 83.91038109, 66.35125602,
43.38243470, 60.07458569, 64.01561208,
70.66573983, 193.40761370, 149.46771172,
178.54940784, 146.81737462, 112.67080963,
105.79566831, 169.60015351, 18.16782312,
32.33793705, 161.72043630, 136.65935083,
23.99200240, 124.30215961, 82.66230873,
181.53122374, 96.73843934, 149.75297762,
119.92104479, 29.30535556, 88.98066487,
82.18281694, 99.76251178, 120.62310261,
136.15837651, 140.85019656, 117.06990731,
163.65366512, 214.50717765, 79.72206954,
138.03112015, 144.45114437, 16.41512219,
72.08551518, 85.46372630, 149.13372767,
76.92212059, 109.55645713, 141.65595764,
119.18734692, 51.20662038, 183.75411201,
132.56555213, 101.55378472, 177.69500317,
130.27160521, 143.13166882, 107.23696643,
212.72330518, 130.66925461, 210.11532010,
118.65653641, 77.25638890, 153.29389237,
146.97514023, 0, 105.83704268,
200.05768527, 166.46158871, 135.60586892,
111.06739555, 71.50642636, 21.58216051,
183.15691697, 38.58822892, 38.84706613,
119.36492288, 108.77038019, 88.70541115,
12.61048676, 0, 157.77516036,
43.70631550, 193.87291179, 203.26411137,
179.20054809, 148.37792309, 170.38620220,
102.23651707, 63.46142967, 82.33043919,
258.68968847, 223.94730803, 281.46276889,
350.40078080, 281.53639290, 305.90987647,
286.22932832, 356.53308940, 276.81798226,
305.04298118, 299.13866751, 310.41638501,
347.77589112, 278.37912458, 295.00398672,
292.23076451, 348.14209652, 289.14551826,
288.86118512, 299.21300848, 264.29449774,
353.26294987, 275.68453639, 279.45885854,
287.79470948, 303.34990705, 324.73398364,
337.50702196, 326.59649321, 307.14724645,
300.13203731, 335.28447725, 273.59560986,
315.71949943, 268.86100671, 309.44822617,
357.67123883, 313.70684577, 311.99209985,
277.87145259, 316.89239037, 254.39694340,
300.02140552, 237.21539997, 329.92714491,
318.32432005, 326.65600788, 305.40145477,
326.78894825, 318.92641904, 320.59443395,
308.26919092, 300.00328438, 294.61849344,
284.55947774, 277.63798594, 359.44015820,
292.55982554, 322.71946292, 318.60262991,
307.93128984, 282.51266904, 304.74114309,
285.30356994, 240.53264849, 252.69086070,
289.49431273, 284.68590654, 317.95577632,
288.39433522, 303.55186227, 286.21794163,
281.11550530, 297.15770465, 307.37441274,
290.21885096, 297.39693356, 325.12591032,
340.14615302, 314.10755364, 321.41818630,
302.46825284, 272.60859596, 285.02155314,
260.57728373, 301.01186081, 314.01532677,
301.39435122, 301.53108663, 290.81233377,
331.20632569, 329.26192444, 252.12513671,
294.17604509, 314.25160994, 260.22225619,
296.06068483, 328.70473699, 293.72532762,
323.92449714, 279.36077985, 327.10547840,
332.33552711, 244.70073987, 368.94370441,
288.52914183, 270.96734651, 321.09234466,
395.74872017, 311.64415600, 314.81990465,
319.70690366, 313.96061624, 275.38526052,
338.02460670, 286.98781666, 353.55909038,
306.62353307, 306.92733543, 273.74222557])
# # Creates a data frame object
self.df = DataFrame({'WEBSITE': Series(self.website, index=self.ids),
'DWELL_TIME': Series(self.time, index=self.ids)})
# Creates the distance matrix object
self.ids2 = np.array(['000001181.5654', '000001096.8485',
'000001348.2238', '000001239.2471',
'000001925.5603', '000001148.4367',
'000001551.0986', '000001047.9434',
'000001160.0422', '000001621.3736'])
self.map = self.df.loc[self.ids2]
dist = np.array([[0.000, 0.297, 0.257, 0.405, 0.131, 0.624, 0.934,
0.893, 0.519, 0.904],
[0.297, 0.000, 0.139, 0.130, 0.348, 1.000, 0.796,
1.000, 0.647, 0.756],
[0.257, 0.139, 0.000, 0.384, 0.057, 0.748, 0.599,
0.710, 0.528, 1.000],
[0.405, 0.130, 0.384, 0.000, 0.303, 0.851, 0.570,
0.698, 1.000, 0.638],
[0.131, 0.348, 0.057, 0.303, 0.000, 0.908, 1.000,
0.626, 0.891, 1.000],
[0.624, 1.000, 0.748, 0.851, 0.908, 0.000, 0.264,
0.379, 0.247, 0.385],
[0.934, 0.796, 0.599, 0.570, 1.000, 0.264, 0.000,
0.336, 0.326, 0.530],
[0.893, 1.000, 0.710, 0.698, 0.626, 0.379, 0.336,
0.000, 0.257, 0.450],
[0.519, 0.647, 0.528, 1.000, 0.891, 0.247, 0.326,
0.257, 0.000, 0.492],
[0.904, 0.756, 1.000, 0.638, 1.000, 0.385, 0.530,
0.450, 0.492, 0.000]])
self.dm = skbio.DistanceMatrix(dist, self.ids2)
self.taxa = ['k__Bacteria; p__[Proteobacteria]; '
'c__Gammaproteobacteria; o__; f__; g__; s__',
'k__Bacteria; p__Proteobacteria; '
'c__Gammaproteobacteria; o__Enterobacteriales; '
'f__Enterbacteriaceae; g__Escherichia; s__coli']
self.sub_p = DataFrame(np.array([['ref_group1 vs. ref_group1',
'ref_group1 vs. group1', 0.01],
['ref_group2 vs. group2',
'ref_group2 vs. ref_group2', 0.02],
['group3 vs. ref_group3',
'ref_group3 vs. ref_group3', 0.03],
['ref_group4 vs. ref_group4',
'group4 vs. ref_group4', 0.04]]),
columns=['Group 1', 'Group 2', 'p_value'])
self.sub_p.p_value = self.sub_p.p_value.astype(float)
self.sub_p_lookup = {k: set(self.sub_p[k].values) for k in
('Group 1', 'Group 2')}
def test_pad_index_default(self):
# Creates a data frame with raw ids and no sample column
df = DataFrame({'#SampleID': self.raw_ids,
'WEBSITE': Series(self.website),
'DWELL_TIME': Series(self.time)})
# Pads the raw text
df = pad_index(df)
assert_index_equal(self.df.index, df.index)
def test_pad_index_custom_index(self):
# Creates a data frame with raw ids and no sample column
df = DataFrame({'RawID': self.raw_ids,
'WEBSITE': Series(self.website),
'DWELL_TIME': Series(self.time)})
# Pads the raw text
df = pad_index(df, index_col='RawID')
assert_index_equal(self.df.index, df.index)
def test_pad_index_number(self):
# Creates a data frame with raw ids and no sample column
df = DataFrame({'#SampleID': self.raw_ids,
'WEBSITE': Series(self.website),
'DWELL_TIME': Series(self.time)})
# Pads the raw text
df = pad_index(df, nzeros=4)
assert_index_equal(Index(self.raw_ids), df.index)
def test_check_dir(self):
# Sets up a dummy directory that does not exist
does_not_exist = pjoin(TEST_DIR, 'this_dir_does_not_exist')
# Checks the directory does not currently exist
self.assertFalse(exists(does_not_exist))
# checks the directory
check_dir(does_not_exist)
# Checks the directory exists now
self.assertTrue(exists(does_not_exist))
# Removes the directory
rmdir(does_not_exist)
    def test_post_hoc_pandas(self):
        # Expected output: one row per website group with summary statistics
        # (Counts, Mean, Median, Stdv) plus pairwise post-hoc p-values
        # against the groups listed before it.
        known_index = Index(['twitter', 'facebook', 'reddit'],
                            name='WEBSITE')
        known_df = DataFrame(np.array([[100, 60.435757, 60.107124, 14.632637,
                                        np.nan, np.nan],
                                       [80, 116.671135, 119.642984, 54.642403,
                                        7.010498e-14, np.nan],
                                       [120, 302.615690, 301.999670,
                                        28.747101, 2.636073e-37,
                                        5.095701e-33]]),
                             index=known_index,
                             columns=['Counts', 'Mean', 'Median', 'Stdv',
                                      'twitter', 'facebook'])
        # Counts are integral; cast so dtype matches the function's output.
        known_df.Counts = known_df.Counts.astype('int64')
        test_df = post_hoc_pandas(self.df, 'WEBSITE', 'DWELL_TIME')
        assert_frame_equal(known_df, test_df)
    def test_multiple_correct_post_hoc(self):
        # Raw p-values are powers of ten; after Benjamini-Hochberg ('fdr_bh')
        # correction each is scaled by its rank factor, giving the 4e-x
        # pattern below.
        known_df = DataFrame(np.array([[np.nan, 4e-2, 1e-3],
                                       [4e-4, np.nan, 1e-6],
                                       [4e-7, 4e-8, np.nan]]),
                             columns=[0, 1, 2])
        raw_ph = DataFrame(np.power(10, -np.array([[np.nan, 2, 3],
                                                   [4, np.nan, 6],
                                                   [7, 8, np.nan]])),
                           columns=[0, 1, 2])
        order = np.arange(0, 3)
        test_df = multiple_correct_post_hoc(raw_ph, order, 'fdr_bh')
        assert_frame_equal(known_df, test_df)
    def test_segemented_colormap(self):
        # NOTE(review): method name misspells "segmented"; kept as-is so
        # unittest discovery and any external references stay valid.
        # Expected RGBA rows for a 5-way split of the 'YlGnBu' colormap.
        known_cmap = np.array([[0.88207613, 0.95386390, 0.69785469, 1.],
                               [0.59215687, 0.84052289, 0.72418302, 1.],
                               [0.25268744, 0.71144946, 0.76838141, 1.],
                               [0.12026144, 0.50196080, 0.72156864, 1.],
                               [0.14136102, 0.25623991, 0.60530568, 1.]])
        test_cmap = segment_colormap('YlGnBu', 5)
        npt.assert_almost_equal(test_cmap, known_cmap, 5)
def test_get_bar_height(self):
test_lowest, test_fudge = \
_get_bar_height(np.array([0.01, 0.02, 0.3, 0.52]))
npt.assert_almost_equal(test_lowest, 0.55, 3)
self.assertEqual(test_fudge, 10)
    def test_get_bar_height_fudge(self):
        """_get_bar_height honours an explicit factor argument."""
        test_lowest, test_fudge = \
            _get_bar_height(np.array([0.01, 0.02, 0.3, 0.52]), factor=3)
        self.assertEqual(test_lowest, 0.54)
        self.assertEqual(test_fudge, 10)
    def test_get_p_value(self):
        """_get_p_value looks up the p-value for each (ref, group) pair."""
        self.assertEqual(_get_p_value(self.sub_p, self.sub_p_lookup,
                                      'ref_group1', 'group1', 'p_value'), 0.01)
        self.assertEqual(_get_p_value(self.sub_p, self.sub_p_lookup,
                                      'ref_group2', 'group2', 'p_value'), 0.02)
        self.assertEqual(_get_p_value(self.sub_p, self.sub_p_lookup,
                                      'ref_group3', 'group3', 'p_value'), 0.03)
        self.assertEqual(_get_p_value(self.sub_p, self.sub_p_lookup,
                                      'ref_group4', 'group4', 'p_value'), 0.04)
    def test_get_p_value_error(self):
        """An unknown (ref, group) pair raises ValueError."""
        with self.assertRaises(ValueError):
            _get_p_value(self.sub_p, self.sub_p_lookup, 'ref_group',
                         'group', 'p_value')
    def test_correct_p_value_no_tail(self):
        """Without a tail, the p-value passes through unchanged."""
        p_value = 0.05
        tail = False
        self.assertEqual(_correct_p_value(tail, p_value, 1, 1), p_value)
    def test_correct_p_value_no_greater_ref(self):
        """With a tail and a larger reference value, the p-value becomes 1."""
        p_value = 0.05
        tail = True
        self.assertEqual(_correct_p_value(tail, p_value, 2, 1), 1)
    def test_correct_p_value_no_less_ref(self):
        """With a tail but a smaller reference value, the p-value is kept."""
        p_value = 0.05
        tail = True
        self.assertEqual(_correct_p_value(tail, p_value, 1, 2), p_value)
def test_get_distance_vectors(self):
known_within = {'twitter': np.array([0.297, 0.257, 0.405, 0.131,
0.139, 0.130, 0.348, 0.384,
0.057, 0.303]),
'reddit': np.array([0.264, 0.379, 0.247, 0.385, 0.336,
0.326, 0.530, 0.257, 0.450,
0.492])}
known_between = {('twitter', 'reddit'): np.array([0.624, 0.934, 0.893,
0.519, 0.904, 1.000,
0.796, 1.000, 0.647,
0.756, 0.748, 0.599,
0.710, 0.528, 1.000,
0.851, 0.570, 0.698,
1.000, 0.638, 0.908,
1.000, 0.626, 0.891,
1.000])}
test_within, test_between = \
get_distance_vectors(dm=self.dm,
df=self.map,
group='WEBSITE',
order=['twitter', 'reddit'])
# Tests the results
self.assertEqual(known_within.keys(), test_within.keys())
self.assertEqual(known_between.keys(), test_between.keys())
for k, a in test_within.iteritems():
npt.assert_array_equal(known_within[k], a)
for k, a in test_between.iteritems():
npt.assert_array_equal(known_between[k], a)
    def test_split_taxa_error(self):
        """split_taxa raises ValueError when fewer levels than requested exist."""
        with self.assertRaises(ValueError):
            split_taxa(['k__Bacteria; p__[Proteobacteria]; '
                        'c__Gammaproteobacteria'], 7)
    def test_split_taxa(self):
        """split_taxa fills missing ranks and returns level names in order."""
        known_taxa = np.array([['Bacteria', 'cont. Proteobacteria',
                                'Gammaproteobacteria',
                                'c. Gammaproteobacteria',
                                'c. Gammaproteobacteria',
                                'c. Gammaproteobacteria',
                                'c. Gammaproteobacteria'],
                               ['Bacteria', 'Proteobacteria',
                                'Gammaproteobacteria', 'Enterobacteriales',
                                'Enterbacteriaceae', 'Escherichia', 'coli']],
                              dtype='|S32')
        known_levels = ['kingdom', 'phylum', 'p_class', 'p_order', 'family',
                        'genus', 'species']
        test_taxa, test_levels = split_taxa(self.taxa, 7)
        self.assertEqual(known_levels, test_levels)
        npt.assert_array_equal(known_taxa, test_taxa)
    def test_get_ratio_heatmap(self):
        """get_ratio_heatmap scales each row by its mean."""
        data = np.array([[1, 2, 3, 4],
                         [2, 4, 6, 8],
                         [3, 6, 9, 12],
                         [4, 8, 12, 16]])
        known = np.array([[0.4, 0.8, 1.2, 1.6],
                          [0.4, 0.8, 1.2, 1.6],
                          [0.4, 0.8, 1.2, 1.6],
                          [0.4, 0.8, 1.2, 1.6]])
        test = get_ratio_heatmap(data)
        npt.assert_array_equal(test, known)
    def test_get_ratio_heatmap_log(self):
        """get_ratio_heatmap with ref_pos and log takes log-ratios to a column."""
        data = np.array([[2, 4, 8, 16],
                         [1, 4, 16, 256]])
        known = np.array([[0, 1, 2, 3],
                          [0, 2, 4, 8]])
        test = get_ratio_heatmap(data, ref_pos=0, log=2)
        npt.assert_array_equal(test, known)
if __name__ == '__main__':
main()
| bsd-3-clause |
ovnicraft/edx-platform | lms/djangoapps/notes/migrations/0001_initial.py | 50 | 1404 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import xmodule_django.models
class Migration(migrations.Migration):
    # Initial schema for the student notes (annotation) feature: one Note
    # model tied to a user and a course, storing the annotated range.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Note',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('course_id', xmodule_django.models.CourseKeyField(max_length=255, db_index=True)),
                ('uri', models.CharField(max_length=255, db_index=True)),
                # b'' defaults are Python-2-era bytes literals kept verbatim
                # so the migration state stays byte-identical.
                ('text', models.TextField(default=b'')),
                ('quote', models.TextField(default=b'')),
                ('range_start', models.CharField(max_length=2048)),
                ('range_start_offset', models.IntegerField()),
                ('range_end', models.CharField(max_length=2048)),
                ('range_end_offset', models.IntegerField()),
                ('tags', models.TextField(default=b'')),
                ('created', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
                ('updated', models.DateTimeField(auto_now=True, db_index=True)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| agpl-3.0 |
SlamJam/asq | asq/test/test_where.py | 5 | 1156 | import unittest
from asq.queryables import Queryable
from asq.test.test_queryable import infinite, TracingGenerator
__author__ = "Robert Smallshire"
class TestWhere(unittest.TestCase):
    def test_where(self):
        """where() keeps exactly the elements matching the predicate."""
        a = range(0, 100)
        b = Queryable(a).where(lambda x: x % 3 == 0).to_list()
        c = list(range(0, 100, 3))
        self.assertEqual(b, c)
    def test_where_not_callable(self):
        """where() rejects a non-callable predicate with TypeError."""
        a = range(0, 100)
        self.assertRaises(TypeError, lambda: Queryable(a).where("not callable"))
    def test_where_infinite(self):
        """where() is lazy enough to work on an infinite sequence."""
        a = infinite()
        b = Queryable(a).where(lambda x: x % 5 == 0).take(3).to_list()
        c = [0, 5, 10]
        self.assertEqual(b, c)
    def test_where_deferred(self):
        """where() consumes nothing until the query is actually iterated."""
        a = TracingGenerator()
        self.assertEqual(a.trace, [])
        b = Queryable(a).where(lambda x: x % 3 == 0)
        self.assertEqual(a.trace, [])
        c = b.take(2).to_list()
        # take(2) needs the matches 0 and 3, so elements 0..3 were pulled.
        self.assertEqual(a.trace, [0, 1, 2, 3])
def test_where_closed(self):
a = range(0, 100)
b = Queryable(a)
b.close()
self.assertRaises(ValueError, lambda: b.where(lambda x: x % 3 == 0)) | mit |
grevutiu-gabriel/sympy | sympy/printing/tests/test_conventions.py | 97 | 3780 | from sympy import symbols, Derivative, Integral, exp, cos, oo, Function
from sympy.functions.special.bessel import besselj
from sympy.functions.special.polynomials import legendre
from sympy.functions.combinatorial.numbers import bell
from sympy.printing.conventions import split_super_sub, requires_partial
def test_super_sub():
    """split_super_sub splits a symbol name into (base, superscripts, subscripts)."""
    assert split_super_sub("beta_13_2") == ("beta", [], ["13", "2"])
    assert split_super_sub("beta_132_20") == ("beta", [], ["132", "20"])
    assert split_super_sub("beta_13") == ("beta", [], ["13"])
    assert split_super_sub("x_a_b") == ("x", [], ["a", "b"])
    assert split_super_sub("x_1_2_3") == ("x", [], ["1", "2", "3"])
    assert split_super_sub("x_a_b1") == ("x", [], ["a", "b1"])
    assert split_super_sub("x_a_1") == ("x", [], ["a", "1"])
    assert split_super_sub("x_1_a") == ("x", [], ["1", "a"])
    # '^' and '__' are both accepted as superscript markers.
    assert split_super_sub("x_1^aa") == ("x", ["aa"], ["1"])
    assert split_super_sub("x_1__aa") == ("x", ["aa"], ["1"])
    assert split_super_sub("x_11^a") == ("x", ["a"], ["11"])
    assert split_super_sub("x_11__a") == ("x", ["a"], ["11"])
    assert split_super_sub("x_a_b_c_d") == ("x", [], ["a", "b", "c", "d"])
    assert split_super_sub("x_a_b^c^d") == ("x", ["c", "d"], ["a", "b"])
    assert split_super_sub("x_a_b__c__d") == ("x", ["c", "d"], ["a", "b"])
    assert split_super_sub("x_a^b_c^d") == ("x", ["b", "d"], ["a", "c"])
    assert split_super_sub("x_a__b_c__d") == ("x", ["b", "d"], ["a", "c"])
    assert split_super_sub("x^a^b_c_d") == ("x", ["a", "b"], ["c", "d"])
    assert split_super_sub("x__a__b_c_d") == ("x", ["a", "b"], ["c", "d"])
    assert split_super_sub("x^a^b^c^d") == ("x", ["a", "b", "c", "d"], [])
    assert split_super_sub("x__a__b__c__d") == ("x", ["a", "b", "c", "d"], [])
    assert split_super_sub("alpha_11") == ("alpha", [], ["11"])
    assert split_super_sub("alpha_11_11") == ("alpha", [], ["11", "11"])
def test_requires_partial():
    """requires_partial decides whether a derivative prints with a partial symbol."""
    x, y, z, t, nu = symbols('x y z t nu')
    n = symbols('n', integer=True)

    f = x * y
    assert requires_partial(Derivative(f, x)) is True
    assert requires_partial(Derivative(f, y)) is True

    ## integrating out one of the variables
    assert requires_partial(Derivative(Integral(exp(-x * y), (x, 0, oo)), y, evaluate=False)) is False

    ## bessel function with smooth parameter
    f = besselj(nu, x)
    assert requires_partial(Derivative(f, x)) is True
    assert requires_partial(Derivative(f, nu)) is True

    ## bessel function with integer parameter
    f = besselj(n, x)
    assert requires_partial(Derivative(f, x)) is False
    # this is not really valid (differentiating with respect to an integer)
    # but there's no reason to use the partial derivative symbol there. make
    # sure we don't throw an exception here, though
    assert requires_partial(Derivative(f, n)) is False

    ## bell polynomial
    f = bell(n, x)
    assert requires_partial(Derivative(f, x)) is False
    # again, invalid
    assert requires_partial(Derivative(f, n)) is False

    ## legendre polynomial
    f = legendre(0, x)
    assert requires_partial(Derivative(f, x)) is False
    f = legendre(n, x)
    assert requires_partial(Derivative(f, x)) is False
    # again, invalid
    assert requires_partial(Derivative(f, n)) is False

    f = x ** n
    assert requires_partial(Derivative(f, x)) is False

    assert requires_partial(Derivative(Integral((x*y) ** n * exp(-x * y), (x, 0, oo)), y, evaluate=False)) is False

    # parametric equation
    f = (exp(t), cos(t))
    g = sum(f)
    assert requires_partial(Derivative(g, t)) is False

    # function of unspecified variables
    f = symbols('f', cls=Function)
    assert requires_partial(Derivative(f, x)) is False
    assert requires_partial(Derivative(f, x, y)) is True
pchri03/net-next | scripts/gdb/linux/utils.py | 630 | 4267 | #
# gdb helper commands and functions for Linux kernel debugging
#
# common utilities
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
class CachedType:
    """Lazily resolved gdb type, invalidated whenever a new objfile loads."""

    def __init__(self, name):
        self._type = None  # resolved gdb type object, None until first lookup
        self._name = name

    def _new_objfile_handler(self, event):
        # A new objfile may redefine the type: drop the cached value and
        # detach; get_type() re-registers the handler on the next lookup.
        self._type = None
        gdb.events.new_objfile.disconnect(self._new_objfile_handler)

    def get_type(self):
        # Resolve and cache the type on first use; raise if gdb cannot
        # find it (e.g. symbols not loaded yet).
        if self._type is None:
            self._type = gdb.lookup_type(self._name)
            if self._type is None:
                raise gdb.GdbError(
                    "cannot resolve type '{0}'".format(self._name))
            if hasattr(gdb, 'events') and hasattr(gdb.events, 'new_objfile'):
                gdb.events.new_objfile.connect(self._new_objfile_handler)
        return self._type
long_type = CachedType("long")
def get_long_type():
    """Return the cached gdb type object for the C ``long`` type."""
    # Reading a module-level name needs no 'global' declaration.
    return long_type.get_type()
def offset_of(typeobj, field):
    """Return the byte offset of *field* within the pointed-to struct."""
    # Cast a NULL pointer to the struct pointer type; the address of the
    # field is then numerically equal to its offset.
    element = gdb.Value(0).cast(typeobj)
    return int(str(element[field].address).split()[0], 16)
def container_of(ptr, typeobj, member):
    """Given *ptr* to *member*, return a pointer to the containing struct."""
    return (ptr.cast(get_long_type()) -
            offset_of(typeobj, member)).cast(typeobj)
class ContainerOf(gdb.Function):
    """Return pointer to containing data structure.

$container_of(PTR, "TYPE", "ELEMENT"): Given PTR, return a pointer to the
data structure of the type TYPE in which PTR is the address of ELEMENT.
Note that TYPE and ELEMENT have to be quoted as strings."""

    def __init__(self):
        super(ContainerOf, self).__init__("container_of")

    def invoke(self, ptr, typename, elementname):
        # typename/elementname arrive as gdb string values; unwrap them.
        return container_of(ptr, gdb.lookup_type(typename.string()).pointer(),
                            elementname.string())
ContainerOf()
BIG_ENDIAN = 0
LITTLE_ENDIAN = 1
target_endianness = None
def get_target_endianness():
    """Return LITTLE_ENDIAN or BIG_ENDIAN for the inferior, caching the answer."""
    global target_endianness
    if target_endianness is None:
        # Parse gdb's human-readable "show endian" output once.
        endian = gdb.execute("show endian", to_string=True)
        if "little endian" in endian:
            target_endianness = LITTLE_ENDIAN
        elif "big endian" in endian:
            target_endianness = BIG_ENDIAN
        else:
            raise gdb.GdbError("unknown endianness '{0}'".format(str(endian)))
    return target_endianness
def read_u16(buffer):
    """Decode buffer[0:2] as an unsigned 16-bit integer in target byte order."""
    # NOTE: ord() on indexed elements implies Python-2 style byte strings.
    if get_target_endianness() == LITTLE_ENDIAN:
        return ord(buffer[0]) + (ord(buffer[1]) << 8)
    else:
        return ord(buffer[1]) + (ord(buffer[0]) << 8)
def read_u32(buffer):
    """Decode buffer[0:4] as an unsigned 32-bit integer in target byte order."""
    if get_target_endianness() == LITTLE_ENDIAN:
        low_half, high_half = buffer[0:2], buffer[2:4]
    else:
        high_half, low_half = buffer[0:2], buffer[2:4]
    return read_u16(low_half) + (read_u16(high_half) << 16)
def read_u64(buffer):
    """Decode buffer[0:8] as an unsigned 64-bit integer in target byte order."""
    if get_target_endianness() == LITTLE_ENDIAN:
        low_half, high_half = buffer[0:4], buffer[4:8]
    else:
        high_half, low_half = buffer[0:4], buffer[4:8]
    return read_u32(low_half) + (read_u32(high_half) << 32)
target_arch = None
def is_target_arch(arch):
    """Return True if the target architecture name contains *arch*."""
    if hasattr(gdb.Frame, 'architecture'):
        # Newer gdb exposes the architecture directly on the frame.
        return arch in gdb.newest_frame().architecture().name()
    else:
        # Fall back to parsing "show architecture" output, cached globally.
        global target_arch
        if target_arch is None:
            target_arch = gdb.execute("show architecture", to_string=True)
        return arch in target_arch
GDBSERVER_QEMU = 0
GDBSERVER_KGDB = 1
gdbserver_type = None
def get_gdbserver_type():
    """Detect and cache the remote stub type (QEMU or KGDB), or None."""

    def exit_handler(event):
        # Forget the detection when the inferior exits; the next session
        # may attach to a different kind of server.
        global gdbserver_type
        gdbserver_type = None
        gdb.events.exited.disconnect(exit_handler)

    def probe_qemu():
        # QEMU answers its monitor commands; anything else errors out.
        try:
            return gdb.execute("monitor info version", to_string=True) != ""
        except:
            return False

    def probe_kgdb():
        # KGDB exposes per-CPU shadow threads named shadowCPU<n>.
        try:
            thread_info = gdb.execute("info thread 2", to_string=True)
            return "shadowCPU0" in thread_info
        except:
            return False

    global gdbserver_type
    if gdbserver_type is None:
        if probe_qemu():
            gdbserver_type = GDBSERVER_QEMU
        elif probe_kgdb():
            gdbserver_type = GDBSERVER_KGDB
        if gdbserver_type is not None and hasattr(gdb, 'events'):
            gdb.events.exited.connect(exit_handler)
    return gdbserver_type
| gpl-2.0 |
oaastest/Azure-MachineLearning-ClientLibrary-Python | azureml/serialization.py | 4 | 5330 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License:
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#--------------------------------------------------------------------------
from functools import partial
import codecs
import pandas as pd
from azureml.errors import (
UnsupportedDatasetTypeError,
_not_none,
_not_none_or_empty,
)
class DataTypeIds(object):
    """Constants for the known dataset data type id strings."""
    # Keys must match the entries in the _SERIALIZERS table below.
    ARFF = 'ARFF'
    PlainText = 'PlainText'
    GenericCSV = 'GenericCSV'
    GenericTSV = 'GenericTSV'
    GenericCSVNoHeader = 'GenericCSVNoHeader'
    GenericTSVNoHeader = 'GenericTSVNoHeader'
def _dataframe_to_csv(writer, dataframe, delimiter, with_header):
"""serialize the dataframe with different delimiters"""
encoding_writer = codecs.getwriter('utf-8')(writer)
dataframe.to_csv(
path_or_buf=encoding_writer,
sep=delimiter,
header=with_header,
index=False
)
def _dataframe_to_txt(writer, dataframe):
encoding_writer = codecs.getwriter('utf-8')(writer)
for row in dataframe.iterrows():
encoding_writer.write("".join(row[1].tolist()))
encoding_writer.write('\n')
def _dataframe_from_csv(reader, delimiter, with_header, skipspace):
"""Returns csv data as a pandas Dataframe object"""
sep = delimiter
header = 0
if not with_header:
header = None
return pd.read_csv(
reader,
header=header,
sep=sep,
skipinitialspace=skipspace,
encoding='utf-8-sig'
)
def _dataframe_from_txt(reader):
    """Returns PlainText data as a pandas Dataframe object"""
    # One line per row, single unnamed column.
    # NOTE(review): sep="\n" is accepted by the pandas versions this
    # targeted; newer pandas releases may reject a newline separator --
    # confirm against the supported pandas version.
    return pd.read_csv(reader, header=None, sep="\n", encoding='utf-8-sig')
# Maps a DataTypeIds constant to its (serializer, deserializer) pair.
_SERIALIZERS = {
    DataTypeIds.PlainText: (
        _dataframe_to_txt,
        _dataframe_from_txt,
    ),
    DataTypeIds.GenericCSV: (
        partial(_dataframe_to_csv, delimiter=',', with_header=True),
        partial(_dataframe_from_csv, delimiter=',', with_header=True, skipspace=True),
    ),
    DataTypeIds.GenericCSVNoHeader: (
        partial(_dataframe_to_csv, delimiter=',', with_header=False),
        partial(_dataframe_from_csv, delimiter=',', with_header=False, skipspace=True),
    ),
    DataTypeIds.GenericTSV: (
        partial(_dataframe_to_csv, delimiter='\t', with_header=True),
        partial(_dataframe_from_csv, delimiter='\t', with_header=True, skipspace=False),
    ),
    DataTypeIds.GenericTSVNoHeader: (
        partial(_dataframe_to_csv, delimiter='\t', with_header=False),
        partial(_dataframe_from_csv, delimiter='\t', with_header=False, skipspace=False),
    ),
}
def serialize_dataframe(writer, data_type_id, dataframe):
    """
    Serialize a dataframe.

    Parameters
    ----------
    writer : file
        File-like object to write to. Must be opened in binary mode.
    data_type_id : dict
        Serialization format to use.
        See the azureml.DataTypeIds class for constants.
    dataframe: pandas.DataFrame
        Dataframe to serialize.
    """
    _not_none('writer', writer)
    _not_none_or_empty('data_type_id', data_type_id)
    _not_none('dataframe', dataframe)

    entry = _SERIALIZERS.get(data_type_id)
    if entry is None:
        raise UnsupportedDatasetTypeError(data_type_id)
    serialize, _ = entry
    serialize(writer=writer, dataframe=dataframe)
def deserialize_dataframe(reader, data_type_id):
    """
    Deserialize a dataframe.

    Parameters
    ----------
    reader : file
        File-like object to read from. Must be opened in binary mode.
    data_type_id : dict
        Serialization format of the raw data.
        See the azureml.DataTypeIds class for constants.

    Returns
    -------
    pandas.DataFrame
        Dataframe object.
    """
    _not_none('reader', reader)
    _not_none_or_empty('data_type_id', data_type_id)

    entry = _SERIALIZERS.get(data_type_id)
    if entry is None:
        raise UnsupportedDatasetTypeError(data_type_id)
    _, deserialize = entry
    return deserialize(reader=reader)
def is_supported(data_type_id):
    """Return if a serializer is available for the specified format."""
    _not_none_or_empty('data_type_id', data_type_id)
    # Membership test is equivalent: table values are always non-None pairs.
    return data_type_id in _SERIALIZERS
| mit |
renshawbay/pika-python3 | pika/amqp_object.py | 18 | 1659 | """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
    """Base object that is extended by AMQP low level frames and AMQP classes
    and methods.

    """
    NAME = 'AMQPObject'
    INDEX = None

    def __repr__(self):
        # Show only attributes whose value differs from the class default.
        attrs = ['%s=%s' % (key, value)
                 for key, value in self.__dict__.items()
                 if getattr(self.__class__, key, None) != value]
        if not attrs:
            return "<%s>" % self.NAME
        return "<%s(%s)>" % (self.NAME, sorted(attrs))
class Class(AMQPObject):
    """Is extended by AMQP classes"""
    NAME = 'Unextended Class'  # overridden by concrete AMQP classes
class Method(AMQPObject):
    """Is extended by AMQP methods"""
    NAME = 'Unextended Method'
    # Whether the method expects a synchronous response from the broker.
    synchronous = False

    def _set_content(self, properties, body):
        """If the method is a content frame, set the properties and body to
        be carried as attributes of the class.

        :param pika.frame.Properties properties: AMQP Basic Properties
        :param body: The message body
        :type body: str or unicode

        """
        self._properties = properties
        self._body = body

    def get_properties(self):
        """Return the properties if they are set.

        :rtype: pika.frame.Properties

        """
        return self._properties

    def get_body(self):
        """Return the message body if it is set.

        :rtype: str|unicode

        """
        return self._body
class Properties(AMQPObject):
    """Class to encompass message properties (AMQP Basic.Properties)"""
    NAME = 'Unextended Properties'  # overridden by concrete property classes
| mpl-2.0 |
martinribelotta/micropython | tests/extmod/uctypes_sizeof_native.py | 61 | 1036 | import uctypes
# Empty struct occupies no space.
S1 = {}
assert uctypes.sizeof(S1) == 0

# Single byte field.
S2 = {"a": uctypes.UINT8 | 0}
assert uctypes.sizeof(S2) == 1

S3 = {
    "a": uctypes.UINT8 | 0,
    "b": uctypes.UINT8 | 1,
}
assert uctypes.sizeof(S3) == 2

# Alignment: the UINT32 at offset 4 plus the trailing byte pads to 12.
S4 = {
    "a": uctypes.UINT8 | 0,
    "b": uctypes.UINT32 | 4,
    "c": uctypes.UINT8 | 8,
}
assert uctypes.sizeof(S4) == 12

# Overlapping fields (union-style) and a nested sub-struct at offset 4.
S5 = {
    "a": uctypes.UINT8 | 0,
    "b": uctypes.UINT32 | 4,
    "c": uctypes.UINT8 | 8,
    "d": uctypes.UINT32 | 0,
    "sub": (4, {
        "b0": uctypes.UINT8 | 0,
        "b1": uctypes.UINT8 | 1,
    }),
}
assert uctypes.sizeof(S5) == 12
# sizeof also works on an instantiated struct and on a sub-struct.
s5 = uctypes.struct(0, S5)
assert uctypes.sizeof(s5) == 12
assert uctypes.sizeof(s5.sub) == 2

# Pointer size depends on the architecture word size.
S6 = {
    "ptr": (uctypes.PTR | 0, uctypes.UINT8),
}
# As if there're no other arch bitnesses
assert uctypes.sizeof(S6) in (4, 8)

# Array of 5 bytes.
S7 = {
    "arr": (uctypes.ARRAY | 0, uctypes.UINT8 | 5),
}
assert uctypes.sizeof(S7) == 5

# Array of 3 aggregates, each padded to 8 bytes.
S8 = {
    "arr": (uctypes.ARRAY | 0, 3, {"a": uctypes.UINT32 | 0, "b": uctypes.UINT8 | 4}),
}
assert uctypes.sizeof(S8) == 24
rrampage/rethinkdb | external/v8_3.30.33.16/testing/gmock/scripts/generator/cpp/gmock_class.py | 62 | 8286 | #!/usr/bin/env python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate Google Mock classes from base classes.
This program will read in a C++ source file and output the Google Mock
classes for the specified classes. If no class is specified, all
classes in the source file are emitted.
Usage:
gmock_class.py header-file.h [ClassName]...
Output is sent to stdout.
"""
__author__ = 'nnorwitz@google.com (Neal Norwitz)'
import os
import re
import sys
from cpp import ast
from cpp import utils
# Preserve compatibility with Python 2.3.
try:
_dummy = set
except NameError:
import sets
set = sets.Set
_VERSION = (1, 0, 1) # The version of this script.
# How many spaces to indent. Can set me with the INDENT environment variable.
_INDENT = 2
def _GenerateMethods(output_lines, source, class_node):
  """Append MOCK_METHOD* lines for every virtual method of class_node.

  output_lines: list receiving the generated text.
  source: full source text, used to recover parameter spellings.
  class_node: the ast.Class being mocked.
  """
  function_type = (ast.FUNCTION_VIRTUAL | ast.FUNCTION_PURE_VIRTUAL |
                   ast.FUNCTION_OVERRIDE)
  ctor_or_dtor = ast.FUNCTION_CTOR | ast.FUNCTION_DTOR
  indent = ' ' * _INDENT

  for node in class_node.body:
    # We only care about virtual functions.
    if (isinstance(node, ast.Function) and
        node.modifiers & function_type and
        not node.modifiers & ctor_or_dtor):
      # Pick out all the elements we need from the original function.
      const = ''
      if node.modifiers & ast.FUNCTION_CONST:
        const = 'CONST_'
      return_type = 'void'
      if node.return_type:
        # Add modifiers like 'const'.
        modifiers = ''
        if node.return_type.modifiers:
          modifiers = ' '.join(node.return_type.modifiers) + ' '
        return_type = modifiers + node.return_type.name
        template_args = [arg.name for arg in node.return_type.templated_types]
        if template_args:
          return_type += '<' + ', '.join(template_args) + '>'
          if len(template_args) > 1:
            for line in [
                '// The following line won\'t really compile, as the return',
                '// type has multiple template arguments. To fix it, use a',
                '// typedef for the return type.']:
              output_lines.append(indent + line)
        if node.return_type.pointer:
          return_type += '*'
        if node.return_type.reference:
          return_type += '&'
        num_parameters = len(node.parameters)
        if len(node.parameters) == 1:
          first_param = node.parameters[0]
          if source[first_param.start:first_param.end].strip() == 'void':
            # We must treat T(void) as a function with no parameters.
            num_parameters = 0
      tmpl = ''
      if class_node.templated_types:
        tmpl = '_T'
      mock_method_macro = 'MOCK_%sMETHOD%d%s' % (const, num_parameters, tmpl)
      args = ''
      if node.parameters:
        # Due to the parser limitations, it is impossible to keep comments
        # while stripping the default parameters. When defaults are
        # present, we choose to strip them and comments (and produce
        # compilable code).
        # TODO(nnorwitz@google.com): Investigate whether it is possible to
        # preserve parameter name when reconstructing parameter text from
        # the AST.
        if len([param for param in node.parameters if param.default]) > 0:
          args = ', '.join(param.type.name for param in node.parameters)
        else:
          # Get the full text of the parameters from the start
          # of the first parameter to the end of the last parameter.
          start = node.parameters[0].start
          end = node.parameters[-1].end
          # Remove // comments.
          args_strings = re.sub(r'//.*', '', source[start:end])
          # Condense multiple spaces and eliminate newlines putting the
          # parameters together on a single line. Ensure there is a
          # space in an argument which is split by a newline without
          # intervening whitespace, e.g.: int\nBar
          args = re.sub(' +', ' ', args_strings.replace('\n', ' '))

      # Create the mock method definition.
      output_lines.extend(['%s%s(%s,' % (indent, mock_method_macro, node.name),
                           '%s%s(%s));' % (indent*3, return_type, args)])
def _GenerateMocks(filename, source, ast_list, desired_class_names):
  """Return the generated mock-class text as a list of lines.

  desired_class_names of None selects every class found in ast_list;
  otherwise a warning is written for any requested class not found.
  """
  processed_class_names = set()
  lines = []
  for node in ast_list:
    if (isinstance(node, ast.Class) and node.body and
        # desired_class_names being None means that all classes are selected.
        (not desired_class_names or node.name in desired_class_names)):
      class_name = node.name
      parent_name = class_name
      processed_class_names.add(class_name)
      class_node = node
      # Add namespace before the class.
      if class_node.namespace:
        lines.extend(['namespace %s {' % n for n in class_node.namespace]) # }
        lines.append('')

      # Add template args for templated classes.
      if class_node.templated_types:
        # TODO(paulchang): The AST doesn't preserve template argument order,
        # so we have to make up names here.
        # TODO(paulchang): Handle non-type template arguments (e.g.
        # template<typename T, int N>).
        template_arg_count = len(class_node.templated_types.keys())
        template_args = ['T%d' % n for n in range(template_arg_count)]
        template_decls = ['typename ' + arg for arg in template_args]
        lines.append('template <' + ', '.join(template_decls) + '>')
        parent_name += '<' + ', '.join(template_args) + '>'

      # Add the class prolog.
      lines.append('class Mock%s : public %s {' # }
                   % (class_name, parent_name))
      lines.append('%spublic:' % (' ' * (_INDENT // 2)))

      # Add all the methods.
      _GenerateMethods(lines, source, class_node)

      # Close the class.
      if lines:
        # If there are no virtual methods, no need for a public label.
        if len(lines) == 2:
          del lines[-1]

        # Only close the class if there really is a class.
        lines.append('};')
        lines.append('') # Add an extra newline.

      # Close the namespace.
      if class_node.namespace:
        for i in range(len(class_node.namespace)-1, -1, -1):
          lines.append('} // namespace %s' % class_node.namespace[i])
        lines.append('') # Add an extra newline.

  if desired_class_names:
    missing_class_name_list = list(desired_class_names - processed_class_names)
    if missing_class_name_list:
      missing_class_name_list.sort()
      sys.stderr.write('Class(es) not found in %s: %s\n' %
                       (filename, ', '.join(missing_class_name_list)))
  elif not processed_class_names:
    sys.stderr.write('No class found in %s\n' % filename)

  return lines
def main(argv=sys.argv):
  """Command-line entry point: parse a header and print its mock classes.

  argv[1] is the header file; any further arguments name the classes to
  mock (default: all classes in the file). Returns 1 on usage/read errors.
  """
  if len(argv) < 2:
    sys.stderr.write('Google Mock Class Generator v%s\n\n' %
                     '.'.join(map(str, _VERSION)))
    sys.stderr.write(__doc__)
    return 1

  global _INDENT
  try:
    _INDENT = int(os.environ['INDENT'])
  except KeyError:
    pass  # INDENT not set; keep the default.
  except ValueError:
    # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit are
    # not swallowed; int() on a non-numeric string raises ValueError.
    sys.stderr.write('Unable to use indent of %s\n' % os.environ.get('INDENT'))

  filename = argv[1]
  desired_class_names = None  # None means all classes in the source file.
  if len(argv) >= 3:
    desired_class_names = set(argv[2:])
  source = utils.ReadFile(filename)
  if source is None:
    return 1

  builder = ast.BuilderFromSource(source, filename)
  try:
    entire_ast = filter(None, builder.Generate())
  except KeyboardInterrupt:
    return
  except Exception:
    # Narrowed from a bare 'except:'.
    # An error message was already printed since we couldn't parse.
    pass
  else:
    lines = _GenerateMocks(filename, source, entire_ast, desired_class_names)
    sys.stdout.write('\n'.join(lines))
| agpl-3.0 |
sapcc/monasca-agent | monasca_setup/agent_config.py | 1 | 4798 | # (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
"""Classes to aid in configuration of the agent."""
import collections
import logging
import os
import pwd
import yaml
log = logging.getLogger(__name__)
class Plugins(collections.defaultdict):
    """A container for the plugin configurations used by the monasca-agent.

    This is essentially a defaultdict(dict) but put into a class primarily to make the interface clear, also
    to add a couple of helper methods.
    Each plugin config is stored with the key being its config name (excluding .yaml).
    The value a dict which will convert to yaml.
    """

    def __init__(self):
        super(Plugins, self).__init__(dict)

    # todo Possibly enforce the key being a string without .yaml in it.

    def diff(self, other_plugins):
        # Placeholder: computing config differences is not yet implemented.
        raise NotImplementedError

    def merge(self, other):
        """Do a deep merge with precedence going to other (as is the case with update).

        """
        # Implemented as a function so it can be used for arbitrary dictionaries not just self, this is needed
        # for the recursive nature of the merge.
        deep_merge(self, other)
def deep_merge(adict, other):
    """A recursive merge of two dictionaries including combining of any lists
    within the data structure.

    Values from *other* take precedence: nested dicts are merged key by key,
    lists are concatenated (adict's elements first), everything else is
    overwritten. Keys only present in *other* are added to *adict*.
    """
    # items() works on both Python 2 and 3; iteritems() was Python-2-only.
    for key, value in other.items():
        if key in adict and isinstance(adict[key], dict) and isinstance(value, dict):
            deep_merge(adict[key], value)
        elif key in adict and isinstance(adict[key], list) and isinstance(value, list):
            adict[key] += value
        else:
            # Covers both type mismatches and keys new to adict, so the
            # merge behaves like dict.update() for plain values.
            adict[key] = value
def merge_by_name(first, second):
    """Merge a list of dictionaries replacing any dictionaries with the same 'name' value rather than merging.

    The precedence goes to first: entries of *second* are appended only when
    their 'name' is absent from *first* (or they have no 'name' at all).
    """
    known_names = []
    for entry in first:
        if 'name' in entry:
            known_names.append(entry['name'])
    for candidate in second:
        unnamed = 'name' not in candidate
        if unnamed or candidate['name'] not in known_names:
            first.append(candidate)
def read_plugin_config_from_disk(config_dir, plugin_name):
    """Reads from the Agent on disk configuration the config for a specific plugin

    :param config_dir: Monasca Agent configuration directory
    :param plugin_name: The name of the check plugin
    :return: Dictionary of parsed yaml content, or None when no config exists
    """
    config_path = os.path.join(config_dir, 'conf.d', plugin_name + '.yaml')
    if not os.path.exists(config_path):
        return None
    with open(config_path, 'r') as config_file:
        return yaml.safe_load(config_file.read())
def save_plugin_config(config_dir, plugin_name, user, conf):
    """Writes configuration for plugin_name to disk in the config_dir

    :param config_dir: Monasca Agent configuration directory
    :param plugin_name: The name of the check plugin
    :param user: The username Monasca-agent will run as
    :param conf: The value of the configuration to write to disk
    :return: None
    """
    config_path = os.path.join(config_dir, 'conf.d', plugin_name + '.yaml')
    with open(config_path, 'w') as config_file:
        # The gid is created on service activation which we assume has happened
        config_file.write(yaml.safe_dump(conf,
                                         encoding='utf-8',
                                         allow_unicode=True,
                                         default_flow_style=False))
    stat = pwd.getpwnam(user)
    gid = stat.pw_gid
    uid = stat.pw_uid
    # Owner read/write, group read: config may contain credentials.
    os.chmod(config_path, 0o640)
    os.chown(config_path, uid, gid)
def check_endpoint_changes(value, config):
    """Change urls in config with same path but different protocols into new
    endpoints.

    :param value: new plugin config; its first instance carries the new url
    :param config: existing config, mutated in place and returned
    """
    new_url = value['instances'][0]['url']
    new_path = new_url.split("://")[1]
    # Iterate the instances directly rather than enumerating a filtered url
    # list: the previous filtered-list index mis-addressed
    # config['instances'] whenever an instance lacked a 'url' key.
    for instance in config['instances']:
        if 'url' not in instance:
            continue
        if instance['url'].split("://")[1] == new_path:
            # Preserve a custom name; only rewrite it when it mirrored the url.
            if instance.get('name') == instance['url']:
                instance['name'] = new_url
            instance['url'] = new_url
    return config
def delete_from_config(args, config, file_path, plugin_name):
    """Write back (or remove) a plugin config after instances were removed.

    With args.dry_run only a log message is produced; otherwise the file is
    deleted when no instances remain, or rewritten with the trimmed config.
    """
    if args.dry_run:
        info_msg = ("Changes would be made to the config file {0}".format(file_path))
    else:
        if len(config['instances']) == 0:
            info_msg = ("Removing configuration file {0} it is no longer needed.".format(file_path))
            os.remove(file_path)
        else:
            info_msg = ("Saving changes to configuration file {0}.".format(file_path))
            save_plugin_config(args.config_dir, plugin_name, args.user, config)
    log.info(info_msg)
| bsd-3-clause |
angkot/geojson-visualization | server.py | 1 | 2465 | from datetime import datetime
import json
import sqlite3
from bottle import get, post, run, request, static_file
db = sqlite3.connect('data.db')
cursor = db.cursor()
def init_db():
    """Create the ``data`` table on first run (no-op when it already exists)."""
    cursor.execute('SELECT COUNT(*) FROM sqlite_master WHERE type=? AND name=?',
                   ('table', 'data'))
    table_count = cursor.fetchone()[0]
    if not table_count:
        cursor.execute('CREATE TABLE data (id INTEGER PRIMARY KEY, ts TIMESTAMP, data BLOB)')
data = []
def load_db():
    """Load every stored row from SQLite into the in-memory ``data`` list."""
    global data
    cursor.execute('SELECT id, ts, data FROM data ORDER BY id')
    for row in cursor:
        # row = (id, ts, json-blob); the blob is decoded back into Python.
        data.append(dict(data_id=row[0],
                         timestamp=row[1],
                         data=json.loads(row[2])))
    # Python 2 print statement -- this module predates Python 3.
    print 'loaded {} data'.format(len(data))
@post('/store/<idx>')
def store(idx):
    """Persist the in-memory entry at *idx* to SQLite and record its row id."""
    idx = int(idx)
    if idx >= len(data):
        return {'status': 'error', 'msg': 'invalid data index: {}'.format(idx)}
    entry = data[idx]
    parsed_ts = datetime.strptime(entry['timestamp'], '%Y-%m-%d %H:%M:%S')
    cursor.execute('INSERT INTO data (ts, data) VALUES (?, ?)',
                   (parsed_ts, json.dumps(entry['data'])))
    db.commit()
    entry['data_id'] = cursor.lastrowid
    return {'status': 'ok', 'data_id': entry['data_id']}
@get('/get/<idx>')
def get_data(idx):
    """Return the in-memory entry at *idx*.

    :param idx: zero-based index into ``data`` (arrives as a URL string).
    :return: dict with status 'ok' and the entry, or an error message.
    """
    idx = int(idx)
    # Reject negative indexes too: previously e.g. -1 slipped past the upper
    # bound check and silently returned the last element via negative indexing.
    if idx < 0 or idx >= len(data):
        return dict(status='error', msg='invalid data index: {}'.format(idx))
    return dict(status='ok', data=data[idx])
@post('/save')
def save():
    """Parse the posted JSON payload and append it to the in-memory list.

    :return: dict with the index of the newly added entry, or an error.
    """
    payload = request.forms.get('data')
    try:
        payload = json.loads(payload)
    except (ValueError, TypeError) as e:  # 'as e' works on Py2.6+ and Py3
        print(e)
        return dict(status='error', msg='invalid data')
    data.append(dict(timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                     data_id=None, data=payload))
    # BUG FIX: the new entry lives at len(data) - 1; returning len(data)
    # handed clients an index that /get/<idx> and /store/<idx> reject.
    idx = len(data) - 1
    return dict(status='ok', data_index=idx)
@get('/status')
def status():
    """Report how many entries are currently held in memory."""
    return {'status': 'ok', 'total': len(data)}
@get('/update')
def update():
    """List entry summaries (index, timestamp, stored id) from *start* onward."""
    start = int(request.query.get('start', 0))
    summaries = []
    for offset, entry in enumerate(data[start:]):
        summaries.append({'index': start + offset,
                          'timestamp': entry['timestamp'],
                          'id': entry['data_id']})
    return dict(status='ok', start=start, data=summaries)
@get('/static/<fname>')
def static(fname):
    """Serve an arbitrary file from the current directory."""
    response = static_file(fname, root='.')
    return response
@get('/')
def index():
    """Serve the single-page UI."""
    page = static_file('index.html', root='.')
    return page
# Bootstrap: ensure the table exists, warm the in-memory cache, then block
# serving HTTP on port 8000.
init_db()
load_db()
run(port=8000)
| agpl-3.0 |
dksaputra/community | work-tools/sauron/sauron/deprecation_tracker/report.py | 1 | 2153 | from deprecation import list_deprecated_before
from docopt import docopt
from trello import TrelloClient
from datetime import datetime
# Markdown body for the Trello card; {deprecated_desc} receives a bullet
# list of links to the deprecated source lines.
CARD_DESC_TEMPLATE = '''Remove all items that were deprecated in or before version {tag}.
This list automatically updated: {date}
{deprecated_desc}
'''
def create_line_title(filename, line):
    """Build a human-readable "qualified.name:line" title for a source line.

    Paths under the standard Maven source roots are shortened to a dotted,
    package-style name; anything else keeps its full path.
    """
    source_roots = ('src/test/java/', 'src/main/java/')
    for root in source_roots:
        if root not in filename:
            continue
        dotted = filename.split(root)[1].replace("/", ".")
        return dotted + ":" + line
    return '{0}:{1}'.format(filename, line)
def report_in_trello(trello, board_id, list_id, tag, deprecated, github_baseurl):
    """Create, refresh or archive the Trello card tracking deprecated lines.

    With deprecations present the card is updated (or created on the given
    list); with none left, an existing card is archived.
    """
    line_url = '{0}/blob/master'.format(github_baseurl)
    card_title = 'Remove deprecated methods from before {tag}'.format(tag=tag)
    # Markdown bullet list: one GitHub deep-link per deprecated line.
    bullets = ['[{title}]({line_url}/{file}#L{line})'.format(
        title=create_line_title(file, line), line_url=line_url,
        file=file, line=line) for file, line in deprecated]
    deprecated_desc = "* " + "\n* ".join(bullets)
    desc = CARD_DESC_TEMPLATE.format(tag=tag, date=datetime.now(),
                                     deprecated_desc=deprecated_desc)
    board = trello.get_board(board_id)
    existing = None
    for card in board.open_cards():
        if card.name == card_title:
            existing = card
            break
    if deprecated:
        if existing is not None:
            print("Updating existing card..")
            existing.set_description(desc)
        else:
            # No card yet -- create one on the requested list.
            trello.get_list(list_id).add_card(card_title, desc)
    elif existing is not None:  # No deprecations left: retire the card.
        print("Archiving existing card (no deprecated things found)..")
        existing.set_closed(True)
def report_deprecations(tag, trello, board_id, list_id, github_baseurl):
    """Scan for pre-*tag* deprecations and mirror the findings to Trello."""
    print('Finding @Deprecated tags added before {0}..'.format(tag))
    findings = list(list_deprecated_before(tag))
    print('Connecting to trello..')
    report_in_trello(trello, board_id, list_id, tag, findings, github_baseurl)
    print("Done.")
Asurada2015/TFAPI_translation | Variables/tf_variables_to_restore.py | 1 | 2124 | """
variables_to_restore是为了在保持模型的时候方便使用滑动平均的参数,如果不使用这个保存,那模型就会保存所以参数,
除非你提前设定,就是在保存的时候指定保存变量也是可以的,比如saver = tf.train.Saver([v])这样就可以指定保存变量v,
在模型导入的时候只有这个变量会被导入。
"""
import tensorflow as tf
# --- Saving phase (kept commented for reference; run once to make the ckpt) ---
# v = tf.Variable(tf.constant(0.0, dtype=tf.float32), name='v')
# ema = tf.train.ExponentialMovingAverage(0.99)  # set up the moving-average tracker
# maintain_average_op = ema.apply(tf.global_variables())  # track every variable
# saver = tf.train.Saver()
# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#
#     sess.run(tf.assign(v, 10.0))
#     sess.run(maintain_average_op)
#     saver.save(sess, './Files/models_01.ckpt')
# Above is the saving phase: the variables are written to the ckpt file.
# What follows is the part we care about -- restoring from that ckpt.
# (Translation of the heading below: "Model import, variant 1".)
"""模型导入_1"""
# v = tf.Variable(tf.constant(0.0, dtype=tf.float32), name='v')
#
# ema = tf.train.ExponentialMovingAverage(0.99)
# maintain_average_op = ema.apply(tf.global_variables())
#
# saver = tf.train.Saver()
# with tf.Session() as sess:
#     saver.restore(sess, './Files/models_01.ckpt')
#     print(sess.run(ema.average(v)))
#     sess.run(ema.average(v))
#     print(sess.run(v))
# 0.0999999
# 10.0
# (Translation of the note below: variant 1 is inconvenient -- after
# restoring, v still holds its raw value while we actually want the
# moving-averaged value in its place.)
"""这样不是很方便，因为我再次导入模型，变量v的值我不用，并且想要用计算后的值替代v,
而此处还是v原先的值，我们需要输出函数经过计算后的值"""
"""模型导入_2"""
"""导入模型的时候tf.train.Saver函数要变化一下，变为tf.train.Saver(ema.variables_to_restore()"""
# Variant 2: build the Saver with ema.variables_to_restore() so that the
# checkpoint's shadow (averaged) values are loaded *into* the variables.
v = tf.Variable(tf.constant(0.0, dtype=tf.float32), name='v')
ema = tf.train.ExponentialMovingAverage(0.99)
maintain_average_op = ema.apply(tf.global_variables())
saver = tf.train.Saver(ema.variables_to_restore())  # key step: restore the averaged values
with tf.Session() as sess:
    saver.restore(sess, './Files/models_01.ckpt')
    print(sess.run(v))
# 0.0999999
| apache-2.0 |
Mythridor/aws-scripting | ec2/basics.py | 1 | 1145 | #! /usr/local/bin/python3.5
import boto3
import time
def list_all_ec2():
    """Return an iterable over every EC2 instance visible to the account."""
    return boto3.resource('ec2').instances.all()
def create_instance():
    """Launch one t2.micro instance, wait for it to leave 'pending', tag it."""
    ec2 = boto3.resource('ec2')
    created = ec2.create_instances(
        ImageId='ami-ebd02392',
        MinCount=1,
        MaxCount=1,
        InstanceType='t2.micro',
        KeyName='Mythri',
        NetworkInterfaces=[{
            'DeviceIndex': 0,
            'SubnetId': 'subnet-35debb52',
            'AssociatePublicIpAddress': True,
            'Groups': ['sg-a73667df']
        }]
    )
    # create_instances returns a list; the newest instance is the last entry.
    inst = created[-1]
    while inst.state == "pending":
        print(inst, inst.state)
        time.sleep(5)
        inst.update()
    inst.add_tag("Name", "test")
    print("done", inst)
def terminate_all_ec2():
    """Terminate every instance in the account -- irreversible."""
    for inst in boto3.resource('ec2').instances.all():
        inst.terminate()
def stop_all_ec2():
    """Stop (but keep) every instance in the account."""
    for inst in boto3.resource('ec2').instances.all():
        inst.stop()
create_instance() | mit |
TangHao1987/intellij-community | python/lib/Lib/distutils/command/upload.py | 87 | 7325 | """distutils.command.upload
Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
from distutils.errors import *
from distutils.core import Command
from distutils.spawn import spawn
from distutils import log
from hashlib import md5
import os
import socket
import platform
import ConfigParser
import httplib
import base64
import urlparse
import cStringIO as StringIO
class upload(Command):
    """distutils 'upload' command: push built distributions to PyPI.

    Sends one multipart/form-data POST per entry in
    ``self.distribution.dist_files``, optionally GPG-signing each file first.
    Python 2 code (httplib/urlparse/has_key and print statements).
    """
    description = "upload binary package to PyPI"
    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
    # Option table consumed by distutils' command-line parser.
    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server'),
        ('sign', 's',
         'sign files to upload using gpg'),
        ('identity=', 'i', 'GPG identity used to sign files'),
        ]
    boolean_options = ['show-response', 'sign']
    def initialize_options(self):
        """distutils hook: give every option its empty/default value."""
        self.username = ''
        self.password = ''
        self.repository = ''
        self.show_response = 0
        self.sign = False
        self.identity = None
    def finalize_options(self):
        """distutils hook: validate options and fill gaps from ~/.pypirc."""
        if self.identity and not self.sign:
            raise DistutilsOptionError(
                "Must use --sign for --identity to have meaning"
            )
        # Credentials/repository not given on the command line are read from
        # the [server-login] section of ~/.pypirc (password is stored there
        # in plain text).
        if os.environ.has_key('HOME'):
            rc = os.path.join(os.environ['HOME'], '.pypirc')
            if os.path.exists(rc):
                self.announce('Using PyPI login from %s' % rc)
                config = ConfigParser.ConfigParser({
                    'username':'',
                    'password':'',
                    'repository':''})
                config.read(rc)
                if not self.repository:
                    self.repository = config.get('server-login', 'repository')
                if not self.username:
                    self.username = config.get('server-login', 'username')
                if not self.password:
                    self.password = config.get('server-login', 'password')
        if not self.repository:
            self.repository = self.DEFAULT_REPOSITORY
    def run(self):
        """Upload every file produced by earlier build commands."""
        if not self.distribution.dist_files:
            raise DistutilsOptionError("No dist file created in earlier command")
        for command, pyversion, filename in self.distribution.dist_files:
            self.upload_file(command, pyversion, filename)
    def upload_file(self, command, pyversion, filename):
        """Upload a single file, hand-building the multipart POST body.

        :param command: the bdist command that produced the file
            (e.g. 'bdist_rpm'), sent as the 'filetype' field.
        :param pyversion: target Python version string for the file.
        :param filename: path of the distribution file to send.
        """
        # Sign if requested
        if self.sign:
            gpg_args = ["gpg", "--detach-sign", "-a", filename]
            if self.identity:
                # Splice --local-user before the file argument.
                gpg_args[2:2] = ["--local-user", self.identity]
            spawn(gpg_args,
                  dry_run=self.dry_run)
        # Fill in the data - send all the meta-data in case we need to
        # register a new release
        content = open(filename,'rb').read()
        meta = self.distribution.metadata
        data = {
            # action
            ':action': 'file_upload',
            # NOTE(review): 'protcol_version' is misspelled but is the field
            # name the server historically accepted -- do not "fix" it here.
            'protcol_version': '1',
            # identify release
            'name': meta.get_name(),
            'version': meta.get_version(),
            # file content
            'content': (os.path.basename(filename),content),
            'filetype': command,
            'pyversion': pyversion,
            'md5_digest': md5(content).hexdigest(),
            # additional meta-data
            'metadata_version' : '1.0',
            'summary': meta.get_description(),
            'home_page': meta.get_url(),
            'author': meta.get_contact(),
            'author_email': meta.get_contact_email(),
            'license': meta.get_licence(),
            'description': meta.get_long_description(),
            'keywords': meta.get_keywords(),
            'platform': meta.get_platforms(),
            'classifiers': meta.get_classifiers(),
            'download_url': meta.get_download_url(),
            # PEP 314
            'provides': meta.get_provides(),
            'requires': meta.get_requires(),
            'obsoletes': meta.get_obsoletes(),
            }
        comment = ''
        if command == 'bdist_rpm':
            dist, version, id = platform.dist()
            if dist:
                comment = 'built for %s %s' % (dist, version)
        elif command == 'bdist_dumb':
            comment = 'built for %s' % platform.platform(terse=1)
        data['comment'] = comment
        if self.sign:
            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
                                     open(filename+".asc").read())
        # set up the authentication
        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
        # Build up the MIME payload for the POST data
        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
        sep_boundary = '\n--' + boundary
        end_boundary = sep_boundary + '--'
        body = StringIO.StringIO()
        for key, value in data.items():
            # handle multiple entries for the same name
            if type(value) != type([]):
                value = [value]
            # NOTE: the inner loop deliberately rebinds 'value' to each item;
            # a (filename, bytes) tuple marks a file-type field.
            for value in value:
                if type(value) is tuple:
                    fn = ';filename="%s"' % value[0]
                    value = value[1]
                else:
                    fn = ""
                    value = str(value)
                body.write(sep_boundary)
                body.write('\nContent-Disposition: form-data; name="%s"'%key)
                body.write(fn)
                body.write("\n\n")
                body.write(value)
                if value and value[-1] == '\r':
                    body.write('\n')  # write an extra newline (lurve Macs)
        body.write(end_boundary)
        body.write("\n")
        body = body.getvalue()
        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
        # build the Request
        # We can't use urllib2 since we need to send the Basic
        # auth right with the first request
        schema, netloc, url, params, query, fragments = \
            urlparse.urlparse(self.repository)
        assert not params and not query and not fragments
        if schema == 'http':
            http = httplib.HTTPConnection(netloc)
        elif schema == 'https':
            http = httplib.HTTPSConnection(netloc)
        else:
            raise AssertionError, "unsupported schema "+schema
        # NOTE(review): 'data' is cleared and 'loglevel' is set but neither
        # is used below -- apparently left over from an earlier revision.
        data = ''
        loglevel = log.INFO
        try:
            http.connect()
            http.putrequest("POST", url)
            http.putheader('Content-type',
                           'multipart/form-data; boundary=%s'%boundary)
            http.putheader('Content-length', str(len(body)))
            http.putheader('Authorization', auth)
            http.endheaders()
            http.send(body)
        except socket.error, e:
            self.announce(str(e), log.ERROR)
            return
        r = http.getresponse()
        if r.status == 200:
            self.announce('Server response (%s): %s' % (r.status, r.reason),
                          log.INFO)
        else:
            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
                          log.ERROR)
        if self.show_response:
            print '-'*75, r.read(), '-'*75
| apache-2.0 |
lucianodasilva/lincxx | test/gtest/test/gtest_color_test.py | 3259 | 4911 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# BUG FIX: this used to read "IS_WINDOWS = os.name = 'nt'" -- a chained
# assignment that overwrote os.name with 'nt' on every platform and made
# IS_WINDOWS the (always truthy) string 'nt' instead of a real comparison,
# so the "if not IS_WINDOWS" branches in the tests below never ran.
IS_WINDOWS = os.name == 'nt'
COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # pop with a default makes unsetting a missing variable a no-op.
    os.environ.pop(env_var, None)
  else:
    os.environ[env_var] = value
def UsesColor(term, color_env_var, color_flag):
  """Runs gtest_color_test_ and returns its exit code."""
  SetEnvVar('TERM', term)
  SetEnvVar(COLOR_ENV_VAR, color_env_var)
  args = [] if color_flag is None else ['--%s=%s' % (COLOR_FLAG, color_flag)]
  child = gtest_test_utils.Subprocess([COMMAND] + args)
  return not child.exited or child.exit_code
class GTestColorTest(gtest_test_utils.TestCase):
  # Each case drives the helper binary through UsesColor() with a TERM value,
  # a GTEST_COLOR env value, and/or a --gtest_color flag, and checks whether
  # colored output was selected.  Non-Windows-only expectations are guarded
  # with IS_WINDOWS.
  def testNoEnvVarNoFlag(self):
    """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', None, None))
      self.assert_(not UsesColor('emacs', None, None))
      self.assert_(not UsesColor('xterm-mono', None, None))
      self.assert_(not UsesColor('unknown', None, None))
      self.assert_(not UsesColor(None, None, None))
    self.assert_(UsesColor('linux', None, None))
    self.assert_(UsesColor('cygwin', None, None))
    self.assert_(UsesColor('xterm', None, None))
    self.assert_(UsesColor('xterm-color', None, None))
    self.assert_(UsesColor('xterm-256color', None, None))
  def testFlagOnly(self):
    """Tests the case when there's --gtest_color but not GTEST_COLOR."""
    self.assert_(not UsesColor('dumb', None, 'no'))
    self.assert_(not UsesColor('xterm-color', None, 'no'))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('emacs', None, 'auto'))
    self.assert_(UsesColor('xterm', None, 'auto'))
    self.assert_(UsesColor('dumb', None, 'yes'))
    self.assert_(UsesColor('xterm', None, 'yes'))
  def testEnvVarOnly(self):
    """Tests the case when there's GTEST_COLOR but not --gtest_color."""
    self.assert_(not UsesColor('dumb', 'no', None))
    self.assert_(not UsesColor('xterm-color', 'no', None))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', 'auto', None))
      self.assert_(UsesColor('xterm-color', 'auto', None))
    self.assert_(UsesColor('dumb', 'yes', None))
    self.assert_(UsesColor('xterm-color', 'yes', None))
  def testEnvVarAndFlag(self):
    """Tests the case when there are both GTEST_COLOR and --gtest_color."""
    # The flag always wins over the environment variable.
    self.assert_(not UsesColor('xterm-color', 'no', 'no'))
    self.assert_(UsesColor('dumb', 'no', 'yes'))
    self.assert_(UsesColor('xterm-color', 'no', 'auto'))
  def testAliasesOfYesAndNo(self):
    """Tests using aliases in specifying --gtest_color."""
    self.assert_(UsesColor('dumb', None, 'true'))
    self.assert_(UsesColor('dumb', None, 'YES'))
    self.assert_(UsesColor('dumb', None, 'T'))
    self.assert_(UsesColor('dumb', None, '1'))
    self.assert_(not UsesColor('xterm', None, 'f'))
    self.assert_(not UsesColor('xterm', None, 'false'))
    self.assert_(not UsesColor('xterm', None, '0'))
    self.assert_(not UsesColor('xterm', None, 'unknown'))
# Delegate to the shared gtest test runner when executed directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
| mit |
mdegis/machine-learning | 005 - Datasets Questions/explore_enron_data.py | 1 | 2442 | #!/usr/bin/python
"""
starter code for exploring the Enron dataset (emails + finances)
loads up the dataset (pickled dict of dicts)
the dataset has the form
enron_data["LASTNAME FIRSTNAME MIDDLEINITIAL"] = { features_dict }
{features_dict} is a dictionary of features associated with that person
you should explore features_dict as part of the mini-project,
but here's an example to get you started:
enron_data["SKILLING JEFFREY K"]["bonus"] = 5600000
"""
import pickle
# Dataset: {"LASTNAME FIRSTNAME M": {feature: value, ...}}; missing values
# are the *string* "NaN".  Python 2 script (print statements, pickle text mode).
enron_data = pickle.load(open("final_project_dataset.pkl", "r"))
# Counters: max payment seen, #POIs, #quantified salaries, #known emails,
# #people with NaN total_payments, #POIs with NaN total_payments.
money, n_poi, n_sal, n_email, total_payments, n_poi_nan_pay = 0, 0, 0, 0, 0, 0
people = ("SKILLING JEFFREY K", "LAY KENNETH L","FASTOW ANDREW S")
who = ""
for i in enron_data:
    if enron_data[i]["poi"]:
        n_poi += 1
        if enron_data[i]["total_payments"] == "NaN":
            n_poi_nan_pay += 1
    if enron_data[i]["email_address"] != "NaN":
        n_email += 1
    if enron_data[i]["salary"] != "NaN":
        n_sal += 1
    # NOTE(review): despite its name, total_payments counts people whose
    # total_payments field is missing ("NaN").
    if enron_data[i]["total_payments"] == "NaN":
        total_payments += 1
# Find which of the three executives took home the most money.
for i in people:
    if money<enron_data[i]["total_payments"]:
        money = enron_data[i]["total_payments"]
        who = i
print "- How many data points (people) are in the dataset?\n+ %r" % len(enron_data)
print "- For each person, how many features are available?\n+ %r" % len(enron_data["SKILLING JEFFREY K"])
print "- How many POIs are there in the E+F dataset?\n+ %r" % n_poi
# check the poi_names.txt file
print "- How many POIs were there total?\n+ %r" % 35
print "- What is the total value of the stock belonging to James Prentice?\n+ %r" % enron_data["PRENTICE JAMES"]["total_stock_value"]
print "- How many email messages do we have from Wesley Colwell to persons of interest?\n+ %r" % enron_data["COLWELL WESLEY"]["from_this_person_to_poi"]
print "- What is the value of stock options exercised by Jeffrey Skilling?\n+ %r" % enron_data["SKILLING JEFFREY K"]["exercised_stock_options"]
print "- Of these three individuals (Lay, Skilling and Fastow), who took home the most money?\n+ %r, %r"% (who, money)
print "- How many folks in this dataset have a quantified salary?\n+ %r" % n_sal
print "- What about a known email address?\n+ %r" % n_email
print "- How many people in the E+F dataset have NaN for their total payments?\n+ %r" % (total_payments / float(len(enron_data)))
print "- What percentage of POIs in the dataset hane 'NaN' for their payments?\n+ %r" % (n_poi_nan_pay / float(n_poi))
| gpl-3.0 |
raymondnijssen/QGIS | python/plugins/processing/gui/FileSelectionPanel.py | 12 | 3363 | # -*- coding: utf-8 -*-
"""
***************************************************************************
FileSelectionPanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import warnings
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QFileDialog
from qgis.core import QgsSettings
from processing.tools.system import isWindows
# Root of the processing plugin (one level above this package).
pluginPath = os.path.split(os.path.dirname(__file__))[0]
# Compile the Qt Designer .ui file into widget/base classes; uic emits
# DeprecationWarnings on some PyQt versions, so silence them during the load.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    WIDGET, BASE = uic.loadUiType(
        os.path.join(pluginPath, 'ui', 'widgetBaseSelector.ui'))
class FileSelectionPanel(BASE, WIDGET):
    """Line-edit + browse-button widget for picking a file or a folder.

    The last browsed directory is remembered in QgsSettings under
    '/Processing/LastInputPath'.
    """
    def __init__(self, isFolder, ext=None):
        # isFolder: pick a directory instead of file(s).
        # ext: file extension filter (without the dot); '*' when omitted.
        super(FileSelectionPanel, self).__init__(None)
        self.setupUi(self)
        self.ext = ext or '*'
        self.isFolder = isFolder
        self.btnSelect.clicked.connect(self.showSelectionDialog)
    def showSelectionDialog(self):
        """Open the file/folder dialog and write the choice into the line edit."""
        # Find the file dialog's working directory
        settings = QgsSettings()
        text = self.leText.text()
        if os.path.isdir(text):
            path = text
        elif os.path.isdir(os.path.dirname(text)):
            path = os.path.dirname(text)
        elif settings.contains('/Processing/LastInputPath'):
            path = settings.value('/Processing/LastInputPath')
        else:
            path = ''
        if self.isFolder:
            folder = QFileDialog.getExistingDirectory(self,
                                                      self.tr('Select Folder'), path)
            if folder:
                self.leText.setText(folder)
                settings.setValue('/Processing/LastInputPath',
                                  os.path.dirname(folder))
        else:
            # Multiple files may be chosen; they are joined with ';'.
            filenames, selected_filter = QFileDialog.getOpenFileNames(self,
                                                                      self.tr('Select File'), path, self.tr('{} files').format(self.ext.upper()) + ' (*.' + self.ext + self.tr(');;All files (*.*)'))
            if filenames:
                self.leText.setText(u';'.join(filenames))
                settings.setValue('/Processing/LastInputPath',
                                  os.path.dirname(filenames[0]))
    def getValue(self):
        """Return the current path(s), normalized to forward slashes on Windows."""
        s = self.leText.text()
        if isWindows():
            s = s.replace('\\', '/')
        return s
    def setText(self, text):
        """Programmatically set the line edit's content."""
        self.leText.setText(text)
| gpl-2.0 |
sauloal/cnidaria | scripts/venv/lib/python2.7/site-packages/cogent/maths/matrix/distance.py | 1 | 3872 | #!/usr/bin/env python
"""Code supporting distance matrices with arbitrary row/column labels.
Currently used to support amino acid distance matrices and similar.
NOTE: This is _much_ slower than using a numpy array. It is primarily
convenient when you want to index into a matrix by keys (e.g. by amino acid
labels) and when you expect to have a lot of missing values. You will probably
want to use this for prototyping, then move to numpy arrays if and when
performance becomes an issue.
"""
from cogent.util.misc import Delegator
from cogent.util.dict2d import Dict2D
from copy import deepcopy
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Greg Caporaso", "Rob Knight"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Greg Caporaso"
__email__ = "caporaso@colorado.edu"
__status__ = "Production"
class DistanceMatrix(Dict2D, Delegator):
    """ 2D dict giving distances from A to B and vice versa """
    # default set of amino acids
    RowOrder = list('ACDEFGHIKLMNPQRSTVWY')
    ColOrder = list('ACDEFGHIKLMNPQRSTVWY')
    Pad = True
    def __init__(self, data=None, RowOrder=None, ColOrder=None, Default=None,
                 Pad=None, RowConstructor=None, info=None):
        """ Init dict with pre-existing data: dict of dicts
        Usage:
            data = distance matrix in form acceptable by Dict2D class
            RowOrder = list of 'interesting keys', default is the set of
                all amino acids
            ColOrder = list of 'interesting keys', default is the set of
                all amino acids
            Default = value to set padded elements to
            Pad = boolean describing whether to fill object to hold all
                possible elements based on RowOrder and ColOrder
            RowConstructor = constructor to use when building inner
                objects, default dict
            info = the AAIndexRecord object
            Power = Power the original matrix has been raised to yield current
                matrix
        """
        # Per-instance overrides for the class-level defaults above.
        if RowOrder is not None:
            self.RowOrder = RowOrder
        if ColOrder is not None:
            self.ColOrder = ColOrder
        if Pad is not None:
            self.Pad = Pad
        # Initialize super class attributes
        Dict2D.__init__(self, data=data, RowOrder=self.RowOrder,\
            ColOrder=self.ColOrder, Default=Default, Pad=self.Pad,\
            RowConstructor=RowConstructor)
        Delegator.__init__(self, info)
        # The power to which the original data has been raised to give
        # the current data, starts at 1., modified by elementPow()
        # accessed as self.Power
        # NOTE(review): written via __dict__ -- presumably to bypass
        # Delegator's attribute forwarding; confirm before changing.
        self.__dict__['Power'] = 1.
    def elementPow(self, power, ignore_invalid=True):
        """ Raises all elements in matrix to power
        power: the power to raise all elements in the matrix to,
        must be a floatable value or a TypeError is raise
        ignore_invalid: leaves invalid (not floatable)
        matrix data untouched
        """
        try:
            n = float(power)
        except ValueError:
            raise TypeError, 'Must pass a floatable value to elementPow'
        if ignore_invalid:
            # Swallow TypeError per-element so non-numeric cells pass through.
            def Pow(x):
                try:
                    return x**n
                except TypeError:
                    return x
        else:
            def Pow(x):
                return x**n
        self.scale(Pow)
        # Track the cumulative exponent applied to the original data.
        self.Power = self.Power * n
    def copy(self):
        """ Returns a deep copy of the DistanceMatrix object """
        # Is there a better way to do this? It's tricky to keep the delegator
        # part functioning
        return deepcopy(self)
| mit |
daodewang/qingcloud-sdk-python | tests/test_json_tool.py | 1 | 2035 | # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
import unittest
from qingcloud.misc.json_tool import json_dump, json_load
class JsonToolTestCase(unittest.TestCase):
    """Unit tests for qingcloud.misc.json_tool's json_dump/json_load.

    The expected strings imply json_dump emits sorted keys with compact
    separators, and both helpers return None on invalid input instead of
    raising.
    """
    def test_json_dump_dict(self):
        obj = {1:1, 'str': 'string', 'none': None}
        expected = '{"1":1,"none":null,"str":"string"}'
        self.assertEqual(json_dump(obj), expected)
    def test_json_dump_invalid_obj(self):
        # A module object is not JSON-serializable as a key.
        obj = {unittest: 'invalid key'}
        expected = None
        self.assertEqual(json_dump(obj), expected)
    def test_json_dump_list(self):
        obj = [1, 4, '3']
        expected = '[1,4,"3"]'
        self.assertEqual(json_dump(obj), expected)
    def test_json_load_list(self):
        string = '{"int":1,"none":null,"str":"string"}'
        expected = {'int':1, 'str': 'string', 'none': None}
        self.assertEqual(json_load(string), expected)
    def test_json_load_string(self):
        string = '{"int":1,"none":null,"str":"string"}'
        expected = {'int':1, 'str': 'string', 'none': None}
        self.assertEqual(json_load(string), expected)
    def test_json_load_invalid_string(self):
        # Malformed JSON (dangling ':null') yields None rather than an error.
        string = '{"int":1,:null,"str":"string"}'
        expected = None
        self.assertEqual(json_load(string), expected)
| apache-2.0 |
pelmers/dxr | dxr/plugins/clang/tests/test_operator_call.py | 7 | 2060 | from dxr.plugins.clang.tests import CSingleFileTestCase
class OperatorCallTests(CSingleFileTestCase):
    """Implicit operator()/operator[] calls should be indexed as refs."""
    source = """
        struct Foo
        {
            void operator()(int)
            {
            }
            void operator[](int)
            {
            }
        };
        int main()
        {
            Foo foo;
            int alpha = 0;
            foo(alpha);
            int beta = 0;
            foo[beta];
            return 0;
        }
        """
    def test_operator_call(self):
        # The opening paren of foo(alpha) links to operator().
        self.found_line_eq('+function-ref:Foo::operator()(int)',
                           'foo<b>(</b>alpha);')
    def test_call_argument(self):
        # The argument is still a normal variable reference.
        self.found_line_eq('+var-ref:main()::alpha',
                           'foo(<b>alpha</b>);')
    def test_operator_subscript(self):
        # The opening bracket of foo[beta] links to operator[].
        self.found_line_eq('+function-ref:Foo::operator[](int)',
                           'foo<b>[</b>beta];')
    def test_subscript_argument(self):
        self.found_line_eq('+var-ref:main()::beta',
                           'foo[<b>beta</b>];')
class ExplicitOperatorCallTests(CSingleFileTestCase):
    """Explicit foo.operator()(x)/foo.operator[](x) spellings are refs too."""
    source = """
        struct Foo
        {
            void operator()(int)
            {
            }
            void operator[](int)
            {
            }
        };
        int main()
        {
            Foo foo;
            int alpha = 0;
            foo.operator()(alpha);
            int beta = 0;
            foo.operator[](beta);
            return 0;
        }
        """
    def test_operator_call(self):
        # The 'operator()' token itself is the highlighted reference here.
        self.found_line_eq('+function-ref:Foo::operator()(int)',
                           'foo.<b>operator()</b>(alpha);')
    def test_call_argument(self):
        self.found_line_eq('+var-ref:main()::alpha',
                           'foo.operator()(<b>alpha</b>);')
    def test_operator_subscript(self):
        self.found_line_eq('+function-ref:Foo::operator[](int)',
                           'foo.<b>operator[]</b>(beta);')
    def test_subscript_argument(self):
        self.found_line_eq('+var-ref:main()::beta',
                           'foo.operator[](<b>beta</b>);')
| mit |
liucode/tempest-master | tools/skip_tracker.py | 46 | 5132 | #!/usr/bin/env python
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Track test skips via launchpadlib API and raise alerts if a bug
is fixed but a skip is still in the Tempest test code
"""
import logging
import os
import re
from launchpadlib import launchpad
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
TESTDIR = os.path.join(BASEDIR, 'tempest')
LPCACHEDIR = os.path.expanduser('~/.launchpadlib/cache')
def info(msg, *args, **kwargs):
    """Log *msg* at INFO level on the root logger (thin logging shim)."""
    logging.info(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
    """Log *msg* at DEBUG level on the root logger (thin logging shim)."""
    logging.debug(msg, *args, **kwargs)
def find_skips(start=TESTDIR):
    """
    Walk the test tree and map each bug number to the files and methods
    that skip because of it: {bug_no: {filename: [method_name, ...]}}.
    """
    skips_by_bug = {}
    debug("Searching in %s", start)
    for root, _dirs, files in os.walk(start):
        for name in files:
            if not (name.startswith('test_') and name.endswith('py')):
                continue
            path = os.path.join(root, name)
            debug("Searching in %s", path)
            for method_name, bug_no in find_skips_in_file(path):
                per_file = skips_by_bug.setdefault(bug_no, {})
                per_file.setdefault(name, []).append(method_name)
    return skips_by_bug
def find_skips_in_file(path):
    """
    Return the (method_name, bug_number) skip tuples found in one test file.

    A tuple is recorded when a ``skip_because(bug='NNN')`` decorator line is
    followed by the next ``def`` line; the bug number is returned as an int.
    """
    BUG_RE = re.compile(r'\s*@.*skip_because\(bug=[\'"](\d+)[\'"]')
    DEF_RE = re.compile(r'\s*def (\w+)\(')
    bug_found = False
    results = []
    # BUG FIX: the handle was opened without ever being closed (leak), and
    # in binary mode, which hands bytes to these str regexes under Python 3.
    with open(path) as skip_file:
        lines = skip_file.readlines()
    for x, line in enumerate(lines):
        if not bug_found:
            res = BUG_RE.match(line)
            if res:
                bug_no = int(res.group(1))
                debug("Found bug skip %s on line %d", bug_no, x + 1)
                bug_found = True
        else:
            # After a skip decorator, the next def names the skipped method.
            res = DEF_RE.match(line)
            if res:
                method = res.group(1)
                debug("Found test method %s skips for bug %d", method, bug_no)
                results.append((method, bug_no))
                bug_found = False
    return results
def get_results(result_dict):
    """Flatten a find_skips()-style mapping into (key, bug_no) pairs.

    Iterating each inner mapping yields its keys, so every pair couples
    an inner-dict key with the bug number that owns it.
    """
    return [(entry, bug_no)
            for bug_no, entries in result_dict.items()
            for entry in entries]
if __name__ == '__main__':
    logging.basicConfig(format='%(levelname)s: %(message)s',
                        level=logging.INFO)
    # Nested mapping: bug number -> {test file name: [skipping methods]}.
    results = find_skips()
    unique_bugs = sorted(set([bug for (method, bug) in get_results(results)]))
    unskips = []
    duplicates = []
    # NOTE(review): len(results) counts distinct bug numbers (the dict keys),
    # not individual skip decorators, so this equals the "unique bugs" total
    # below — confirm whether a per-skip count was intended here.
    info("Total bug skips found: %d", len(results))
    info("Total unique bugs causing skips: %d", len(unique_bugs))
    # Read-only anonymous session against the production Launchpad API.
    lp = launchpad.Launchpad.login_anonymously('grabbing bugs',
                                               'production',
                                               LPCACHEDIR)
    for bug_no in unique_bugs:
        bug = lp.bugs[bug_no]
        # Remember duplicate bugs so the master bug's status is checked too.
        duplicate = bug.duplicate_of_link
        if duplicate is not None:
            dup_id = duplicate.split('/')[-1]
            duplicates.append((bug_no, dup_id))
        for task in bug.bug_tasks:
            info("Bug #%7s (%12s - %12s)", bug_no,
                 task.importance, task.status)
            # A fixed bug means its skip decorators can be removed.
            if task.status in ('Fix Released', 'Fix Committed'):
                unskips.append(bug_no)
    for bug_id, dup_id in duplicates:
        if bug_id not in unskips:
            dup_bug = lp.bugs[dup_id]
            for task in dup_bug.bug_tasks:
                info("Bug #%7s is a duplicate of Bug#%7s (%12s - %12s)",
                     bug_id, dup_id, task.importance, task.status)
                # Credit the duplicate's fix to the original bug id.
                if task.status in ('Fix Released', 'Fix Committed'):
                    unskips.append(bug_id)
    unskips = sorted(set(unskips))
    if unskips:
        print("The following bugs have been fixed and the corresponding skips")
        print("should be removed from the test cases:")
        print()
        for bug in unskips:
            message = " %7s in " % bug
            locations = ["%s" % x for x in results[bug].keys()]
            message += " and ".join(locations)
            print(message)
| apache-2.0 |
benranco/SNPpipeline | tools/vcflib/googletest/googletest/scripts/release_docs.py | 1167 | 6132 | #!/usr/bin/env python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for branching Google Test/Mock wiki pages for a new version.
SYNOPSIS
release_docs.py NEW_RELEASE_VERSION
Google Test and Google Mock's external user documentation is in
interlinked wiki files. When we release a new version of
Google Test or Google Mock, we need to branch the wiki files
such that users of a specific version of Google Test/Mock can
look up documentation relevant for that version. This script
automates that process by:
- branching the current wiki pages (which document the
behavior of the SVN trunk head) to pages for the specified
version (e.g. branching FAQ.wiki to V2_6_FAQ.wiki when
NEW_RELEASE_VERSION is 2.6);
- updating the links in the branched files to point to the branched
version (e.g. a link in V2_6_FAQ.wiki that pointed to
Primer.wiki#Anchor will now point to V2_6_Primer.wiki#Anchor).
NOTE: NEW_RELEASE_VERSION must be a NEW version number for
which the wiki pages don't yet exist; otherwise you'll get SVN
errors like "svn: Path 'V1_7_PumpManual.wiki' is not a
directory" when running the script.
EXAMPLE
$ cd PATH/TO/GTEST_SVN_WORKSPACE/trunk
$ scripts/release_docs.py 2.6 # create wiki pages for v2.6
$ svn status # verify the file list
$ svn diff # verify the file contents
$ svn commit -m "release wiki pages for v2.6"
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sys
import common
# Wiki pages that shouldn't be branched for every gtest/gmock release.
GTEST_UNVERSIONED_WIKIS = ['DevGuide.wiki']
GMOCK_UNVERSIONED_WIKIS = [
'DesignDoc.wiki',
'DevGuide.wiki',
'KnownIssues.wiki'
]
def DropWikiSuffix(wiki_filename):
  """Strips a trailing '.wiki' extension (if any) and returns the result."""
  suffix = '.wiki'
  if wiki_filename.endswith(suffix):
    return wiki_filename[:-len(suffix)]
  return wiki_filename
class WikiBrancher(object):
  """Branches the wiki pages for a new Google Test/Mock release.

  Copies every versionable .wiki file in the SVN workspace to a
  version-prefixed twin (e.g. FAQ.wiki -> V2_6_FAQ.wiki for version 2.6)
  via 'svn cp', then rewrites wiki links inside the branched copies so
  they reference the branched page names instead of the trunk ones.
  """

  def __init__(self, dot_version):
    # Must run inside a gtest/gmock SVN workspace; common.GetSvnInfo()
    # reports which project this is and where the workspace root lives.
    self.project, svn_root_path = common.GetSvnInfo()
    if self.project not in ('googletest', 'googlemock'):
      sys.exit('This script must be run in a gtest or gmock SVN workspace.')
    self.wiki_dir = svn_root_path + '/wiki'
    # Turn '2.6' to 'V2_6_'.
    self.version_prefix = 'V' + dot_version.replace('.', '_') + '_'
    self.files_to_branch = self.GetFilesToBranch()
    page_names = [DropWikiSuffix(f) for f in self.files_to_branch]
    # A link to Foo.wiki is in one of the following forms:
    #   [Foo words]
    #   [Foo#Anchor words]
    #   [http://code.google.com/.../wiki/Foo words]
    #   [http://code.google.com/.../wiki/Foo#Anchor words]
    # We want to replace 'Foo' with 'V2_6_Foo' in the above cases.
    self.search_for_re = re.compile(
        # This regex matches either
        #   [Foo
        # or
        #   /wiki/Foo
        # followed by a space or a #, where Foo is the name of an
        # unversioned wiki page.
        r'(\[|/wiki/)(%s)([ #])' % '|'.join(page_names))
    # \1 and \3 restore the surrounding delimiters; the version prefix is
    # inserted immediately before the captured page name (\2).
    self.replace_with = r'\1%s\2\3' % (self.version_prefix,)

  def GetFilesToBranch(self):
    """Returns a list of .wiki file names that need to be branched."""
    unversioned_wikis = (GTEST_UNVERSIONED_WIKIS if self.project == 'googletest'
                         else GMOCK_UNVERSIONED_WIKIS)
    # Branch every .wiki page except already-versioned ones (V<digit>...)
    # and the pages explicitly exempted from versioning.
    return [f for f in os.listdir(self.wiki_dir)
            if (f.endswith('.wiki') and
                not re.match(r'^V\d', f) and  # Excluded versioned .wiki files.
                f not in unversioned_wikis)]

  def BranchFiles(self):
    """Branches the .wiki files needed to be branched."""
    print 'Branching %d .wiki files:' % (len(self.files_to_branch),)
    os.chdir(self.wiki_dir)
    # 'svn cp' (rather than creating new files) preserves page history.
    for f in self.files_to_branch:
      command = 'svn cp %s %s%s' % (f, self.version_prefix, f)
      print command
      os.system(command)

  def UpdateLinksInBranchedFiles(self):
    # Rewrites wiki links in each branched copy; trunk originals are
    # left untouched.  Uses the Python 2 file() builtin.
    for f in self.files_to_branch:
      source_file = os.path.join(self.wiki_dir, f)
      versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)
      print 'Updating links in %s.' % (versioned_file,)
      text = file(source_file, 'r').read()
      new_text = self.search_for_re.sub(self.replace_with, text)
      file(versioned_file, 'w').write(new_text)
def main():
  """Branches the wiki pages for the release version named on argv."""
  args = sys.argv
  # Exactly one argument (the new release version) is required; otherwise
  # show the usage text from the module docstring and exit.
  if len(args) != 2:
    sys.exit(__doc__)
  brancher = WikiBrancher(args[1])
  brancher.BranchFiles()
  brancher.UpdateLinksInBranchedFiles()


if __name__ == '__main__':
  main()
| mit |
AICP/external_chromium_org | tools/site_compare/commands/scrape.py | 189 | 1832 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Command for scraping images from a URL or list of URLs.
Prerequisites:
1. The command_line package from tools/site_compare
2. Either the IE BHO or Firefox extension (or both)
Installation:
1. Build the IE BHO, or call regsvr32 on a prebuilt binary
2. Add a file called "measurepageloadtimeextension@google.com" to
the default Firefox profile directory under extensions, containing
the path to the Firefox extension root
Invoke with the command line arguments as documented within
the command line.
"""
import command_line
from drivers import windowing
from utils import browser_iterate
def CreateCommand(cmdline):
  """Inserts the command and arguments into a command line for parsing."""
  scrape_cmd = cmdline.AddCommand(
      ["scrape"],
      "Scrapes an image from a URL or series of URLs.",
      None,
      ExecuteScrape)

  # Shared iteration options (browsers, URL lists, timing) come first.
  browser_iterate.SetupIterationCommandLine(scrape_cmd)

  # Scrape-specific options.
  scrape_cmd.AddArgument(
      ["-log", "--logfile"], "File to write text output", type="string")
  scrape_cmd.AddArgument(
      ["-out", "--outdir"], "Directory to store scrapes",
      type="string", required=True)
def ExecuteScrape(command):
  """Executes the Scrape command."""

  def ScrapeResult(url, proc, wnd, result):
    """Capture and save the scrape."""
    # 'log_file' is the closure variable assigned below; the assignment
    # happens before browser_iterate.Iterate() ever invokes this callback.
    if log_file: log_file.write(result)

    # Scrape the page
    image = windowing.ScrapeWindow(wnd)
    filename = windowing.URLtoFilename(url, command["--outdir"], ".bmp")
    image.save(filename)

  # Optional text log; None disables logging inside the callback above.
  if command["--logfile"]: log_file = open(command["--logfile"], "w")
  else: log_file = None

  # Visit each configured URL/browser combination, scraping as we go.
  browser_iterate.Iterate(command, ScrapeResult)

  # Close the log file and return. We're done.
  if log_file: log_file.close()
| bsd-3-clause |
adrianholovaty/django | tests/regressiontests/forms/tests/forms.py | 33 | 109561 | # -*- coding: utf-8 -*-
import datetime
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import *
from django.http import QueryDict
from django.template import Template, Context
from django.test import TestCase
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.safestring import mark_safe
class Person(Form):
    """Three-field form fixture used by the tests below.

    Field declaration order is significant: it determines the order the
    fields render in as_table()/as_ul()/as_p() output.
    """
    first_name = CharField()
    last_name = CharField()
    birthday = DateField()
class PersonNew(Form):
    """Variant of Person whose first field carries an explicit widget 'id'
    attribute, for exercising the interaction between auto_id and
    widget-level id attributes."""
    first_name = CharField(widget=TextInput(attrs={'id': 'first_name_id'}))
    last_name = CharField()
    birthday = DateField()
class FormsTestCase(TestCase):
# A Form is a collection of Fields. It knows how to validate a set of data and it
# knows how to render itself in a couple of default ways (e.g., an HTML table).
# You can pass it data in __init__(), as a dictionary.
def test_form(self):
# Pass a dictionary to a Form's __init__().
p = Person({'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9'})
self.assertTrue(p.is_bound)
self.assertEqual(p.errors, {})
self.assertTrue(p.is_valid())
self.assertHTMLEqual(p.errors.as_ul(), u'')
self.assertEqual(p.errors.as_text(), u'')
self.assertEqual(p.cleaned_data["first_name"], u'John')
self.assertEqual(p.cleaned_data["last_name"], u'Lennon')
self.assertEqual(p.cleaned_data["birthday"], datetime.date(1940, 10, 9))
self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="first_name" value="John" id="id_first_name" />')
self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="last_name" value="Lennon" id="id_last_name" />')
self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />')
try:
p['nonexistentfield']
self.fail('Attempts to access non-existent fields should fail.')
except KeyError:
pass
form_output = []
for boundfield in p:
form_output.append(str(boundfield))
self.assertHTMLEqual('\n'.join(form_output), """<input type="text" name="first_name" value="John" id="id_first_name" />
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />""")
form_output = []
for boundfield in p:
form_output.append([boundfield.label, boundfield.data])
self.assertEqual(form_output, [
['First name', u'John'],
['Last name', u'Lennon'],
['Birthday', u'1940-10-9']
])
self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>""")
def test_empty_dict(self):
# Empty dictionaries are valid, too.
p = Person({})
self.assertTrue(p.is_bound)
self.assertEqual(p.errors['first_name'], [u'This field is required.'])
self.assertEqual(p.errors['last_name'], [u'This field is required.'])
self.assertEqual(p.errors['birthday'], [u'This field is required.'])
self.assertFalse(p.is_valid())
try:
p.cleaned_data
self.fail('Attempts to access cleaned_data when validation fails should fail.')
except AttributeError:
pass
self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>""")
self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>""")
def test_unbound_form(self):
# If you don't pass any values to the Form's __init__(), or if you pass None,
# the Form will be considered unbound and won't do any validation. Form.errors
# will be an empty dictionary *but* Form.is_valid() will return False.
p = Person()
self.assertFalse(p.is_bound)
self.assertEqual(p.errors, {})
self.assertFalse(p.is_valid())
try:
p.cleaned_data
self.fail('Attempts to access cleaned_data when validation fails should fail.')
except AttributeError:
pass
self.assertHTMLEqual(str(p), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /></td></tr>""")
self.assertHTMLEqual(p.as_ul(), """<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></li>""")
self.assertHTMLEqual(p.as_p(), """<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /></p>""")
def test_unicode_values(self):
# Unicode values are handled properly.
p = Person({'first_name': u'John', 'last_name': u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111', 'birthday': '1940-10-9'})
self.assertHTMLEqual(p.as_table(), u'<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>\n<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></td></tr>\n<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></td></tr>')
self.assertHTMLEqual(p.as_ul(), u'<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></li>\n<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></li>\n<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></li>')
self.assertHTMLEqual(p.as_p(), u'<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" value="John" id="id_first_name" /></p>\n<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" /></p>\n<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" value="1940-10-9" id="id_birthday" /></p>')
p = Person({'last_name': u'Lennon'})
self.assertEqual(p.errors['first_name'], [u'This field is required.'])
self.assertEqual(p.errors['birthday'], [u'This field is required.'])
self.assertFalse(p.is_valid())
self.assertHTMLEqual(p.errors.as_ul(), u'<ul class="errorlist"><li>first_name<ul class="errorlist"><li>This field is required.</li></ul></li><li>birthday<ul class="errorlist"><li>This field is required.</li></ul></li></ul>')
self.assertEqual(p.errors.as_text(), """* first_name
* This field is required.
* birthday
* This field is required.""")
try:
p.cleaned_data
self.fail('Attempts to access cleaned_data when validation fails should fail.')
except AttributeError:
pass
self.assertEqual(p['first_name'].errors, [u'This field is required.'])
self.assertHTMLEqual(p['first_name'].errors.as_ul(), u'<ul class="errorlist"><li>This field is required.</li></ul>')
self.assertEqual(p['first_name'].errors.as_text(), u'* This field is required.')
p = Person()
self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="first_name" id="id_first_name" />')
self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="last_name" id="id_last_name" />')
self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="birthday" id="id_birthday" />')
def test_cleaned_data_only_fields(self):
# cleaned_data will always *only* contain a key for fields defined in the
# Form, even if you pass extra data when you define the Form. In this
# example, we pass a bunch of extra fields to the form constructor,
# but cleaned_data contains only the form's fields.
data = {'first_name': u'John', 'last_name': u'Lennon', 'birthday': u'1940-10-9', 'extra1': 'hello', 'extra2': 'hello'}
p = Person(data)
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], u'John')
self.assertEqual(p.cleaned_data['last_name'], u'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
def test_optional_data(self):
# cleaned_data will include a key and value for *all* fields defined in the Form,
# even if the Form's data didn't include a value for fields that are not
# required. In this example, the data dictionary doesn't include a value for the
# "nick_name" field, but cleaned_data includes it. For CharFields, it's set to the
# empty string.
class OptionalPersonForm(Form):
first_name = CharField()
last_name = CharField()
nick_name = CharField(required=False)
data = {'first_name': u'John', 'last_name': u'Lennon'}
f = OptionalPersonForm(data)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['nick_name'], u'')
self.assertEqual(f.cleaned_data['first_name'], u'John')
self.assertEqual(f.cleaned_data['last_name'], u'Lennon')
# For DateFields, it's set to None.
class OptionalPersonForm(Form):
first_name = CharField()
last_name = CharField()
birth_date = DateField(required=False)
data = {'first_name': u'John', 'last_name': u'Lennon'}
f = OptionalPersonForm(data)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['birth_date'], None)
self.assertEqual(f.cleaned_data['first_name'], u'John')
self.assertEqual(f.cleaned_data['last_name'], u'Lennon')
def test_auto_id(self):
# "auto_id" tells the Form to add an "id" attribute to each form element.
# If it's a string that contains '%s', Django will use that as a format string
# into which the field's name will be inserted. It will also put a <label> around
# the human-readable labels for a field.
p = Person(auto_id='%s_id')
self.assertHTMLEqual(p.as_table(), """<tr><th><label for="first_name_id">First name:</label></th><td><input type="text" name="first_name" id="first_name_id" /></td></tr>
<tr><th><label for="last_name_id">Last name:</label></th><td><input type="text" name="last_name" id="last_name_id" /></td></tr>
<tr><th><label for="birthday_id">Birthday:</label></th><td><input type="text" name="birthday" id="birthday_id" /></td></tr>""")
self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></li>
<li><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></li>
<li><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></li>""")
self.assertHTMLEqual(p.as_p(), """<p><label for="first_name_id">First name:</label> <input type="text" name="first_name" id="first_name_id" /></p>
<p><label for="last_name_id">Last name:</label> <input type="text" name="last_name" id="last_name_id" /></p>
<p><label for="birthday_id">Birthday:</label> <input type="text" name="birthday" id="birthday_id" /></p>""")
def test_auto_id_true(self):
# If auto_id is any True value whose str() does not contain '%s', the "id"
# attribute will be the name of the field.
p = Person(auto_id=True)
self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name">First name:</label> <input type="text" name="first_name" id="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>""")
def test_auto_id_false(self):
# If auto_id is any False value, an "id" attribute won't be output unless it
# was manually entered.
p = Person(auto_id=False)
self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
def test_id_on_field(self):
# In this example, auto_id is False, but the "id" attribute for the "first_name"
# field is given. Also note that field gets a <label>, while the others don't.
p = PersonNew(auto_id=False)
self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
def test_auto_id_on_form_and_field(self):
# If the "id" attribute is specified in the Form and auto_id is True, the "id"
# attribute in the Form gets precedence.
p = PersonNew(auto_id=True)
self.assertHTMLEqual(p.as_ul(), """<li><label for="first_name_id">First name:</label> <input type="text" id="first_name_id" name="first_name" /></li>
<li><label for="last_name">Last name:</label> <input type="text" name="last_name" id="last_name" /></li>
<li><label for="birthday">Birthday:</label> <input type="text" name="birthday" id="birthday" /></li>""")
def test_various_boolean_values(self):
class SignupForm(Form):
email = EmailField()
get_spam = BooleanField()
f = SignupForm(auto_id=False)
self.assertHTMLEqual(str(f['email']), '<input type="text" name="email" />')
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
f = SignupForm({'email': 'test@example.com', 'get_spam': True}, auto_id=False)
self.assertHTMLEqual(str(f['email']), '<input type="text" name="email" value="test@example.com" />')
self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
# 'True' or 'true' should be rendered without a value attribute
f = SignupForm({'email': 'test@example.com', 'get_spam': 'True'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
f = SignupForm({'email': 'test@example.com', 'get_spam': 'true'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input checked="checked" type="checkbox" name="get_spam" />')
# A value of 'False' or 'false' should be rendered unchecked
f = SignupForm({'email': 'test@example.com', 'get_spam': 'False'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
f = SignupForm({'email': 'test@example.com', 'get_spam': 'false'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" />')
def test_widget_output(self):
# Any Field can have a Widget class passed to its constructor:
class ContactForm(Form):
subject = CharField()
message = CharField(widget=Textarea)
f = ContactForm(auto_id=False)
self.assertHTMLEqual(str(f['subject']), '<input type="text" name="subject" />')
self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="10" cols="40"></textarea>')
# as_textarea(), as_text() and as_hidden() are shortcuts for changing the output
# widget type:
self.assertHTMLEqual(f['subject'].as_textarea(), u'<textarea name="subject" rows="10" cols="40"></textarea>')
self.assertHTMLEqual(f['message'].as_text(), u'<input type="text" name="message" />')
self.assertHTMLEqual(f['message'].as_hidden(), u'<input type="hidden" name="message" />')
# The 'widget' parameter to a Field can also be an instance:
class ContactForm(Form):
subject = CharField()
message = CharField(widget=Textarea(attrs={'rows': 80, 'cols': 20}))
f = ContactForm(auto_id=False)
self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="80" cols="20"></textarea>')
# Instance-level attrs are *not* carried over to as_textarea(), as_text() and
# as_hidden():
self.assertHTMLEqual(f['message'].as_text(), u'<input type="text" name="message" />')
f = ContactForm({'subject': 'Hello', 'message': 'I love you.'}, auto_id=False)
self.assertHTMLEqual(f['subject'].as_textarea(), u'<textarea rows="10" cols="40" name="subject">Hello</textarea>')
self.assertHTMLEqual(f['message'].as_text(), u'<input type="text" name="message" value="I love you." />')
self.assertHTMLEqual(f['message'].as_hidden(), u'<input type="hidden" name="message" value="I love you." />')
def test_forms_with_choices(self):
# For a form with a <select>, use ChoiceField:
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
# A subtlety: If one of the choices' value is the empty string and the form is
# unbound, then the <option> for the empty-string choice will get selected="selected".
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('', '------'), ('P', 'Python'), ('J', 'Java')])
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="" selected="selected">------</option>
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
# You can specify widget attributes in the Widget constructor.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(attrs={'class': 'foo'}))
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
# When passing a custom widget instance to ChoiceField, note that setting
# 'choices' on the widget is meaningless. The widget will use the choices
# defined on the Field, not the ones defined on the Widget.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(choices=[('R', 'Ruby'), ('P', 'Perl')], attrs={'class': 'foo'}))
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected="selected">Python</option>
<option value="J">Java</option>
</select>""")
# You can set a ChoiceField's choices after the fact.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField()
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
</select>""")
f.fields['language'].choices = [('P', 'Python'), ('J', 'Java')]
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
    def test_forms_with_radio(self):
        """RadioSelect renders a ChoiceField as a <ul> of radio inputs.

        With auto_id, each radio button gets a distinct id formed by
        appending "_<zero-based index>" to the field id, and the form-level
        <label> points at the *first* button's id.
        """
        # Add widget=RadioSelect to use that widget with a ChoiceField.
        class FrameworkForm(Form):
            name = CharField()
            language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=RadioSelect)
        f = FrameworkForm(auto_id=False)
        self.assertHTMLEqual(str(f['language']), """<ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul>""")
        self.assertHTMLEqual(f.as_table(), """<tr><th>Name:</th><td><input type="text" name="name" /></td></tr>
<tr><th>Language:</th><td><ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li>Name: <input type="text" name="name" /></li>
<li>Language: <ul>
<li><label><input type="radio" name="language" value="P" /> Python</label></li>
<li><label><input type="radio" name="language" value="J" /> Java</label></li>
</ul></li>""")
        # Regarding auto_id and <label>, RadioSelect is a special case. Each radio button
        # gets a distinct ID, formed by appending an underscore plus the button's
        # zero-based index.
        f = FrameworkForm(auto_id='id_%s')
        self.assertHTMLEqual(str(f['language']), """<ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul>""")
        # When RadioSelect is used with auto_id, and the whole form is printed using
        # either as_table() or as_ul(), the label for the RadioSelect will point to the
        # ID of the *first* radio button.
        self.assertHTMLEqual(f.as_table(), """<tr><th><label for="id_name">Name:</label></th><td><input type="text" name="name" id="id_name" /></td></tr>
<tr><th><label for="id_language_0">Language:</label></th><td><ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></li>""")
        self.assertHTMLEqual(f.as_p(), """<p><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p><label for="id_language_0">Language:</label> <ul>
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" /> Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" /> Java</label></li>
</ul></p>""")
def test_form_with_iterable_boundfield(self):
class BeatleForm(Form):
name = ChoiceField(choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')], widget=RadioSelect)
f = BeatleForm(auto_id=False)
self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), """<label><input type="radio" name="name" value="john" /> John</label>
<label><input type="radio" name="name" value="paul" /> Paul</label>
<label><input type="radio" name="name" value="george" /> George</label>
<label><input type="radio" name="name" value="ringo" /> Ringo</label>""")
self.assertHTMLEqual('\n'.join(['<div>%s</div>' % bf for bf in f['name']]), """<div><label><input type="radio" name="name" value="john" /> John</label></div>
<div><label><input type="radio" name="name" value="paul" /> Paul</label></div>
<div><label><input type="radio" name="name" value="george" /> George</label></div>
<div><label><input type="radio" name="name" value="ringo" /> Ringo</label></div>""")
def test_form_with_noniterable_boundfield(self):
# You can iterate over any BoundField, not just those with widget=RadioSelect.
class BeatleForm(Form):
name = CharField()
f = BeatleForm(auto_id=False)
self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), u'<input type="text" name="name" />')
    def test_forms_with_multiple_choice(self):
        """MultipleChoiceField renders as <select multiple="multiple">, one
        <option> per choice, marking each submitted value as selected.
        """
        # MultipleChoiceField is a special case, as its data is required to be a list:
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField()
        f = SongForm(auto_id=False)
        # No choices defined yet -- the select renders empty.
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
</select>""")
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
        f = SongForm(auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P">Paul McCartney</option>
</select>""")
        # Bound data: the submitted value 'P' comes back selected.
        f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
        self.assertHTMLEqual(str(f['name']), '<input type="text" name="name" value="Yesterday" />')
        self.assertHTMLEqual(str(f['composers']), """<select multiple="multiple" name="composers">
<option value="J">John Lennon</option>
<option value="P" selected="selected">Paul McCartney</option>
</select>""")
    def test_hidden_data(self):
        """as_hidden() for multi-value fields renders one <input type="hidden">
        per value (MultipleChoiceField) or per sub-widget (SplitDateTimeField).
        """
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
        # MultipleChoiceField rendered as_hidden() is a special case. Because it can
        # have multiple values, its as_hidden() renders multiple <input type="hidden">
        # tags.
        f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
        self.assertHTMLEqual(f['composers'].as_hidden(), '<input type="hidden" name="composers" value="P" />')
        f = SongForm({'name': 'From Me To You', 'composers': ['P', 'J']}, auto_id=False)
        self.assertHTMLEqual(f['composers'].as_hidden(), """<input type="hidden" name="composers" value="P" />
<input type="hidden" name="composers" value="J" />""")
        # DateTimeField rendered as_hidden() is special too
        class MessageForm(Form):
            when = SplitDateTimeField()
        # SplitDateTimeField binds its two sub-widgets to name_0 / name_1.
        f = MessageForm({'when_0': '1992-01-01', 'when_1': '01:01'})
        self.assertTrue(f.is_valid())
        self.assertHTMLEqual(str(f['when']), '<input type="text" name="when_0" value="1992-01-01" id="id_when_0" /><input type="text" name="when_1" value="01:01" id="id_when_1" />')
        self.assertHTMLEqual(f['when'].as_hidden(), '<input type="hidden" name="when_0" value="1992-01-01" id="id_when_0" /><input type="hidden" name="when_1" value="01:01" id="id_when_1" />')
    def test_mulitple_choice_checkbox(self):
        """CheckboxSelectMultiple renders a MultipleChoiceField as a <ul> of
        checkboxes, checking each value present in the submitted data.

        NOTE(review): "mulitple" in the method name is a typo; kept as-is so
        the test id stays stable for runners and CI configuration.
        """
        # MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
        f = SongForm(auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
        # One submitted value -> one checked box.
        f = SongForm({'composers': ['J']}, auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
        # Both submitted values -> both boxes checked.
        f = SongForm({'composers': ['J', 'P']}, auto_id=False)
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked="checked" type="checkbox" name="composers" value="J" /> John Lennon</label></li>
<li><label><input checked="checked" type="checkbox" name="composers" value="P" /> Paul McCartney</label></li>
</ul>""")
    def test_checkbox_auto_id(self):
        """Each checkbox in a CheckboxSelectMultiple gets a distinct id:
        the field's auto_id plus "_<zero-based index>".
        """
        # Regarding auto_id, CheckboxSelectMultiple is a special case. Each checkbox
        # gets a distinct ID, formed by appending an underscore plus the checkbox's
        # zero-based index.
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
        f = SongForm(auto_id='%s_id')
        self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label for="composers_id_0"><input type="checkbox" name="composers" value="J" id="composers_id_0" /> John Lennon</label></li>
<li><label for="composers_id_1"><input type="checkbox" name="composers" value="P" id="composers_id_1" /> Paul McCartney</label></li>
</ul>""")
def test_multiple_choice_list_data(self):
# Data for a MultipleChoiceField should be a list. QueryDict, MultiValueDict and
# MergeDict (when created as a merge of MultiValueDicts) conveniently work with
# this.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
data = {'name': 'Yesterday', 'composers': ['J', 'P']}
f = SongForm(data)
self.assertEqual(f.errors, {})
data = QueryDict('name=Yesterday&composers=J&composers=P')
f = SongForm(data)
self.assertEqual(f.errors, {})
data = MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P']))
f = SongForm(data)
self.assertEqual(f.errors, {})
data = MergeDict(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])))
f = SongForm(data)
self.assertEqual(f.errors, {})
    def test_multiple_hidden(self):
        """MultipleHiddenInput renders one hidden <input> per value;
        CheckboxSelectMultiple both requires and cleans list-valued data.
        """
        class SongForm(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=CheckboxSelectMultiple)
        # The MultipleHiddenInput widget renders multiple values as hidden fields.
        class SongFormHidden(Form):
            name = CharField()
            composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')], widget=MultipleHiddenInput)
        f = SongFormHidden(MultiValueDict(dict(name=['Yesterday'], composers=['J', 'P'])), auto_id=False)
        self.assertHTMLEqual(f.as_ul(), """<li>Name: <input type="text" name="name" value="Yesterday" /><input type="hidden" name="composers" value="J" />
<input type="hidden" name="composers" value="P" /></li>""")
        # When using CheckboxSelectMultiple, the framework expects a list of input and
        # returns a list of input.
        f = SongForm({'name': 'Yesterday'}, auto_id=False)
        self.assertEqual(f.errors['composers'], [u'This field is required.'])
        f = SongForm({'name': 'Yesterday', 'composers': ['J']}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['composers'], [u'J'])
        self.assertEqual(f.cleaned_data['name'], u'Yesterday')
        f = SongForm({'name': 'Yesterday', 'composers': ['J', 'P']}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['composers'], [u'J', u'P'])
        self.assertEqual(f.cleaned_data['name'], u'Yesterday')
    def test_escaping(self):
        """Validation error messages and labels are HTML-escaped on output
        unless explicitly wrapped in mark_safe().
        """
        # Validation errors are HTML-escaped when output as HTML.
        class EscapingForm(Form):
            special_name = CharField(label="<em>Special</em> Field")
            special_safe_name = CharField(label=mark_safe("<em>Special</em> Field"))
            def clean_special_name(self):
                # Error text echoes user input -> must be escaped on output.
                raise ValidationError("Something's wrong with '%s'" % self.cleaned_data['special_name'])
            def clean_special_safe_name(self):
                # mark_safe() on the message suppresses escaping of the markup.
                raise ValidationError(mark_safe("'<b>%s</b>' is a safe string" % self.cleaned_data['special_safe_name']))
        f = EscapingForm({'special_name': "Nothing to escape", 'special_safe_name': "Nothing to escape"}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Nothing to escape'</li></ul><input type="text" name="special_name" value="Nothing to escape" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b>Nothing to escape</b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="Nothing to escape" /></td></tr>""")
        f = EscapingForm({
            'special_name': "Should escape < & > and <script>alert('xss')</script>",
            'special_safe_name': "<i>Do not escape</i>"
        }, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>Something's wrong with 'Should escape < & > and <script>alert('xss')</script>'</li></ul><input type="text" name="special_name" value="Should escape < & > and <script>alert('xss')</script>" /></td></tr>
<tr><th><em>Special</em> Field:</th><td><ul class="errorlist"><li>'<b><i>Do not escape</i></b>' is a safe string</li></ul><input type="text" name="special_safe_name" value="<i>Do not escape</i>" /></td></tr>""")
    def test_validating_multiple_fields(self):
        """Cross-field validation: errors raised in clean_<field>() attach to
        that field; errors raised in Form.clean() attach to '__all__' and
        render as a top-of-form error row.
        """
        # There are a couple of ways to do multiple-field validation. If you want the
        # validation message to be associated with a particular field, implement the
        # clean_XXX() method on the Form, where XXX is the field name. As in
        # Field.clean(), the clean_XXX() method should return the cleaned value. In the
        # clean_XXX() method, you have access to self.cleaned_data, which is a dictionary
        # of all the data that has been cleaned *so far*, in order by the fields,
        # including the current field (e.g., the field XXX if you're in clean_XXX()).
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean_password2(self):
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError(u'Please make sure your passwords match.')
                return self.cleaned_data['password2']
        f = UserRegistration(auto_id=False)
        self.assertEqual(f.errors, {})
        f = UserRegistration({}, auto_id=False)
        self.assertEqual(f.errors['username'], [u'This field is required.'])
        self.assertEqual(f.errors['password1'], [u'This field is required.'])
        self.assertEqual(f.errors['password2'], [u'This field is required.'])
        # Mismatched passwords -> error lands on password2 specifically.
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
        self.assertEqual(f.errors['password2'], [u'Please make sure your passwords match.'])
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['username'], u'adrian')
        self.assertEqual(f.cleaned_data['password1'], u'foo')
        self.assertEqual(f.cleaned_data['password2'], u'foo')
        # Another way of doing multiple-field validation is by implementing the
        # Form's clean() method. If you do this, any ValidationError raised by that
        # method will not be associated with a particular field; it will have a
        # special-case association with the field named '__all__'.
        # Note that in Form.clean(), you have access to self.cleaned_data, a dictionary of
        # all the fields/values that have *not* raised a ValidationError. Also note
        # Form.clean() is required to return a dictionary of all clean data.
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError(u'Please make sure your passwords match.')
                return self.cleaned_data
        f = UserRegistration(auto_id=False)
        self.assertEqual(f.errors, {})
        f = UserRegistration({}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), """<tr><th>Username:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="password" name="password2" /></td></tr>""")
        self.assertEqual(f.errors['username'], [u'This field is required.'])
        self.assertEqual(f.errors['password1'], [u'This field is required.'])
        self.assertEqual(f.errors['password2'], [u'This field is required.'])
        # Mismatch via Form.clean() -> error keyed on '__all__', rendered in a
        # colspan row / leading <li> rather than next to any one field.
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
        self.assertEqual(f.errors['__all__'], [u'Please make sure your passwords match.'])
        self.assertHTMLEqual(f.as_table(), """<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><input type="text" name="username" value="adrian" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>""")
        self.assertHTMLEqual(f.as_ul(), """<li><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></li>
<li>Username: <input type="text" name="username" value="adrian" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" /></li>
<li>Password2: <input type="password" name="password2" /></li>""")
        f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
        self.assertEqual(f.errors, {})
        self.assertEqual(f.cleaned_data['username'], u'adrian')
        self.assertEqual(f.cleaned_data['password1'], u'foo')
        self.assertEqual(f.cleaned_data['password2'], u'foo')
def test_dynamic_construction(self):
# It's possible to construct a Form dynamically by adding to the self.fields
# dictionary in __init__(). Don't forget to call Form.__init__() within the
# subclass' __init__().
class Person(Form):
first_name = CharField()
last_name = CharField()
def __init__(self, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
self.fields['birthday'] = DateField()
p = Person(auto_id=False)
self.assertHTMLEqual(p.as_table(), """<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /></td></tr>""")
# Instances of a dynamic Form do not persist fields from one Form instance to
# the next.
class MyForm(Form):
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>""")
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>""")
class MyForm(Form):
default_field_1 = CharField()
default_field_2 = CharField()
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>""")
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(my_form.as_table(), """<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" /></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>""")
# Similarly, changes to field attributes do not persist from one Form instance
# to the next.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
def __init__(self, names_required=False, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if names_required:
self.fields['first_name'].required = True
self.fields['first_name'].widget.attrs['class'] = 'required'
self.fields['last_name'].required = True
self.fields['last_name'].widget.attrs['class'] = 'required'
f = Person(names_required=False)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (False, False))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({}, {}))
f = Person(names_required=True)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (True, True))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({'class': 'required'}, {'class': 'required'}))
f = Person(names_required=False)
self.assertEqual(f['first_name'].field.required, f['last_name'].field.required, (False, False))
self.assertEqual(f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs, ({}, {}))
class Person(Form):
first_name = CharField(max_length=30)
last_name = CharField(max_length=30)
def __init__(self, name_max_length=None, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if name_max_length:
self.fields['first_name'].max_length = name_max_length
self.fields['last_name'].max_length = name_max_length
f = Person(name_max_length=None)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (30, 30))
f = Person(name_max_length=20)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (20, 20))
f = Person(name_max_length=None)
self.assertEqual(f['first_name'].field.max_length, f['last_name'].field.max_length, (30, 30))
# Similarly, choices do not persist from one Form instance to the next.
# Refs #15127.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
gender = ChoiceField(choices=(('f', 'Female'), ('m', 'Male')))
def __init__(self, allow_unspec_gender=False, *args, **kwargs):
super(Person, self).__init__(*args, **kwargs)
if allow_unspec_gender:
self.fields['gender'].choices += (('u', 'Unspecified'),)
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
f = Person(allow_unspec_gender=True)
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male'), ('u', 'Unspecified')])
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
def test_validators_independence(self):
""" Test that we are able to modify a form field validators list without polluting
other forms """
from django.core.validators import MaxValueValidator
class MyForm(Form):
myfield = CharField(max_length=25)
f1 = MyForm()
f2 = MyForm()
f1.fields['myfield'].validators[0] = MaxValueValidator(12)
self.assertFalse(f1.fields['myfield'].validators[0] == f2.fields['myfield'].validators[0])
    def test_hidden_widget(self):
        """HiddenInput fields render without a label or their own row -- they
        are appended to the last visible row; their errors are prefixed
        "(Hidden field <name>)" and shown at the top of the form.
        """
        # HiddenInput widgets are displayed differently in the as_table(), as_ul())
        # and as_p() output of a Form -- their verbose names are not displayed, and a
        # separate row is not displayed. They're displayed in the last row of the
        # form, directly after that row's form element.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            hidden_text = CharField(widget=HiddenInput)
            birthday = DateField()
        p = Person(auto_id=False)
        self.assertHTMLEqual(p.as_table(), """<tr><th>First name:</th><td><input type="text" name="first_name" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p>First name: <input type="text" name="first_name" /></p>
<p>Last name: <input type="text" name="last_name" /></p>
<p>Birthday: <input type="text" name="birthday" /><input type="hidden" name="hidden_text" /></p>""")
        # With auto_id set, a HiddenInput still gets an ID, but it doesn't get a label.
        p = Person(auto_id='id_%s')
        self.assertHTMLEqual(p.as_table(), """<tr><th><label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" id="id_last_name" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></li>
<li><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></li>
<li><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<p><label for="id_first_name">First name:</label> <input type="text" name="first_name" id="id_first_name" /></p>
<p><label for="id_last_name">Last name:</label> <input type="text" name="last_name" id="id_last_name" /></p>
<p><label for="id_birthday">Birthday:</label> <input type="text" name="birthday" id="id_birthday" /><input type="hidden" name="hidden_text" id="id_hidden_text" /></p>""")
        # If a field with a HiddenInput has errors, the as_table() and as_ul() output
        # will include the error message(s) with the text "(Hidden field [fieldname]) "
        # prepended. This message is displayed at the top of the output, regardless of
        # its field's order in the form.
        p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'}, auto_id=False)
        self.assertHTMLEqual(p.as_table(), """<tr><td colspan="2"><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></td></tr>
<tr><th>First name:</th><td><input type="text" name="first_name" value="John" /></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" value="Lennon" /></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></td></tr>""")
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul></li>
<li>First name: <input type="text" name="first_name" value="John" /></li>
<li>Last name: <input type="text" name="last_name" value="Lennon" /></li>
<li>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>(Hidden field hidden_text) This field is required.</li></ul>
<p>First name: <input type="text" name="first_name" value="John" /></p>
<p>Last name: <input type="text" name="last_name" value="Lennon" /></p>
<p>Birthday: <input type="text" name="birthday" value="1940-10-9" /><input type="hidden" name="hidden_text" /></p>""")
        # A corner case: It's possible for a form to have only HiddenInputs.
        class TestForm(Form):
            foo = CharField(widget=HiddenInput)
            bar = CharField(widget=HiddenInput)
        p = TestForm(auto_id=False)
        self.assertHTMLEqual(p.as_table(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
        self.assertHTMLEqual(p.as_ul(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
        self.assertHTMLEqual(p.as_p(), '<input type="hidden" name="foo" /><input type="hidden" name="bar" />')
def test_field_order(self):
# A Form's fields are displayed in the same order in which they were defined.
class TestForm(Form):
field1 = CharField()
field2 = CharField()
field3 = CharField()
field4 = CharField()
field5 = CharField()
field6 = CharField()
field7 = CharField()
field8 = CharField()
field9 = CharField()
field10 = CharField()
field11 = CharField()
field12 = CharField()
field13 = CharField()
field14 = CharField()
p = TestForm(auto_id=False)
self.assertHTMLEqual(p.as_table(), """<tr><th>Field1:</th><td><input type="text" name="field1" /></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" /></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" /></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" /></td></tr>
<tr><th>Field5:</th><td><input type="text" name="field5" /></td></tr>
<tr><th>Field6:</th><td><input type="text" name="field6" /></td></tr>
<tr><th>Field7:</th><td><input type="text" name="field7" /></td></tr>
<tr><th>Field8:</th><td><input type="text" name="field8" /></td></tr>
<tr><th>Field9:</th><td><input type="text" name="field9" /></td></tr>
<tr><th>Field10:</th><td><input type="text" name="field10" /></td></tr>
<tr><th>Field11:</th><td><input type="text" name="field11" /></td></tr>
<tr><th>Field12:</th><td><input type="text" name="field12" /></td></tr>
<tr><th>Field13:</th><td><input type="text" name="field13" /></td></tr>
<tr><th>Field14:</th><td><input type="text" name="field14" /></td></tr>""")
    def test_form_html_attributes(self):
        """CharField.max_length becomes the widget's "maxlength" HTML
        attribute and overrides any "maxlength" given in the widget's attrs.
        """
        # Some Field classes have an effect on the HTML attributes of their associated
        # Widget. If you set max_length in a CharField and its associated widget is
        # either a TextInput or PasswordInput, then the widget's rendered HTML will
        # include the "maxlength" attribute.
        class UserRegistration(Form):
            username = CharField(max_length=10)   # uses TextInput by default
            password = CharField(max_length=10, widget=PasswordInput)
            realname = CharField(max_length=10, widget=TextInput) # redundantly define widget, just to test
            address = CharField()                 # no max_length defined here
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>
<li>Realname: <input type="text" name="realname" maxlength="10" /></li>
<li>Address: <input type="text" name="address" /></li>""")
        # If you specify a custom "attrs" that includes the "maxlength" attribute,
        # the Field's max_length attribute will override whatever "maxlength" you specify
        # in "attrs".
        class UserRegistration(Form):
            username = CharField(max_length=10, widget=TextInput(attrs={'maxlength': 20}))
            password = CharField(max_length=10, widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" maxlength="10" /></li>""")
    def test_specifying_labels(self):
        """Check how field labels are generated and rendered.

        Covers explicit 'label' arguments, auto-generated labels, the
        colon-suffix punctuation rule, Unicode labels, and the
        empty-string / None special cases.
        """
        # You can specify the label for a field by using the 'label' argument to a Field
        # class. If you don't specify 'label', Django will use the field name with
        # underscores converted to spaces, and the initial letter capitalized.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='Your username')
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput, label='Password (again)')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Your username: <input type="text" name="username" maxlength="10" /></li>
<li>Password1: <input type="password" name="password1" /></li>
<li>Password (again): <input type="password" name="password2" /></li>""")
        # Labels for as_* methods will only end in a colon if they don't end in other
        # punctuation already (., !, ? or :).
        class Questions(Form):
            q1 = CharField(label='The first question')
            q2 = CharField(label='What is your name?')
            q3 = CharField(label='The answer to life is:')
            q4 = CharField(label='Answer this question!')
            q5 = CharField(label='The last question. Period.')
        self.assertHTMLEqual(Questions(auto_id=False).as_p(), """<p>The first question: <input type="text" name="q1" /></p>
<p>What is your name? <input type="text" name="q2" /></p>
<p>The answer to life is: <input type="text" name="q3" /></p>
<p>Answer this question! <input type="text" name="q4" /></p>
<p>The last question. Period. <input type="text" name="q5" /></p>""")
        # With auto_id enabled, the same punctuated labels end up inside <label> tags.
        self.assertHTMLEqual(Questions().as_p(), """<p><label for="id_q1">The first question:</label> <input type="text" name="q1" id="id_q1" /></p>
<p><label for="id_q2">What is your name?</label> <input type="text" name="q2" id="id_q2" /></p>
<p><label for="id_q3">The answer to life is:</label> <input type="text" name="q3" id="id_q3" /></p>
<p><label for="id_q4">Answer this question!</label> <input type="text" name="q4" id="id_q4" /></p>
<p><label for="id_q5">The last question. Period.</label> <input type="text" name="q5" id="id_q5" /></p>""")
        # A label can be a Unicode object or a bytestring with special characters.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='ŠĐĆŽćžšđ')
            password = CharField(widget=PasswordInput, label=u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), u'<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="text" name="username" maxlength="10" /></li>\n<li>\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111: <input type="password" name="password" /></li>')
        # If a label is set to the empty string for a field, that field won't get a
        # label at all.
        class UserRegistration(Form):
            username = CharField(max_length=10, label='')
            password = CharField(widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li> <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(auto_id='id_%s')
        self.assertHTMLEqual(p.as_ul(), """<li> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>""")
        # If label is None, Django will auto-create the label from the field name.
        # This is the default behavior.
        class UserRegistration(Form):
            username = CharField(max_length=10, label=None)
            password = CharField(widget=PasswordInput)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(auto_id='id_%s')
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_username">Username:</label> <input id="id_username" type="text" name="username" maxlength="10" /></li>
<li><label for="id_password">Password:</label> <input type="password" name="password" id="id_password" /></li>""")
def test_label_suffix(self):
# You can specify the 'label_suffix' argument to a Form class to modify the
# punctuation symbol used at the end of a label. By default, the colon (:) is
# used, and is only appended to the label if the label doesn't already end with a
# punctuation symbol: ., !, ? or :. If you specify a different suffix, it will
# be appended regardless of the last character of the label.
class FavoriteForm(Form):
color = CharField(label='Favorite color?')
animal = CharField(label='Favorite animal')
f = FavoriteForm(auto_id=False)
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal: <input type="text" name="animal" /></li>""")
f = FavoriteForm(auto_id=False, label_suffix='?')
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal? <input type="text" name="animal" /></li>""")
f = FavoriteForm(auto_id=False, label_suffix='')
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" /></li>
<li>Favorite animal <input type="text" name="animal" /></li>""")
f = FavoriteForm(auto_id=False, label_suffix=u'\u2192')
self.assertHTMLEqual(f.as_ul(), u'<li>Favorite color? <input type="text" name="color" /></li>\n<li>Favorite animal\u2192 <input type="text" name="animal" /></li>')
    def test_initial_data(self):
        """Field-level 'initial' data is shown only on unbound forms.

        Initial data is displayed when a Form is rendered with *no* data.
        It is not displayed when a Form is rendered with any data
        (including an empty dictionary), and it is never used as a
        fallback value during validation.
        """
        class UserRegistration(Form):
            username = CharField(max_length=10, initial='django')
            password = CharField(widget=PasswordInput)
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        # Here, we're submitting data, so the initial value will *not* be displayed.
        p = UserRegistration({}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': u''}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': u'foo'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        # An 'initial' value is *not* used as a fallback if data is not provided. In this
        # example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'})
        self.assertEqual(p.errors['username'], [u'This field is required.'])
        self.assertFalse(p.is_valid())
    def test_dynamic_initial_data(self):
        """The 'initial' constructor argument supplies runtime initial data.

        The previous technique dealt with "hard-coded" initial data, but
        it's also possible to specify initial data after the Form class has
        been created, by passing a dict (keyed by field name) as the
        'initial' parameter to the Form constructor. Constructor-level
        initial data takes precedence over field-level 'initial', is
        ignored when data is submitted, and is never used as a validation
        fallback.
        """
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password = CharField(widget=PasswordInput)
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration(initial={'username': 'stephane'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
        # The 'initial' parameter is meaningless if you pass data.
        p = UserRegistration({}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': u''}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        p = UserRegistration({'username': u'foo'}, initial={'username': 'django'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>""")
        # A dynamic 'initial' value is *not* used as a fallback if data is not provided.
        # In this example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'}, initial={'username': 'django'})
        self.assertEqual(p.errors['username'], [u'This field is required.'])
        self.assertFalse(p.is_valid())
        # If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
        # then the latter will get precedence.
        class UserRegistration(Form):
            username = CharField(max_length=10, initial='django')
            password = CharField(widget=PasswordInput)
        p = UserRegistration(initial={'username': 'babik'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="babik" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>""")
    def test_callable_initial_data(self):
        """Initial data may be a callable; it is invoked at render time.

        The previous technique dealt with raw values as initial data, but
        it's also possible to specify callables, both in the 'initial'
        constructor parameter and in field-level 'initial' arguments.
        Callable initial data follows the same rules as raw initial data:
        shown only on unbound forms, never a validation fallback, and
        constructor-level values take precedence over field-level ones.
        """
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password = CharField(widget=PasswordInput)
            options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')])
        # We need to define functions that get called later.
        def initial_django():
            return 'django'
        def initial_stephane():
            return 'stephane'
        def initial_options():
            return ['f','b']
        def initial_other_options():
            return ['b','w']
        # Here, we're not submitting any data, so the initial value will be displayed.
        p = UserRegistration(initial={'username': initial_django, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
        # The 'initial' parameter is meaningless if you pass data.
        p = UserRegistration({}, initial={'username': initial_django, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>""")
        p = UserRegistration({'username': u''}, initial={'username': initial_django}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li><ul class="errorlist"><li>This field is required.</li></ul>Username: <input type="text" name="username" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>""")
        p = UserRegistration({'username': u'foo', 'options':['f','b']}, initial={'username': initial_django}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
        # A callable 'initial' value is *not* used as a fallback if data is not provided.
        # In this example, we don't provide a value for 'username', and the form raises a
        # validation error rather than using the initial value for 'username'.
        p = UserRegistration({'password': 'secret'}, initial={'username': initial_django, 'options': initial_options})
        self.assertEqual(p.errors['username'], [u'This field is required.'])
        self.assertFalse(p.is_valid())
        # If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
        # then the latter will get precedence.
        class UserRegistration(Form):
            username = CharField(max_length=10, initial=initial_django)
            password = CharField(widget=PasswordInput)
            options = MultipleChoiceField(choices=[('f','foo'),('b','bar'),('w','whiz')], initial=initial_other_options)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="django" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w" selected="selected">whiz</option>
</select></li>""")
        p = UserRegistration(initial={'username': initial_stephane, 'options': initial_options}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="stephane" maxlength="10" /></li>
<li>Password: <input type="password" name="password" /></li>
<li>Options: <select multiple="multiple" name="options">
<option value="f" selected="selected">foo</option>
<option value="b" selected="selected">bar</option>
<option value="w">whiz</option>
</select></li>""")
def test_boundfield_values(self):
# It's possible to get to the value which would be used for rendering
# the widget for a field by using the BoundField's value method.
class UserRegistration(Form):
username = CharField(max_length=10, initial='djangonaut')
password = CharField(widget=PasswordInput)
unbound = UserRegistration()
bound = UserRegistration({'password': 'foo'})
self.assertEqual(bound['username'].value(), None)
self.assertEqual(unbound['username'].value(), 'djangonaut')
self.assertEqual(bound['password'].value(), 'foo')
self.assertEqual(unbound['password'].value(), None)
    def test_help_text(self):
        """The 'help_text' field argument renders descriptive text.

        Help text appears after the widget in as_ul/as_p/as_table output,
        is shown whether or not the form is bound, is skipped for hidden
        fields, and may contain arbitrary Unicode characters.
        """
        # You can specify descriptive text for a field by using the 'help_text' argument.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='e.g., user@example.com')
            password = CharField(widget=PasswordInput, help_text='Choose wisely.')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></li>""")
        self.assertHTMLEqual(p.as_p(), """<p>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></p>
<p>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></p>""")
        self.assertHTMLEqual(p.as_table(), """<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /><br /><span class="helptext">e.g., user@example.com</span></td></tr>
<tr><th>Password:</th><td><input type="password" name="password" /><br /><span class="helptext">Choose wisely.</span></td></tr>""")
        # The help text is displayed whether or not data is provided for the form.
        p = UserRegistration({'username': u'foo'}, auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>Password: <input type="password" name="password" /> <span class="helptext">Choose wisely.</span></li>""")
        # help_text is not displayed for hidden fields. It can be used for documentation
        # purposes, though.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='e.g., user@example.com')
            password = CharField(widget=PasswordInput)
            next = CharField(widget=HiddenInput, initial='/', help_text='Redirect destination')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), """<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">e.g., user@example.com</span></li>
<li>Password: <input type="password" name="password" /><input type="hidden" name="next" value="/" /></li>""")
        # Help text can include arbitrary Unicode characters.
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text='ŠĐĆŽćžšđ')
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(p.as_ul(), u'<li>Username: <input type="text" name="username" maxlength="10" /> <span class="helptext">\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111</span></li>')
def test_subclassing_forms(self):
# You can subclass a Form to add fields. The resulting form subclass will have
# all of the fields of the parent Form, plus whichever fields you define in the
# subclass.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
class Musician(Person):
instrument = CharField()
p = Person(auto_id=False)
self.assertHTMLEqual(p.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>""")
m = Musician(auto_id=False)
self.assertHTMLEqual(m.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>""")
# Yes, you can subclass multiple forms. The fields are added in the order in
# which the parent classes are listed.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
class Instrument(Form):
instrument = CharField()
class Beatle(Person, Instrument):
haircut_type = CharField()
b = Beatle(auto_id=False)
self.assertHTMLEqual(b.as_ul(), """<li>First name: <input type="text" name="first_name" /></li>
<li>Last name: <input type="text" name="last_name" /></li>
<li>Birthday: <input type="text" name="birthday" /></li>
<li>Instrument: <input type="text" name="instrument" /></li>
<li>Haircut type: <input type="text" name="haircut_type" /></li>""")
    def test_forms_with_prefixes(self):
        """The 'prefix' argument namespaces a form's HTML field names.

        Sometimes it's necessary to have multiple forms display on the same
        HTML page, or multiple copies of the same form. We can accomplish
        this with form prefixes: the keyword argument 'prefix' to the Form
        constructor is prepended to each HTML form field name, acting as a
        namespace. Data dictionaries must use the prefixed keys, and the
        separator can be customized by overriding add_prefix().
        """
        # Notice that in the data argument, each field's key has the prefix, in this
        # case 'person1', prepended to the actual field name.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            birthday = DateField()
        data = {
            'person1-first_name': u'John',
            'person1-last_name': u'Lennon',
            'person1-birthday': u'1940-10-9'
        }
        p = Person(data, prefix='person1')
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_person1-first_name">First name:</label> <input type="text" name="person1-first_name" value="John" id="id_person1-first_name" /></li>
<li><label for="id_person1-last_name">Last name:</label> <input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" /></li>
<li><label for="id_person1-birthday">Birthday:</label> <input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" /></li>""")
        self.assertHTMLEqual(str(p['first_name']), '<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" />')
        self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" />')
        self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" />')
        # cleaned_data keys are *not* prefixed; the prefix is an HTML-level concept.
        self.assertEqual(p.errors, {})
        self.assertTrue(p.is_valid())
        self.assertEqual(p.cleaned_data['first_name'], u'John')
        self.assertEqual(p.cleaned_data['last_name'], u'Lennon')
        self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
        # Let's try submitting some bad data to make sure form.errors and field.errors
        # work as expected.
        data = {
            'person1-first_name': u'',
            'person1-last_name': u'',
            'person1-birthday': u''
        }
        p = Person(data, prefix='person1')
        self.assertEqual(p.errors['first_name'], [u'This field is required.'])
        self.assertEqual(p.errors['last_name'], [u'This field is required.'])
        self.assertEqual(p.errors['birthday'], [u'This field is required.'])
        self.assertEqual(p['first_name'].errors, [u'This field is required.'])
        # Field lookup on the form uses the *unprefixed* name; the prefixed
        # name must raise KeyError.
        try:
            p['person1-first_name'].errors
            self.fail('Attempts to access non-existent fields should fail.')
        except KeyError:
            pass
        # In this example, the data doesn't have a prefix, but the form requires it, so
        # the form doesn't "see" the fields.
        data = {
            'first_name': u'John',
            'last_name': u'Lennon',
            'birthday': u'1940-10-9'
        }
        p = Person(data, prefix='person1')
        self.assertEqual(p.errors['first_name'], [u'This field is required.'])
        self.assertEqual(p.errors['last_name'], [u'This field is required.'])
        self.assertEqual(p.errors['birthday'], [u'This field is required.'])
        # With prefixes, a single data dictionary can hold data for multiple instances
        # of the same form.
        data = {
            'person1-first_name': u'John',
            'person1-last_name': u'Lennon',
            'person1-birthday': u'1940-10-9',
            'person2-first_name': u'Jim',
            'person2-last_name': u'Morrison',
            'person2-birthday': u'1943-12-8'
        }
        p1 = Person(data, prefix='person1')
        self.assertTrue(p1.is_valid())
        self.assertEqual(p1.cleaned_data['first_name'], u'John')
        self.assertEqual(p1.cleaned_data['last_name'], u'Lennon')
        self.assertEqual(p1.cleaned_data['birthday'], datetime.date(1940, 10, 9))
        p2 = Person(data, prefix='person2')
        self.assertTrue(p2.is_valid())
        self.assertEqual(p2.cleaned_data['first_name'], u'Jim')
        self.assertEqual(p2.cleaned_data['last_name'], u'Morrison')
        self.assertEqual(p2.cleaned_data['birthday'], datetime.date(1943, 12, 8))
        # By default, forms append a hyphen between the prefix and the field name, but a
        # form can alter that behavior by implementing the add_prefix() method. This
        # method takes a field name and returns the prefixed field, according to
        # self.prefix.
        class Person(Form):
            first_name = CharField()
            last_name = CharField()
            birthday = DateField()
            def add_prefix(self, field_name):
                # Falls back to the bare field name when no prefix is set.
                return self.prefix and '%s-prefix-%s' % (self.prefix, field_name) or field_name
        p = Person(prefix='foo')
        self.assertHTMLEqual(p.as_ul(), """<li><label for="id_foo-prefix-first_name">First name:</label> <input type="text" name="foo-prefix-first_name" id="id_foo-prefix-first_name" /></li>
<li><label for="id_foo-prefix-last_name">Last name:</label> <input type="text" name="foo-prefix-last_name" id="id_foo-prefix-last_name" /></li>
<li><label for="id_foo-prefix-birthday">Birthday:</label> <input type="text" name="foo-prefix-birthday" id="id_foo-prefix-birthday" /></li>""")
        data = {
            'foo-prefix-first_name': u'John',
            'foo-prefix-last_name': u'Lennon',
            'foo-prefix-birthday': u'1940-10-9'
        }
        p = Person(data, prefix='foo')
        self.assertTrue(p.is_valid())
        self.assertEqual(p.cleaned_data['first_name'], u'John')
        self.assertEqual(p.cleaned_data['last_name'], u'Lennon')
        self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
    def test_forms_with_null_boolean(self):
        """NullBooleanField maps data values onto its three-option select.

        NullBooleanField is a bit of a special case because its
        presentation (widget) is different than its data: the widget uses
        '1'/'2'/'3' option values for Unknown/Yes/No. This is handled
        transparently, for both string and real boolean inputs.
        """
        class Person(Form):
            name = CharField()
            is_cool = NullBooleanField()
        # No value submitted: 'Unknown' is selected by default.
        p = Person({'name': u'Joe'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
        # String '1' maps to Unknown.
        p = Person({'name': u'Joe', 'is_cool': u'1'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select>""")
        # String '2' maps to Yes.
        p = Person({'name': u'Joe', 'is_cool': u'2'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
        # String '3' maps to No.
        p = Person({'name': u'Joe', 'is_cool': u'3'}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
        # A real boolean True selects Yes.
        p = Person({'name': u'Joe', 'is_cool': True}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2" selected="selected">Yes</option>
<option value="3">No</option>
</select>""")
        # A real boolean False selects No.
        p = Person({'name': u'Joe', 'is_cool': False}, auto_id=False)
        self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="1">Unknown</option>
<option value="2">Yes</option>
<option value="3" selected="selected">No</option>
</select>""")
    def test_forms_with_file_fields(self):
        """FileField validation reads from the 'files' argument.

        FileFields are a special case because they take their data from
        request.FILES, not request.POST. Missing, empty and non-file
        values each produce a distinct validation error; a well-formed
        upload (including one with non-ASCII name/content) validates.
        """
        class FileForm(Form):
            file1 = FileField()
        # Unbound form: plain widget, no errors.
        f = FileForm(auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
        # Bound with no file at all: required-field error.
        f = FileForm(data={}, files={}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="file" name="file1" /></td></tr>')
        # A zero-length upload is rejected as empty.
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', '')}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>The submitted file is empty.</li></ul><input type="file" name="file1" /></td></tr>')
        # A non-file value (e.g. the field ended up in POST because the form
        # lacked enctype="multipart/form-data") is rejected with a hint.
        f = FileForm(data={}, files={'file1': 'something that is not a file'}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><ul class="errorlist"><li>No file was submitted. Check the encoding type on the form.</li></ul><input type="file" name="file1" /></td></tr>')
        # A proper upload validates cleanly.
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', 'some content')}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
        self.assertTrue(f.is_valid())
        # Non-ASCII filenames and content are accepted too.
        f = FileForm(data={}, files={'file1': SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह')}, auto_id=False)
        self.assertHTMLEqual(f.as_table(), '<tr><th>File1:</th><td><input type="file" name="file1" /></td></tr>')
    def test_basic_processing_in_view(self):
        """End-to-end form handling as a typical view function would do it.

        Exercises the canonical GET/POST pattern: render an empty form on
        GET, redisplay with errors on an invalid POST (including a
        form-wide clean() error), and report cleaned_data on a valid POST.
        """
        class UserRegistration(Form):
            username = CharField(max_length=10)
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                # Cross-field validation: both passwords, when present, must match.
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError(u'Please make sure your passwords match.')
                return self.cleaned_data
        def my_function(method, post_data):
            # Minimal stand-in for a Django view: bind on POST, otherwise
            # render an unbound form.
            if method == 'POST':
                form = UserRegistration(post_data, auto_id=False)
            else:
                form = UserRegistration(auto_id=False)
            if form.is_valid():
                return 'VALID: %r' % form.cleaned_data
            t = Template('<form action="" method="post">\n<table>\n{{ form }}\n</table>\n<input type="submit" />\n</form>')
            return t.render(Context({'form': form}))
        # Case 1: GET (an empty form, with no errors).
        self.assertHTMLEqual(my_function('GET', {}), """<form action="" method="post">
<table>
<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
</table>
<input type="submit" />
</form>""")
        # Case 2: POST with erroneous data (a redisplayed form, with errors).
        self.assertHTMLEqual(my_function('POST', {'username': 'this-is-a-long-username', 'password1': 'foo', 'password2': 'bar'}), """<form action="" method="post">
<table>
<tr><td colspan="2"><ul class="errorlist"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><ul class="errorlist"><li>Ensure this value has at most 10 characters (it has 23).</li></ul><input type="text" name="username" value="this-is-a-long-username" maxlength="10" /></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" /></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" /></td></tr>
</table>
<input type="submit" />
</form>""")
        # Case 3: POST with valid data (the success message).
        self.assertHTMLEqual(my_function('POST', {'username': 'adrian', 'password1': 'secret', 'password2': 'secret'}), "VALID: {'username': u'adrian', 'password1': u'secret', 'password2': u'secret'}")
    def test_templates_with_forms(self):
        """Render forms by hand inside Django templates: per-field access,
        labels, label_tag, help_text and non-field errors."""
        class UserRegistration(Form):
            username = CharField(max_length=10, help_text="Good luck picking a username that doesn't already exist.")
            password1 = CharField(widget=PasswordInput)
            password2 = CharField(widget=PasswordInput)
            def clean(self):
                # Cross-field validation: both passwords present and equal.
                if self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and self.cleaned_data['password1'] != self.cleaned_data['password2']:
                    raise ValidationError(u'Please make sure your passwords match.')
                return self.cleaned_data
        # You have full flexibility in displaying form fields in a template. Just pass a
        # Form instance to the template, and use "dot" access to refer to individual
        # fields. Note, however, that this flexibility comes with the responsibility of
        # displaying all the errors, including any that might not be associated with a
        # particular field.
        t = Template('''<form action="">
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django'}, auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password: <input type="password" name="password1" /></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        # Use form.[field].label to output a field's label. You can specify the label for
        # a field by using the 'label' argument to a Field class. If you don't specify
        # 'label', Django will use the field name with underscores converted to spaces,
        # and the initial letter capitalized.
        t = Template('''<form action="">
<p><label>{{ form.username.label }}: {{ form.username }}</label></p>
<p><label>{{ form.password1.label }}: {{ form.password1 }}</label></p>
<p><label>{{ form.password2.label }}: {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p><label>Username: <input type="text" name="username" maxlength="10" /></label></p>
<p><label>Password1: <input type="password" name="password1" /></label></p>
<p><label>Password2: <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        # Use form.[field].label_tag to output a field's label with a <label> tag
        # wrapped around it, but *only* if the given field has an "id" attribute.
        # Recall from above that passing the "auto_id" argument to a Form gives each
        # field an "id" attribute.
        t = Template('''<form action="">
<p>{{ form.username.label_tag }}: {{ form.username }}</p>
<p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /></p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>""")
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id='id_%s')})), """<form action="">
<p><label for="id_username">Username</label>: <input id="id_username" type="text" name="username" maxlength="10" /></p>
<p><label for="id_password1">Password1</label>: <input type="password" name="password1" id="id_password1" /></p>
<p><label for="id_password2">Password2</label>: <input type="password" name="password2" id="id_password2" /></p>
<input type="submit" />
</form>""")
        # Use form.[field].help_text to output a field's help text. If the given field
        # does not have help text, nothing will be output.
        t = Template('''<form action="">
<p>{{ form.username.label_tag }}: {{ form.username }}<br />{{ form.username.help_text }}</p>
<p>{{ form.password1.label_tag }}: {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }}: {{ form.password2 }}</p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form action="">
<p>Username: <input type="text" name="username" maxlength="10" /><br />Good luck picking a username that doesn't already exist.</p>
<p>Password1: <input type="password" name="password1" /></p>
<p>Password2: <input type="password" name="password2" /></p>
<input type="submit" />
</form>""")
        self.assertEqual(Template('{{ form.password1.help_text }}').render(Context({'form': UserRegistration(auto_id=False)})), u'')
        # The label_tag() method takes an optional attrs argument: a dictionary of HTML
        # attributes to add to the <label> tag.
        f = UserRegistration(auto_id='id_%s')
        form_output = []
        for bf in f:
            form_output.append(bf.label_tag(attrs={'class': 'pretty'}))
        expected_form_output = [
            '<label for="id_username" class="pretty">Username</label>',
            '<label for="id_password1" class="pretty">Password1</label>',
            '<label for="id_password2" class="pretty">Password2</label>',
        ]
        self.assertEqual(len(form_output), len(expected_form_output))
        for i in range(len(form_output)):
            self.assertHTMLEqual(form_output[i], expected_form_output[i])
        # To display the errors that aren't associated with a particular field -- e.g.,
        # the errors caused by Form.clean() -- use {{ form.non_field_errors }} in the
        # template. If used on its own, it is displayed as a <ul> (or an empty string, if
        # the list of errors is empty). You can also use it in {% if %} statements.
        t = Template('''<form action="">
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)})), """<form action="">
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
        t = Template('''<form action="">
{{ form.non_field_errors }}
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" />
</form>''')
        self.assertHTMLEqual(t.render(Context({'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)})), """<form action="">
<ul class="errorlist"><li>Please make sure your passwords match.</li></ul>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" /></label></p>
<p><label>Password: <input type="password" name="password1" /></label></p>
<p><label>Password (again): <input type="password" name="password2" /></label></p>
<input type="submit" />
</form>""")
    def test_empty_permitted(self):
        """empty_permitted=True lets a completely empty form validate
        successfully; any non-empty data re-enables full validation."""
        # Sometimes (pretty much in formsets) we want to allow a form to pass validation
        # if it is completely empty. We can accomplish this by using the empty_permitted
        # argument to a form constructor.
        class SongForm(Form):
            artist = CharField()
            name = CharField()
        # First let's show what happens if empty_permitted=False (the default):
        data = {'artist': '', 'song': ''}
        form = SongForm(data, empty_permitted=False)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': [u'This field is required.'], 'artist': [u'This field is required.']})
        try:
            form.cleaned_data
            self.fail('Attempts to access cleaned_data when validation fails should fail.')
        except AttributeError:
            pass
        # Now let's show what happens when empty_permitted=True and the form is empty.
        form = SongForm(data, empty_permitted=True)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.errors, {})
        self.assertEqual(form.cleaned_data, {})
        # But if we fill in data for one of the fields, the form is no longer empty and
        # the whole thing must pass validation.
        data = {'artist': 'The Doors', 'song': ''}
        form = SongForm(data, empty_permitted=False)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': [u'This field is required.']})
        try:
            form.cleaned_data
            self.fail('Attempts to access cleaned_data when validation fails should fail.')
        except AttributeError:
            pass
        # If a field is not given in the data then None is returned for its data. Lets
        # make sure that when checking for empty_permitted that None is treated
        # accordingly.
        data = {'artist': None, 'song': ''}
        form = SongForm(data, empty_permitted=True)
        self.assertTrue(form.is_valid())
        # However, we *really* need to be sure we are checking for None as any data in
        # initial that returns False on a boolean call needs to be treated literally.
        class PriceForm(Form):
            amount = FloatField()
            qty = IntegerField()
        data = {'amount': '0.0', 'qty': ''}
        form = PriceForm(data, initial={'amount': 0.0}, empty_permitted=True)
        self.assertTrue(form.is_valid())
def test_extracting_hidden_and_visible(self):
class SongForm(Form):
token = CharField(widget=HiddenInput)
artist = CharField()
name = CharField()
form = SongForm()
self.assertEqual([f.name for f in form.hidden_fields()], ['token'])
self.assertEqual([f.name for f in form.visible_fields()], ['artist', 'name'])
    def test_hidden_initial_gets_id(self):
        """show_hidden_initial renders an extra hidden input whose id is the
        visible widget's id prefixed with 'initial-'."""
        class MyForm(Form):
            field1 = CharField(max_length=50, show_hidden_initial=True)
        self.assertHTMLEqual(MyForm().as_table(), '<tr><th><label for="id_field1">Field1:</label></th><td><input id="id_field1" type="text" name="field1" maxlength="50" /><input type="hidden" name="initial-field1" id="initial-id_field1" /></td></tr>')
    def test_error_html_required_html_classes(self):
        """error_css_class / required_css_class set on a form instance are
        added to row containers in as_ul(), as_p() and as_table() output."""
        class Person(Form):
            name = CharField()
            is_cool = NullBooleanField()
            email = EmailField(required=False)
            age = IntegerField()
        # Bind empty data so required fields produce errors.
        p = Person({})
        p.error_css_class = 'error'
        p.required_css_class = 'required'
        self.assertHTMLEqual(p.as_ul(), """<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></li>
<li class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></li>
<li><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></li>
<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></li>""")
        self.assertHTMLEqual(p.as_p(), """<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" /></p>
<p class="required"><label for="id_is_cool">Is cool:</label> <select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></p>
<p><label for="id_email">Email:</label> <input type="text" name="email" id="id_email" /></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></p>""")
        self.assertHTMLEqual(p.as_table(), """<tr class="required error"><th><label for="id_name">Name:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="name" id="id_name" /></td></tr>
<tr class="required"><th><label for="id_is_cool">Is cool:</label></th><td><select name="is_cool" id="id_is_cool">
<option value="1" selected="selected">Unknown</option>
<option value="2">Yes</option>
<option value="3">No</option>
</select></td></tr>
<tr><th><label for="id_email">Email:</label></th><td><input type="text" name="email" id="id_email" /></td></tr>
<tr class="required error"><th><label for="id_age">Age:</label></th><td><ul class="errorlist"><li>This field is required.</li></ul><input type="text" name="age" id="id_age" /></td></tr>""")
    def test_label_split_datetime_not_displayed(self):
        """A SplitDateTimeField rendered with a hidden widget emits only the
        two hidden inputs -- no visible label row is produced."""
        class EventForm(Form):
            happened_at = SplitDateTimeField(widget=widgets.SplitHiddenDateTimeWidget)
        form = EventForm()
        self.assertHTMLEqual(form.as_ul(), u'<input type="hidden" name="happened_at_0" id="id_happened_at_0" /><input type="hidden" name="happened_at_1" id="id_happened_at_1" />')
    def test_multivalue_field_validation(self):
        """Validators attached to a MultiValueField run against the
        compressed value; per-subfield errors are also collected."""
        def bad_names(value):
            # Validator applied to the compressed (joined) value.
            if value == 'bad value':
                raise ValidationError('bad value not allowed')
        class NameField(MultiValueField):
            def __init__(self, fields=(), *args, **kwargs):
                # NOTE: the incoming ``fields`` argument is deliberately
                # ignored; this field always uses its own CharField pair.
                fields = (CharField(label='First name', max_length=10),
                          CharField(label='Last name', max_length=10))
                super(NameField, self).__init__(fields=fields, *args, **kwargs)
            def compress(self, data_list):
                return ' '.join(data_list)
        class NameForm(Form):
            name = NameField(validators=[bad_names])
        form = NameForm(data={'name' : ['bad', 'value']})
        form.full_clean()
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': [u'bad value not allowed']})
        form = NameForm(data={'name' : ['should be overly', 'long for the field names']})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'name': [u'Ensure this value has at most 10 characters (it has 16).',
                                                u'Ensure this value has at most 10 characters (it has 24).']})
        form = NameForm(data={'name' : ['fname', 'lname']})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'name' : 'fname lname'})
| bsd-3-clause |
tahmid-tanzim/youtube-dl | youtube_dl/extractor/cnet.py | 101 | 3246 | # coding: utf-8
from __future__ import unicode_literals
import json
from .common import InfoExtractor
from ..utils import (
ExtractorError,
)
class CNETIE(InfoExtractor):
    """Extractor for cnet.com video pages.

    Parses the JSON blob embedded in the page's player <div> and delegates
    the actual media resolution to ThePlatform via a url_transparent result.
    """
    _VALID_URL = r'https?://(?:www\.)?cnet\.com/videos/(?P<id>[^/]+)/'
    _TESTS = [{
        'url': 'http://www.cnet.com/videos/hands-on-with-microsofts-windows-8-1-update/',
        'info_dict': {
            'id': '56f4ea68-bd21-4852-b08c-4de5b8354c60',
            'ext': 'flv',
            'title': 'Hands-on with Microsoft Windows 8.1 Update',
            'description': 'The new update to the Windows 8 OS brings improved performance for mouse and keyboard users.',
            'thumbnail': 're:^http://.*/flmswindows8.jpg$',
            'uploader_id': '6085384d-619e-11e3-b231-14feb5ca9861',
            'uploader': 'Sarah Mitroff',
        },
        'params': {
            'skip_download': 'requires rtmpdump',
        }
    }, {
        'url': 'http://www.cnet.com/videos/whiny-pothole-tweets-at-local-government-when-hit-by-cars-tomorrow-daily-187/',
        'info_dict': {
            'id': '56527b93-d25d-44e3-b738-f989ce2e49ba',
            'ext': 'flv',
            'description': 'Khail and Ashley wonder what other civic woes can be solved by self-tweeting objects, investigate a new kind of VR camera and watch an origami robot self-assemble, walk, climb, dig and dissolve. #TDPothole',
            'uploader_id': 'b163284d-6b73-44fc-b3e6-3da66c392d40',
            'uploader': 'Ashley Esqueda',
            'title': 'Whiny potholes tweet at local government when hit by cars (Tomorrow Daily 187)',
        },
        'params': {
            'skip_download': True,  # requires rtmpdump
        },
    }]

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)

        # The player configuration is embedded as a JSON blob in a
        # data attribute on the player <div>.
        data_json = self._html_search_regex(
            r"<div class=\"cnetVideoPlayer\"\s+.*?data-cnet-video-options='([^']+)'",
            webpage, 'data json')
        data = json.loads(data_json)
        # Some pages expose a single 'video' object, others a 'videos' list.
        # Use .get() so a missing key falls through to the next candidate
        # (and ultimately to ExtractorError) instead of raising a bare
        # KeyError/IndexError.
        vdata = data.get('video')
        if not vdata:
            videos = data.get('videos') or []
            vdata = videos[0] if videos else None
        if not vdata:
            raise ExtractorError('Cannot find video data')

        # Account used to build the ThePlatform link below.
        mpx_account = data['config']['players']['default']['mpx_account']
        # Prefer the RTMP variant; fall back to HDS.
        vid = vdata['files'].get('rtmp', vdata['files']['hds'])
        tp_link = 'http://link.theplatform.com/s/%s/%s' % (mpx_account, vid)

        video_id = vdata['id']
        title = vdata.get('headline')
        if title is None:
            title = vdata.get('title')
        if title is None:
            raise ExtractorError('Cannot find title!')
        thumbnail = vdata.get('image', {}).get('path')
        author = vdata.get('author')
        if author:
            uploader = '%s %s' % (author['firstName'], author['lastName'])
            uploader_id = author.get('id')
        else:
            uploader = None
            uploader_id = None

        # url_transparent: ThePlatform extractor resolves the media while
        # the metadata below overrides/supplements its result.
        return {
            '_type': 'url_transparent',
            'url': tp_link,
            'id': video_id,
            'display_id': display_id,
            'title': title,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'thumbnail': thumbnail,
        }
| unlicense |
tumbl3w33d/ansible | lib/ansible/modules/cloud/azure/azure_rm_mariadbserver.py | 39 | 13580 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
# Copyright (c) 2019 Matti Ranta, (@techknowlogick)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mariadbserver
version_added: "2.8"
short_description: Manage MariaDB Server instance
description:
- Create, update and delete instance of MariaDB Server.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
name:
description:
- The name of the server.
required: True
sku:
description:
- The SKU (pricing tier) of the server.
suboptions:
name:
description:
- The name of the SKU, typically, tier + family + cores, for example C(B_Gen4_1), C(GP_Gen5_8).
tier:
description:
- The tier of the particular SKU, for example C(Basic).
choices:
- basic
- standard
capacity:
description:
- The scale up/out capacity, representing server's compute units.
type: int
size:
description:
- The size code, to be interpreted by resource as appropriate.
location:
description:
- Resource location. If not set, location from the resource group will be used as default.
storage_mb:
description:
- The maximum storage allowed for a server.
type: int
version:
description:
- Server version.
choices:
- 10.2
enforce_ssl:
description:
- Enable SSL enforcement.
type: bool
default: False
admin_username:
description:
- The administrator's login name of a server. Can only be specified when the server is being created (and is required for creation).
admin_password:
description:
- The password of the administrator login.
create_mode:
description:
- Create mode of SQL Server.
default: Default
state:
description:
- Assert the state of the MariaDB Server. Use C(present) to create or update a server and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
- Matti Ranta (@techknowlogick)
'''
EXAMPLES = '''
- name: Create (or update) MariaDB Server
azure_rm_mariadbserver:
resource_group: myResourceGroup
name: testserver
sku:
name: B_Gen5_1
tier: Basic
location: eastus
storage_mb: 1024
enforce_ssl: True
version: 10.2
admin_username: cloudsa
admin_password: password
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforMariaDB/servers/mariadbsrv1b6dd89593
version:
description:
- Server version. Possible values include C(10.2).
returned: always
type: str
sample: 10.2
state:
description:
- A state of a server that is visible to user. Possible values include C(Ready), C(Dropping), C(Disabled).
returned: always
type: str
sample: Ready
fully_qualified_domain_name:
description:
- The fully qualified domain name of a server.
returned: always
type: str
sample: mariadbsrv1b6dd89593.mariadb.database.azure.com
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from azure.mgmt.rdbms.mariadb import MariaDBManagementClient
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
    """Enumerated action codes recorded by exec_module to decide what to do."""
    NoAction = 0
    Create = 1
    Update = 2
    Delete = 3
class AzureRMMariaDbServers(AzureRMModuleBase):
    """Configuration class for an Azure RM MariaDB Server resource.

    Translates module arguments into the Azure SDK's server-parameters
    shape, decides whether to create, update, delete or leave the server
    untouched, and reports the resulting resource facts.
    """

    def __init__(self):
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str',
                required=True
            ),
            sku=dict(
                type='dict'
            ),
            location=dict(
                type='str'
            ),
            storage_mb=dict(
                type='int'
            ),
            version=dict(
                type='str',
                choices=['10.2']
            ),
            enforce_ssl=dict(
                type='bool',
                default=False
            ),
            create_mode=dict(
                type='str',
                default='Default'
            ),
            admin_username=dict(
                type='str'
            ),
            admin_password=dict(
                type='str',
                no_log=True
            ),
            state=dict(
                type='str',
                default='present',
                choices=['present', 'absent']
            )
        )

        self.resource_group = None
        self.name = None
        # Accumulates the SDK-shaped request body built in exec_module.
        self.parameters = dict()
        self.tags = None

        self.results = dict(changed=False)
        self.state = None
        self.to_do = Actions.NoAction

        super(AzureRMMariaDbServers, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                    supports_check_mode=True,
                                                    supports_tags=True)

    def exec_module(self, **kwargs):
        """Main module execution method"""

        # Copy known arguments onto self; map the remaining module options
        # into the nested 'parameters' structure the SDK expects.
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            if hasattr(self, key):
                setattr(self, key, kwargs[key])
            elif kwargs[key] is not None:
                if key == "sku":
                    ev = kwargs[key]
                    if 'tier' in ev:
                        # Normalize the module's lowercase tier names to the
                        # capitalized values the Azure API requires.
                        if ev['tier'] == 'basic':
                            ev['tier'] = 'Basic'
                        elif ev['tier'] == 'standard':
                            ev['tier'] = 'Standard'
                    self.parameters["sku"] = ev
                elif key == "location":
                    self.parameters["location"] = kwargs[key]
                elif key == "storage_mb":
                    self.parameters.setdefault("properties", {}).setdefault("storage_profile", {})["storage_mb"] = kwargs[key]
                elif key == "version":
                    self.parameters.setdefault("properties", {})["version"] = kwargs[key]
                elif key == "enforce_ssl":
                    self.parameters.setdefault("properties", {})["ssl_enforcement"] = 'Enabled' if kwargs[key] else 'Disabled'
                elif key == "create_mode":
                    self.parameters.setdefault("properties", {})["create_mode"] = kwargs[key]
                elif key == "admin_username":
                    self.parameters.setdefault("properties", {})["administrator_login"] = kwargs[key]
                elif key == "admin_password":
                    self.parameters.setdefault("properties", {})["administrator_login_password"] = kwargs[key]

        old_response = None
        response = None

        resource_group = self.get_resource_group(self.resource_group)

        if "location" not in self.parameters:
            # Default the server's location to the resource group's.
            self.parameters["location"] = resource_group.location

        old_response = self.get_mariadbserver()

        if not old_response:
            self.log("MariaDB Server instance doesn't exist")
            if self.state == 'absent':
                self.log("Old instance didn't exist")
            else:
                self.to_do = Actions.Create
        else:
            self.log("MariaDB Server instance already exists")
            if self.state == 'absent':
                self.to_do = Actions.Delete
            elif self.state == 'present':
                self.log("Need to check if MariaDB Server instance has to be deleted or may be updated")
                update_tags, newtags = self.update_tags(old_response.get('tags', {}))
                if update_tags:
                    self.tags = newtags
                    self.to_do = Actions.Update

        if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
            self.log("Need to Create / Update the MariaDB Server instance")

            if self.check_mode:
                self.results['changed'] = True
                return self.results

            response = self.create_update_mariadbserver()

            if not old_response:
                self.results['changed'] = True
            else:
                self.results['changed'] = (old_response != response)
            self.log("Creation / Update done")
        elif self.to_do == Actions.Delete:
            self.log("MariaDB Server instance deleted")
            self.results['changed'] = True

            if self.check_mode:
                return self.results

            self.delete_mariadbserver()
            # make sure instance is actually deleted, for some Azure resources, instance is hanging around
            # for some time after deletion -- this should be really fixed in Azure
            while self.get_mariadbserver():
                time.sleep(20)
        else:
            self.log("MariaDB Server instance unchanged")
            self.results['changed'] = False
            response = old_response

        if response:
            self.results["id"] = response["id"]
            self.results["version"] = response["version"]
            self.results["state"] = response["user_visible_state"]
            self.results["fully_qualified_domain_name"] = response["fully_qualified_domain_name"]

        return self.results

    def create_update_mariadbserver(self):
        '''
        Creates or updates MariaDB Server with the specified configuration.

        :return: deserialized MariaDB Server instance state dictionary
        '''
        self.log("Creating / Updating the MariaDB Server instance {0}".format(self.name))

        try:
            self.parameters['tags'] = self.tags
            if self.to_do == Actions.Create:
                response = self.mariadb_client.servers.create(resource_group_name=self.resource_group,
                                                              server_name=self.name,
                                                              parameters=self.parameters)
            else:
                # structure of parameters for update must be changed
                self.parameters.update(self.parameters.pop("properties", {}))
                response = self.mariadb_client.servers.update(resource_group_name=self.resource_group,
                                                              server_name=self.name,
                                                              parameters=self.parameters)

            # Long-running operations return a poller; wait for completion.
            if isinstance(response, LROPoller):
                response = self.get_poller_result(response)

        except CloudError as exc:
            self.log('Error attempting to create the MariaDB Server instance.')
            self.fail("Error creating the MariaDB Server instance: {0}".format(str(exc)))
        return response.as_dict()

    def delete_mariadbserver(self):
        '''
        Deletes specified MariaDB Server instance in the specified subscription and resource group.

        :return: True
        '''
        self.log("Deleting the MariaDB Server instance {0}".format(self.name))
        try:
            self.mariadb_client.servers.delete(resource_group_name=self.resource_group,
                                               server_name=self.name)
        except CloudError as e:
            self.log('Error attempting to delete the MariaDB Server instance.')
            self.fail("Error deleting the MariaDB Server instance: {0}".format(str(e)))

        return True

    def get_mariadbserver(self):
        '''
        Gets the properties of the specified MariaDB Server.

        :return: deserialized MariaDB Server instance state dictionary,
                 or False when the server does not exist
        '''
        self.log("Checking if the MariaDB Server instance {0} is present".format(self.name))
        found = False
        try:
            response = self.mariadb_client.servers.get(resource_group_name=self.resource_group,
                                                       server_name=self.name)
            found = True
            self.log("Response : {0}".format(response))
            self.log("MariaDB Server instance : {0} found".format(response.name))
        except CloudError as e:
            # A CloudError here means the server does not exist (or is not
            # visible); treat it as "not found" rather than failing.
            self.log('Did not find the MariaDB Server instance.')
        if found:
            return response.as_dict()

        return False
def main():
    """Entry point: instantiating the module class runs the module logic."""
    AzureRMMariaDbServers()


if __name__ == '__main__':
    main()
| gpl-3.0 |
394954369/horizon | openstack_dashboard/test/api_tests/network_tests.py | 4 | 24747 | # Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import itertools
import uuid
from django import http
from mox import IsA # noqa
from novaclient.v1_1 import floating_ip_pools
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class NetworkClientTestCase(test.APITestCase):
    """Verify NetworkClient backend selection: Neutron when the 'network'
    service is enabled (with its security-group extension), Nova otherwise."""
    def test_networkclient_no_neutron(self):
        """Without Neutron, floating IPs and security groups both use Nova."""
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(False)
        self.mox.ReplayAll()

        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.nova.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.nova.SecurityGroupManager)

    def test_networkclient_neutron(self):
        """With Neutron and its extensions present, both managers use Neutron."""
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        self.neutronclient = self.stub_neutronclient()
        self.neutronclient.list_extensions() \
            .AndReturn({'extensions': self.api_extensions.list()})
        self.mox.ReplayAll()

        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.neutron.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.neutron.SecurityGroupManager)

    def test_networkclient_neutron_with_nova_security_group(self):
        """Neutron without the security-group extension: floating IPs use
        Neutron but security groups fall back to Nova."""
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        self.neutronclient = self.stub_neutronclient()
        self.neutronclient.list_extensions().AndReturn({'extensions': []})
        self.mox.ReplayAll()

        nc = api.network.NetworkClient(self.request)
        self.assertIsInstance(nc.floating_ips, api.neutron.FloatingIpManager)
        self.assertIsInstance(nc.secgroups, api.nova.SecurityGroupManager)
class NetworkApiNovaTestBase(test.APITestCase):
    """Base class whose setUp stubs service discovery so tests exercise the
    Nova-network code path (Neutron reported as disabled)."""
    def setUp(self):
        super(NetworkApiNovaTestBase, self).setUp()
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(False)
class NetworkApiNovaSecurityGroupTests(NetworkApiNovaTestBase):
    """Security-group operations routed through the Nova backend."""
    def test_server_update_security_groups(self):
        """server_update_security_groups computes the delta between current
        and desired groups, then adds/removes only what changed."""
        all_secgroups = self.security_groups.list()
        added_secgroup = all_secgroups[2]
        rm_secgroup = all_secgroups[0]
        cur_secgroups_raw = [{'id': sg.id, 'name': sg.name,
                              'rules': []}
                             for sg in all_secgroups[0:2]]
        cur_secgroups_ret = {'security_groups': cur_secgroups_raw}
        new_sg_ids = [sg.id for sg in all_secgroups[1:3]]
        instance_id = self.servers.first().id

        novaclient = self.stub_novaclient()
        novaclient.security_groups = self.mox.CreateMockAnything()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.client = self.mox.CreateMockAnything()
        novaclient.security_groups.list().AndReturn(all_secgroups)
        url = '/servers/%s/os-security-groups' % instance_id
        novaclient.client.get(url).AndReturn((200, cur_secgroups_ret))
        novaclient.servers.add_security_group(instance_id, added_secgroup.name)
        novaclient.servers.remove_security_group(instance_id, rm_secgroup.name)
        self.mox.ReplayAll()

        api.network.server_update_security_groups(
            self.request, instance_id, new_sg_ids)
class NetworkApiNovaFloatingIpTests(NetworkApiNovaTestBase):
    def test_floating_ip_pools_list(self):
        """floating_ip_pools_list returns the Nova pools as-is."""
        pool_names = ['pool1', 'pool2']
        pools = [floating_ip_pools.FloatingIPPool(
            None, {'name': pool}) for pool in pool_names]
        novaclient = self.stub_novaclient()
        novaclient.floating_ip_pools = self.mox.CreateMockAnything()
        novaclient.floating_ip_pools.list().AndReturn(pools)
        self.mox.ReplayAll()

        ret = api.network.floating_ip_pools_list(self.request)
        self.assertEqual([p.name for p in ret], pool_names)
    def test_floating_ip_list(self):
        """tenant_floating_ip_list preserves Nova FIP attributes and maps
        instance_id onto the Neutron-compatible port_id attribute."""
        fips = self.api_floating_ips.list()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.list().AndReturn(fips)
        self.mox.ReplayAll()

        ret = api.network.tenant_floating_ip_list(self.request)
        for r, e in zip(ret, fips):
            for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
                self.assertEqual(getattr(r, attr), getattr(e, attr))
            self.assertEqual(r.port_id, e.instance_id)
    def test_floating_ip_get(self):
        """tenant_floating_ip_get returns a single FIP with port_id mapped
        from instance_id."""
        fip = self.api_floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.floating_ips.get(fip.id).AndReturn(fip)
        self.mox.ReplayAll()

        ret = api.network.tenant_floating_ip_get(self.request, fip.id)
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
            self.assertEqual(getattr(ret, attr), getattr(fip, attr))
        self.assertEqual(ret.port_id, fip.instance_id)
def test_floating_ip_allocate(self):
pool_name = 'fip_pool'
fip = self.api_floating_ips.first()
novaclient = self.stub_novaclient()
novaclient.floating_ips = self.mox.CreateMockAnything()
novaclient.floating_ips.create(pool=pool_name).AndReturn(fip)
self.mox.ReplayAll()
ret = api.network.tenant_floating_ip_allocate(self.request, pool_name)
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'instance_id']:
self.assertEqual(getattr(ret, attr), getattr(fip, attr))
self.assertEqual(ret.port_id, fip.instance_id)
def test_floating_ip_release(self):
fip = self.api_floating_ips.first()
novaclient = self.stub_novaclient()
novaclient.floating_ips = self.mox.CreateMockAnything()
novaclient.floating_ips.delete(fip.id)
self.mox.ReplayAll()
api.network.tenant_floating_ip_release(self.request, fip.id)
def test_floating_ip_associate(self):
server = api.nova.Server(self.servers.first(), self.request)
floating_ip = self.floating_ips.first()
novaclient = self.stub_novaclient()
novaclient.floating_ips = self.mox.CreateMockAnything()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.get(server.id).AndReturn(server)
novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
novaclient.servers.add_floating_ip(server.id, floating_ip.ip) \
.AndReturn(server)
self.mox.ReplayAll()
api.network.floating_ip_associate(self.request,
floating_ip.id,
server.id)
def test_floating_ip_disassociate(self):
server = api.nova.Server(self.servers.first(), self.request)
floating_ip = self.api_floating_ips.first()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.floating_ips = self.mox.CreateMockAnything()
novaclient.servers.get(server.id).AndReturn(server)
novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
novaclient.servers.remove_floating_ip(server.id, floating_ip.ip) \
.AndReturn(server)
self.mox.ReplayAll()
api.network.floating_ip_disassociate(self.request,
floating_ip.id,
server.id)
def test_floating_ip_target_list(self):
servers = self.servers.list()
novaclient = self.stub_novaclient()
novaclient.servers = self.mox.CreateMockAnything()
novaclient.servers.list().AndReturn(servers)
self.mox.ReplayAll()
targets = api.network.floating_ip_target_list(self.request)
for target, server in zip(targets, servers):
self.assertEqual(target.id, server.id)
self.assertEqual(target.name, '%s (%s)' % (server.name, server.id))
def test_floating_ip_target_get_by_instance(self):
self.mox.ReplayAll()
instance_id = self.servers.first().id
ret = api.network.floating_ip_target_get_by_instance(self.request,
instance_id)
self.assertEqual(instance_id, ret)
class NetworkApiNeutronTestBase(test.APITestCase):
    """Common setup for Neutron-backed api.network tests: marks the
    'network' service as enabled and primes a stubbed neutronclient
    that returns the extension fixtures.
    """
    def setUp(self):
        super(NetworkApiNeutronTestBase, self).setUp()
        self.mox.StubOutWithMock(api.base, 'is_service_enabled')
        api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \
            .AndReturn(True)
        # Stubbed neutron client shared by all subclasses' tests.
        self.qclient = self.stub_neutronclient()
        self.qclient.list_extensions() \
            .AndReturn({'extensions': self.api_extensions.list()})
class NetworkApiNeutronSecurityGroupTests(NetworkApiNeutronTestBase):
    """Tests of the Neutron-backed security group wrappers in api.network."""

    def setUp(self):
        super(NetworkApiNeutronSecurityGroupTests, self).setUp()
        # Map of security group id -> name, used to resolve remote groups.
        self.sg_dict = dict([(sg['id'], sg['name']) for sg
                             in self.api_q_secgroups.list()])

    def _cmp_sg_rule(self, exprule, retrule):
        # Compare an expected Neutron rule dict against the returned
        # wrapper object (which exposes nova-style attribute names).
        self.assertEqual(exprule['id'], retrule.id)
        self.assertEqual(exprule['security_group_id'],
                         retrule.parent_group_id)
        self.assertEqual(exprule['direction'], retrule.direction)
        self.assertEqual(exprule['ethertype'], retrule.ethertype)
        self.assertEqual(exprule['port_range_min'], retrule.from_port)
        self.assertEqual(exprule['port_range_max'], retrule.to_port)
        if (exprule['remote_ip_prefix'] is None and
                exprule['remote_group_id'] is None):
            # No explicit remote: the wrapper fills in the any-address CIDR.
            expcidr = ('::/0' if exprule['ethertype'] == 'IPv6'
                       else '0.0.0.0/0')
        else:
            expcidr = exprule['remote_ip_prefix']
        self.assertEqual(expcidr, retrule.ip_range.get('cidr'))
        self.assertEqual(self.sg_dict.get(exprule['remote_group_id']),
                         retrule.group.get('name'))

    def _cmp_sg(self, exp_sg, ret_sg):
        # Compare an expected security group dict with a returned wrapper,
        # including all of its rules.
        self.assertEqual(exp_sg['id'], ret_sg.id)
        self.assertEqual(exp_sg['name'], ret_sg.name)
        exp_rules = exp_sg['security_group_rules']
        self.assertEqual(len(exp_rules), len(ret_sg.rules))
        for (exprule, retrule) in itertools.izip(exp_rules, ret_sg.rules):
            self._cmp_sg_rule(exprule, retrule)

    def test_security_group_list(self):
        sgs = self.api_q_secgroups.list()
        tenant_id = self.request.user.tenant_id
        # use deepcopy to ensure self.api_q_secgroups is not modified.
        self.qclient.list_security_groups(tenant_id=tenant_id) \
            .AndReturn({'security_groups': copy.deepcopy(sgs)})
        self.mox.ReplayAll()
        rets = api.network.security_group_list(self.request)
        self.assertEqual(len(sgs), len(rets))
        for (exp, ret) in itertools.izip(sgs, rets):
            self._cmp_sg(exp, ret)

    def test_security_group_get(self):
        secgroup = self.api_q_secgroups.first()
        # The group itself plus every group referenced as a rule remote.
        sg_ids = set([secgroup['id']] +
                     [rule['remote_group_id'] for rule
                      in secgroup['security_group_rules']
                      if rule['remote_group_id']])
        related_sgs = [sg for sg in self.api_q_secgroups.list()
                       if sg['id'] in sg_ids]
        # use deepcopy to ensure self.api_q_secgroups is not modified.
        self.qclient.show_security_group(secgroup['id']) \
            .AndReturn({'security_group': copy.deepcopy(secgroup)})
        self.qclient.list_security_groups(id=sg_ids, fields=['id', 'name']) \
            .AndReturn({'security_groups': related_sgs})
        self.mox.ReplayAll()
        ret = api.network.security_group_get(self.request, secgroup['id'])
        self._cmp_sg(secgroup, ret)

    def test_security_group_create(self):
        secgroup = self.api_q_secgroups.list()[1]
        body = {'security_group':
                {'name': secgroup['name'],
                 'description': secgroup['description']}}
        self.qclient.create_security_group(body) \
            .AndReturn({'security_group': copy.deepcopy(secgroup)})
        self.mox.ReplayAll()
        ret = api.network.security_group_create(self.request, secgroup['name'],
                                                secgroup['description'])
        self._cmp_sg(secgroup, ret)

    def test_security_group_update(self):
        secgroup = self.api_q_secgroups.list()[1]
        # Work on a copy so the fixture is not mutated.
        secgroup = copy.deepcopy(secgroup)
        secgroup['name'] = 'newname'
        secgroup['description'] = 'new description'
        body = {'security_group':
                {'name': secgroup['name'],
                 'description': secgroup['description']}}
        self.qclient.update_security_group(secgroup['id'], body) \
            .AndReturn({'security_group': secgroup})
        self.mox.ReplayAll()
        ret = api.network.security_group_update(self.request,
                                                secgroup['id'],
                                                secgroup['name'],
                                                secgroup['description'])
        self._cmp_sg(secgroup, ret)

    def test_security_group_delete(self):
        secgroup = self.api_q_secgroups.first()
        self.qclient.delete_security_group(secgroup['id'])
        self.mox.ReplayAll()
        api.network.security_group_delete(self.request, secgroup['id'])

    def test_security_group_rule_create(self):
        sg_rule = [r for r in self.api_q_secgroup_rules.list()
                   if r['protocol'] == 'tcp' and r['remote_ip_prefix']][0]
        sg_id = sg_rule['security_group_id']
        secgroup = [sg for sg in self.api_q_secgroups.list()
                    if sg['id'] == sg_id][0]
        # The POST body is the rule minus the server-generated fields.
        post_rule = copy.deepcopy(sg_rule)
        del post_rule['id']
        del post_rule['tenant_id']
        post_body = {'security_group_rule': post_rule}
        self.qclient.create_security_group_rule(post_body) \
            .AndReturn({'security_group_rule': copy.deepcopy(sg_rule)})
        self.qclient.list_security_groups(id=set([sg_id]),
                                          fields=['id', 'name']) \
            .AndReturn({'security_groups': [copy.deepcopy(secgroup)]})
        self.mox.ReplayAll()
        ret = api.network.security_group_rule_create(
            self.request, sg_rule['security_group_id'],
            sg_rule['direction'], sg_rule['ethertype'], sg_rule['protocol'],
            sg_rule['port_range_min'], sg_rule['port_range_max'],
            sg_rule['remote_ip_prefix'], sg_rule['remote_group_id'])
        self._cmp_sg_rule(sg_rule, ret)

    def test_security_group_rule_delete(self):
        sg_rule = self.api_q_secgroup_rules.first()
        self.qclient.delete_security_group_rule(sg_rule['id'])
        self.mox.ReplayAll()
        api.network.security_group_rule_delete(self.request, sg_rule['id'])

    def _get_instance(self, cur_sg_ids):
        # Build fixture data: pick a compute-owned port and clone it.
        instance_port = [p for p in self.api_ports.list()
                         if p['device_owner'].startswith('compute:')][0]
        instance_id = instance_port['device_id']
        # Emulate an instance with two ports
        instance_ports = []
        for _i in range(2):
            p = copy.deepcopy(instance_port)
            p['id'] = str(uuid.uuid4())
            p['security_groups'] = cur_sg_ids
            instance_ports.append(p)
        return (instance_id, instance_ports)

    def test_server_security_groups(self):
        cur_sg_ids = [sg['id'] for sg in self.api_q_secgroups.list()[:2]]
        instance_id, instance_ports = self._get_instance(cur_sg_ids)
        self.qclient.list_ports(device_id=instance_id) \
            .AndReturn({'ports': instance_ports})
        secgroups = copy.deepcopy(self.api_q_secgroups.list())
        self.qclient.list_security_groups(id=set(cur_sg_ids)) \
            .AndReturn({'security_groups': secgroups})
        self.mox.ReplayAll()
        api.network.server_security_groups(self.request, instance_id)

    def test_server_update_security_groups(self):
        cur_sg_ids = [self.api_q_secgroups.first()['id']]
        new_sg_ids = [sg['id'] for sg in self.api_q_secgroups.list()[:2]]
        instance_id, instance_ports = self._get_instance(cur_sg_ids)
        self.qclient.list_ports(device_id=instance_id) \
            .AndReturn({'ports': instance_ports})
        # Every port of the instance gets the new security group list.
        for p in instance_ports:
            body = {'port': {'security_groups': new_sg_ids}}
            self.qclient.update_port(p['id'], body=body).AndReturn({'port': p})
        self.mox.ReplayAll()
        api.network.server_update_security_groups(
            self.request, instance_id, new_sg_ids)

    def test_security_group_backend(self):
        self.mox.ReplayAll()
        self.assertEqual(api.network.security_group_backend(self.request),
                         'neutron')
class NetworkApiNeutronFloatingIpTests(NetworkApiNeutronTestBase):
    """Tests of the Neutron-backed floating IP wrappers in api.network."""

    def test_floating_ip_pools_list(self):
        # With Neutron, the floating IP "pools" are the external networks.
        search_opts = {'router:external': True}
        ext_nets = [n for n in self.api_networks.list()
                    if n['router:external']]
        self.qclient.list_networks(**search_opts) \
            .AndReturn({'networks': ext_nets})
        self.mox.ReplayAll()
        rets = api.network.floating_ip_pools_list(self.request)
        for attr in ['id', 'name']:
            self.assertEqual([getattr(p, attr) for p in rets],
                             [p[attr] for p in ext_nets])

    def test_floating_ip_list(self):
        fips = self.api_q_floating_ips.list()
        filters = {'tenant_id': self.request.user.tenant_id}
        self.qclient.list_floatingips(**filters) \
            .AndReturn({'floatingips': fips})
        self.qclient.list_ports(**filters) \
            .AndReturn({'ports': self.api_ports.list()})
        self.mox.ReplayAll()
        rets = api.network.tenant_floating_ip_list(self.request)
        assoc_port = self.api_ports.list()[1]
        self.assertEqual(len(fips), len(rets))
        for ret, exp in zip(rets, fips):
            for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
                self.assertEqual(getattr(ret, attr), exp[attr])
            if exp['port_id']:
                # An associated FIP reports the bound port's device as
                # its instance id.
                dev_id = assoc_port['device_id'] if exp['port_id'] else None
                self.assertEqual(ret.instance_id, dev_id)

    def test_floating_ip_get_associated(self):
        fip = self.api_q_floating_ips.list()[1]
        assoc_port = self.api_ports.list()[1]
        self.qclient.show_floatingip(fip['id']).AndReturn({'floatingip': fip})
        self.qclient.show_port(assoc_port['id']) \
            .AndReturn({'port': assoc_port})
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_get(self.request, fip['id'])
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
            self.assertEqual(getattr(ret, attr), fip[attr])
        self.assertEqual(ret.instance_id, assoc_port['device_id'])

    def test_floating_ip_get_unassociated(self):
        fip = self.api_q_floating_ips.list()[0]
        self.qclient.show_floatingip(fip['id']).AndReturn({'floatingip': fip})
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_get(self.request, fip['id'])
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
            self.assertEqual(getattr(ret, attr), fip[attr])
        self.assertIsNone(ret.instance_id)

    def test_floating_ip_allocate(self):
        ext_nets = [n for n in self.api_networks.list()
                    if n['router:external']]
        ext_net = ext_nets[0]
        fip = self.api_q_floating_ips.first()
        self.qclient.create_floatingip(
            {'floatingip': {'floating_network_id': ext_net['id']}}) \
            .AndReturn({'floatingip': fip})
        self.mox.ReplayAll()
        ret = api.network.tenant_floating_ip_allocate(self.request,
                                                      ext_net['id'])
        for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
            self.assertEqual(getattr(ret, attr), fip[attr])
        self.assertIsNone(ret.instance_id)

    def test_floating_ip_release(self):
        fip = self.api_q_floating_ips.first()
        self.qclient.delete_floatingip(fip['id'])
        self.mox.ReplayAll()
        api.network.tenant_floating_ip_release(self.request, fip['id'])

    def test_floating_ip_associate(self):
        fip = self.api_q_floating_ips.list()[1]
        assoc_port = self.api_ports.list()[1]
        ip_address = assoc_port['fixed_ips'][0]['ip_address']
        # Association targets are encoded as '<port_id>_<ip_address>'.
        target_id = '%s_%s' % (assoc_port['id'], ip_address)
        params = {'port_id': assoc_port['id'],
                  'fixed_ip_address': ip_address}
        self.qclient.update_floatingip(fip['id'],
                                       {'floatingip': params})
        self.mox.ReplayAll()
        api.network.floating_ip_associate(self.request, fip['id'], target_id)

    def test_floating_ip_disassociate(self):
        fip = self.api_q_floating_ips.list()[1]
        assoc_port = self.api_ports.list()[1]
        ip_address = assoc_port['fixed_ips'][0]['ip_address']
        target_id = '%s_%s' % (assoc_port['id'], ip_address)
        # Disassociation clears the floating IP's port binding.
        self.qclient.update_floatingip(fip['id'],
                                       {'floatingip': {'port_id': None}})
        self.mox.ReplayAll()
        api.network.floating_ip_disassociate(self.request, fip['id'],
                                             target_id)

    def _get_target_id(self, port):
        # Target id format used by the Neutron backend: '<port>_<addr>'.
        param = {'id': port['id'],
                 'addr': port['fixed_ips'][0]['ip_address']}
        return '%(id)s_%(addr)s' % param

    def _get_target_name(self, port):
        # Human-readable target label built from device id and address.
        param = {'svrid': port['device_id'],
                 'addr': port['fixed_ips'][0]['ip_address']}
        return 'server_%(svrid)s: %(addr)s' % param

    def test_floating_ip_target_list(self):
        ports = self.api_ports.list()
        # Only non-network-owned ports are valid association targets.
        target_ports = [(self._get_target_id(p),
                         self._get_target_name(p)) for p in ports
                        if not p['device_owner'].startswith('network:')]
        filters = {'tenant_id': self.request.user.tenant_id}
        self.qclient.list_ports(**filters).AndReturn({'ports': ports})
        servers = self.servers.list()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        search_opts = {'project_id': self.request.user.tenant_id}
        novaclient.servers.list(True, search_opts).AndReturn(servers)
        self.mox.ReplayAll()
        rets = api.network.floating_ip_target_list(self.request)
        self.assertEqual(len(rets), len(target_ports))
        for ret, exp in zip(rets, target_ports):
            self.assertEqual(ret.id, exp[0])
            self.assertEqual(ret.name, exp[1])

    def test_floating_ip_target_get_by_instance(self):
        ports = self.api_ports.list()
        candidates = [p for p in ports if p['device_id'] == '1']
        search_opts = {'device_id': '1'}
        self.qclient.list_ports(**search_opts).AndReturn({'ports': candidates})
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_get_by_instance(self.request, '1')
        # The first matching port wins.
        self.assertEqual(ret, self._get_target_id(candidates[0]))

    def test_target_floating_ip_port_by_instance(self):
        ports = self.api_ports.list()
        candidates = [p for p in ports if p['device_id'] == '1']
        search_opts = {'device_id': '1'}
        self.qclient.list_ports(**search_opts).AndReturn({'ports': candidates})
        self.mox.ReplayAll()
        ret = api.network.floating_ip_target_list_by_instance(self.request,
                                                              '1')
        self.assertEqual(ret[0], self._get_target_id(candidates[0]))
        self.assertEqual(len(ret), len(candidates))
| apache-2.0 |
simongoffin/my_odoo_tutorial | addons/hr_timesheet_invoice/wizard/hr_timesheet_invoice_create.py | 337 | 3655 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_timesheet_invoice_create(osv.osv_memory):
    """Wizard that creates customer invoices from the analytic
    (timesheet) lines selected in the client (context['active_ids']).
    """
    _name = 'hr.timesheet.invoice.create'
    _description = 'Create invoice from timesheet'
    _columns = {
        'date': fields.boolean('Date', help='The real date of each work will be displayed on the invoice'),
        'time': fields.boolean('Time spent', help='The time of each work done will be displayed on the invoice'),
        'name': fields.boolean('Description', help='The detail of each work done will be displayed on the invoice'),
        'price': fields.boolean('Cost', help='The cost of each work done will be displayed on the invoice. You probably don\'t want to check this'),
        'product': fields.many2one('product.product', 'Force Product', help='Fill this field only if you want to force to use a specific product. Keep empty to use the real product that comes from the cost.'),
    }
    _defaults = {
        'date': 1,
        'name': 1,
    }

    def view_init(self, cr, uid, fields, context=None):
        """
        This function checks for precondition before wizard executes
        @param self: The object pointer
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param fields: List of fields for default value
        @param context: A standard dictionary for contextual values
        """
        # Normalize the context; the previous `context and context.get(...)`
        # yielded None for a missing context and browse() would then fail.
        if context is None:
            context = {}
        analytic_obj = self.pool.get('account.analytic.line')
        data = context.get('active_ids', [])
        for analytic in analytic_obj.browse(cr, uid, data, context=context):
            if analytic.invoice_id:
                # Refuse to run if any selected line is already invoiced.
                raise osv.except_osv(_('Warning!'), _("Invoice is already linked to some of the analytic line(s)!"))

    def do_create(self, cr, uid, ids, context=None):
        """Create invoices for the analytic lines in context['active_ids']
        and return an act_window restricted to the created invoices.
        """
        if context is None:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        # Create an invoice based on selected timesheet lines
        invs = self.pool.get('account.analytic.line').invoice_cost_create(cr, uid, context['active_ids'], data, context=context)
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        mod_ids = mod_obj.search(cr, uid, [('name', '=', 'action_invoice_tree1')], context=context)
        res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)[0]['res_id']
        act_win = act_obj.read(cr, uid, [res_id], context=context)[0]
        # Reuse the standard customer-invoice action, limited to the
        # invoices just created.
        act_win['domain'] = [('id', 'in', invs), ('type', '=', 'out_invoice')]
        act_win['name'] = _('Invoices')
        return act_win
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dejay313/dojostreams | script.module.urlresolver/lib/urlresolver/plugins/weshare.py | 7 | 1762 | """
grifthost urlresolver plugin
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from lib import helpers
from urlresolver import common
from urlresolver.resolver import UrlResolver, ResolverError
class WeShareResolver(UrlResolver):
    """URLResolver plugin for media hosted on weshare.me."""
    name = "weshare.me"
    domains = ["weshare.me"]
    # Matches both plain links and mediaplayer embed URLs; group 1 is
    # the host, group 2 the media id.
    pattern = '(?://|\.)(weshare\.me)/(?:services/mediaplayer/site/_embed(?:\.max)?\.php\?u=)?([A-Za-z0-9]+)'

    def __init__(self):
        self.net = common.Net()

    def get_media_url(self, host, media_id):
        """Fetch the embed page and return a playable URL (with request
        headers appended in the urlresolver convention).
        """
        web_url = self.get_url(host, media_id)
        headers = {'User-Agent': common.FF_USER_AGENT}
        html = self.net.http_GET(web_url, headers=headers).content
        sources = helpers.scrape_sources(html)
        if not sources:
            # Fall back to the page's "(download)" link when no embedded
            # sources were scraped.
            match = re.search('''href="([^"]+)[^>]+>\(download\)''', html, re.DOTALL)
            if match:
                sources = [('Download', match.group(1))]
        headers['Referer'] = web_url
        return helpers.pick_source(sources) + helpers.append_headers(headers)

    def get_url(self, host, media_id):
        # Always resolve through the embed endpoint, regardless of the
        # form of the original link.
        return 'https://weshare.me/services/mediaplayer/site/_embed.max.php?u=%s' % (media_id)
| gpl-2.0 |
foreni-packages/urwid | urwid/tests/test_graphics.py | 22 | 3925 | import unittest
from urwid import graphics
from urwid.compat import B
import urwid
class LineBoxTest(unittest.TestCase):
    def border(self, tl, t, tr, l, r, bl, b, br):
        # Expected rendering of a 3x3 LineBox around a single space,
        # given the eight border characters (corners and edges).
        return [bytes().join([tl, t, tr]),
                bytes().join([l, B(" "), r]),
                bytes().join([bl, b, br]),]

    def test_linebox_border(self):
        urwid.set_encoding("utf-8")
        t = urwid.Text("")
        l = urwid.LineBox(t).render((3,)).text
        # default
        # The byte sequences below are UTF-8 encoded box-drawing chars.
        self.assertEqual(l,
            self.border(B("\xe2\x94\x8c"), B("\xe2\x94\x80"),
                B("\xe2\x94\x90"), B("\xe2\x94\x82"), B("\xe2\x94\x82"),
                B("\xe2\x94\x94"), B("\xe2\x94\x80"), B("\xe2\x94\x98")))
        # Custom border characters, passed by keyword.
        nums = [B(str(n)) for n in range(8)]
        b = dict(zip(["tlcorner", "tline", "trcorner", "lline", "rline",
            "blcorner", "bline", "brcorner"], nums))
        l = urwid.LineBox(t, **b).render((3,)).text
        self.assertEqual(l, self.border(*nums))
class BarGraphTest(unittest.TestCase):
    def bgtest(self, desc, data, top, widths, maxrow, exp ):
        # Compare calculate_bargraph_display output against the expected
        # list of (row-count, [(attr, width), ...]) run-length rows.
        rval = graphics.calculate_bargraph_display(data,top,widths,maxrow)
        assert rval == exp, "%s expected %r, got %r"%(desc,exp,rval)

    def test1(self):
        # Single-segment bars of various column counts and widths.
        self.bgtest('simplest',[[0]],5,[1],1,
            [(1,[(0,1)])] )
        self.bgtest('simpler',[[0],[0]],5,[1,2],5,
            [(5,[(0,3)])] )
        self.bgtest('simple',[[5]],5,[1],1,
            [(1,[(1,1)])] )
        self.bgtest('2col-1',[[2],[0]],5,[1,2],5,
            [(3,[(0,3)]), (2,[(1,1),(0,2)]) ] )
        self.bgtest('2col-2',[[0],[2]],5,[1,2],5,
            [(3,[(0,3)]), (2,[(0,1),(1,2)]) ] )
        self.bgtest('2col-3',[[2],[3]],5,[1,2],5,
            [(2,[(0,3)]), (1,[(0,1),(1,2)]), (2,[(1,3)]) ] )
        self.bgtest('3col-1',[[5],[3],[0]],5,[2,1,1],5,
            [(2,[(1,2),(0,2)]), (3,[(1,3),(0,1)]) ] )
        self.bgtest('3col-2',[[4],[4],[4]],5,[2,1,1],5,
            [(1,[(0,4)]), (4,[(1,4)]) ] )
        self.bgtest('3col-3',[[1],[2],[3]],5,[2,1,1],5,
            [(2,[(0,4)]), (1,[(0,3),(1,1)]), (1,[(0,2),(1,2)]),
                (1,[(1,4)]) ] )
        self.bgtest('3col-4',[[4],[2],[4]],5,[1,2,1],5,
            [(1,[(0,4)]), (2,[(1,1),(0,2),(1,1)]), (2,[(1,4)]) ] )

    def test2(self):
        # Stacked bars: two values per column.
        self.bgtest('simple1a',[[2,0],[2,1]],2,[1,1],2,
            [(1,[(1,2)]),(1,[(1,1),(2,1)]) ] )
        self.bgtest('simple1b',[[2,1],[2,0]],2,[1,1],2,
            [(1,[(1,2)]),(1,[(2,1),(1,1)]) ] )
        self.bgtest('cross1a',[[2,2],[1,2]],2,[1,1],2,
            [(2,[(2,2)]) ] )
        self.bgtest('cross1b',[[1,2],[2,2]],2,[1,1],2,
            [(2,[(2,2)]) ] )
        self.bgtest('mix1a',[[3,2,1],[2,2,2],[1,2,3]],3,[1,1,1],3,
            [(1,[(1,1),(0,1),(3,1)]),(1,[(2,1),(3,2)]),
                (1,[(3,3)]) ] )
        self.bgtest('mix1b',[[1,2,3],[2,2,2],[3,2,1]],3,[1,1,1],3,
            [(1,[(3,1),(0,1),(1,1)]),(1,[(3,2),(2,1)]),
                (1,[(3,3)]) ] )
class SmoothBarGraphTest(unittest.TestCase):
    def sbgtest(self, desc, data, top, exp ):
        # Build a BarGraph with smoothing between attrs (1,0) and (2,1)
        # and compare its calculated display with the expected rows.
        urwid.set_encoding('utf-8')
        g = urwid.BarGraph( ['black','red','blue'],
            None, {(1,0):'red/black', (2,1):'blue/red'})
        g.set_data( data, top )
        rval = g.calculate_display((5,3))
        assert rval == exp, "%s expected %r, got %r"%(desc,exp,rval)

    def test1(self):
        # Triples like (1, 0, 6) in the expected output are the smoothed
        # (attr, attr, fraction) cells produced by the smoothing pairs.
        self.sbgtest('simple', [[3]], 5,
            [(1, [(0, 5)]), (1, [((1, 0, 6), 5)]), (1, [(1, 5)])] )
        self.sbgtest('boring', [[4,2]], 6,
            [(1, [(0, 5)]), (1, [(1, 5)]), (1, [(2,5)]) ] )
        self.sbgtest('two', [[4],[2]], 6,
            [(1, [(0, 5)]), (1, [(1, 3), (0, 2)]), (1, [(1, 5)]) ] )
        self.sbgtest('twos', [[3],[4]], 6,
            [(1, [(0, 5)]), (1, [((1,0,4), 3), (1, 2)]), (1, [(1,5)]) ] )
        self.sbgtest('twof', [[4],[3]], 6,
            [(1, [(0, 5)]), (1, [(1,3), ((1,0,4), 2)]), (1, [(1,5)]) ] )
| lgpl-2.1 |
mheap/ansible | lib/ansible/plugins/cache/yaml.py | 15 | 1924 | # (c) 2017, Brian Coca
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: yaml
short_description: YAML formatted files.
description:
- This cache uses YAML formatted, per host, files saved to the filesystem.
version_added: "2.3"
author: Brian Coca (@bcoca)
options:
_uri:
required: True
description:
- Path in which the cache plugin will save the files
type: list
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the files
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout for the cache plugin data
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import codecs
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by yaml files.
    """

    def _load(self, filepath):
        # Read one cache file and return its deserialized data.
        # AnsibleLoader is used (rather than plain yaml.load) —
        # presumably to handle Ansible-specific YAML types; confirm
        # against parsing/yaml/loader.py before changing.
        with codecs.open(filepath, 'r', encoding='utf-8') as f:
            return AnsibleLoader(f).get_single_data()

    def _dump(self, value, filepath):
        # Serialize `value` to the cache file as block-style YAML.
        with codecs.open(filepath, 'w', encoding='utf-8') as f:
            yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)
| gpl-3.0 |
iLoveTux/unitils | test/test_cp.py | 1 | 3760 | import os
import unittest
import unitils
import platform
try:
from unittest import mock
except ImportError:
import mock
class TestCpCli(unittest.TestCase):
    """Test the functionality of the cp.py cli
    """

    @mock.patch("unitils.cp")
    def test_only_needs_src_and_dst(self, mock_cp):
        # With only positional args, both flags default to False.
        argv = ["/tmp/doesnt.exist", "/tmp/still.not"]
        unitils.cli.cp(argv=argv)
        mock_cp.assert_called_with(src="/tmp/doesnt.exist",
                                   dst="/tmp/still.not",
                                   recursive=False,
                                   no_clobber=False)

    @mock.patch("unitils.cp")
    def test_respects_recursive_flag(self, mock_cp):
        argv = ["--recursive", "/tmp/doesnt.exist", "/tmp/still.not"]
        unitils.cli.cp(argv=argv)
        mock_cp.assert_called_with(src="/tmp/doesnt.exist",
                                   dst="/tmp/still.not",
                                   recursive=True,
                                   no_clobber=False)

    @mock.patch("unitils.cp")
    def test_respects_noclobber_flag(self, mock_cp):
        argv = ["--no-clobber", "/tmp/doesnt.exist", "/tmp/still.not"]
        unitils.cli.cp(argv=argv)
        mock_cp.assert_called_with(src="/tmp/doesnt.exist",
                                   dst="/tmp/still.not",
                                   recursive=False,
                                   no_clobber=True)

    @mock.patch("unitils.cp")
    def test_respects_noclobber_and_recursive_flags(self, mock_cp):
        # Both flags together.
        argv = ["--no-clobber",
                "--recursive",
                "/tmp/doesnt.exist",
                "/tmp/still.not"]
        unitils.cli.cp(argv=argv)
        mock_cp.assert_called_with(src="/tmp/doesnt.exist",
                                   dst="/tmp/still.not",
                                   recursive=True,
                                   no_clobber=True)
class TestCp(unittest.TestCase):
    """Test the functionality of the cp python callable
    """

    @mock.patch("shutil.copy", return_value=None)
    def test_shutil_copy_gets_called(self, mock_copy):
        src, dst = "/tmp/doesnt.exist", "/tmp/still.not"
        unitils.cp(src, dst)
        mock_copy.assert_called_with(src=src, dst=dst)

    # NOTE: stacked mock.patch decorators inject mocks bottom-up, hence
    # the (mock_isfile, mock_exists, mock_copy) parameter order below.
    @mock.patch("shutil.copy", return_value=None)
    @mock.patch("os.path.exists", return_value=True)
    @mock.patch("os.path.isfile", return_value=True)
    def test_shutil_copy_doesnt_get_called_if_no_clobber(self,
                                                         mock_isfile,
                                                         mock_exists,
                                                         mock_copy):
        # Destination "exists", so no_clobber must suppress the copy.
        src, dst = "/tmp/doesnt.exist", "/tmp/still.not"
        unitils.cp(src, dst, no_clobber=True)
        self.assertFalse(mock_copy.called)

    @mock.patch("shutil.copy", return_value=None)
    @mock.patch("os.path.exists", return_value=False)
    @mock.patch("os.path.isfile", return_value=False)
    def test_no_clobber_shutil_copy_gets_called_if_doesnt_exist(self,
                                                                mock_isfile,
                                                                mock_exists,
                                                                mock_copy):
        # Destination absent: no_clobber should not block the copy.
        src, dst = "/tmp/doesnt.exist", "/tmp/still.not"
        unitils.cp(src, dst, no_clobber=True)
        mock_copy.assert_called_with(src=src, dst=dst)

    @mock.patch("shutil.copytree", return_value=None)
    def test_shutil_copytree_gets_called_if_recursive_is_True(self, mock_copytree):
        src, dst = "/tmp/doesnt.exist/", "/tmp/still.not"
        unitils.cp(src, dst, recursive=True)
        mock_copytree.assert_called_with(src=src, dst=dst)
| gpl-3.0 |
SnappleCap/oh-mainline | vendor/packages/PyYaml/tests/lib3/test_tokens.py | 62 | 2257 |
import yaml
import pprint
# Tokens mnemonic:
# directive: %
# document_start: ---
# document_end: ...
# alias: *
# anchor: &
# tag: !
# scalar _
# block_sequence_start: [[
# block_mapping_start: {{
# block_end: ]}
# flow_sequence_start: [
# flow_sequence_end: ]
# flow_mapping_start: {
# flow_mapping_end: }
# entry: ,
# key: ?
# value: :
# Map each token class to the single-character mnemonic used in the
# .tokens fixture files (see the legend in the comment above).
_replaces = {
    yaml.DirectiveToken: '%',
    yaml.DocumentStartToken: '---',
    yaml.DocumentEndToken: '...',
    yaml.AliasToken: '*',
    yaml.AnchorToken: '&',
    yaml.TagToken: '!',
    yaml.ScalarToken: '_',
    yaml.BlockSequenceStartToken: '[[',
    yaml.BlockMappingStartToken: '{{',
    yaml.BlockEndToken: ']}',
    yaml.FlowSequenceStartToken: '[',
    yaml.FlowSequenceEndToken: ']',
    yaml.FlowMappingStartToken: '{',
    yaml.FlowMappingEndToken: '}',
    yaml.BlockEntryToken: ',',
    yaml.FlowEntryToken: ',',
    yaml.KeyToken: '?',
    yaml.ValueToken: ':',
}
def test_tokens(data_filename, tokens_filename, verbose=False):
    """Scan ``data_filename`` and compare its token mnemonics against
    the whitespace-separated expected list in ``tokens_filename``.

    Stream start/end tokens are skipped, as they are not recorded in the
    fixture files.
    """
    tokens1 = []
    # Use context managers so both files are closed even on scan errors
    # (the original leaked the handles, a ResourceWarning on Python 3).
    with open(tokens_filename, 'r') as f:
        tokens2 = f.read().split()
    try:
        with open(data_filename, 'rb') as f:
            for token in yaml.scan(f):
                if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
                    tokens1.append(_replaces[token.__class__])
    finally:
        if verbose:
            print("TOKENS1:", ' '.join(tokens1))
            print("TOKENS2:", ' '.join(tokens2))
    assert len(tokens1) == len(tokens2), (tokens1, tokens2)
    for token1, token2 in zip(tokens1, tokens2):
        assert token1 == token2, (token1, token2)

# Fixture extensions consumed by the test runner (test_appliance).
test_tokens.unittest = ['.data', '.tokens']
def test_scanner(data_filename, canonical_filename, verbose=False):
    """Ensure both the data file and its canonical form scan cleanly,
    collecting the token class names (printed when verbose).
    """
    for filename in [data_filename, canonical_filename]:
        tokens = []
        try:
            # Close the file deterministically even if yaml.scan raises
            # (the original leaked the handle).
            with open(filename, 'rb') as f:
                for token in yaml.scan(f):
                    tokens.append(token.__class__.__name__)
        finally:
            if verbose:
                pprint.pprint(tokens)

# Fixture extensions consumed by the test runner (test_appliance).
test_scanner.unittest = ['.data', '.canonical']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
| agpl-3.0 |
azureplus/hue | desktop/core/ext-py/Django-1.6.10/django/test/utils.py | 105 | 15228 | from contextlib import contextmanager
import logging
import re
import sys
from threading import local
import time
import warnings
from functools import wraps
from xml.dom.minidom import parseString, Node
from django.conf import settings, UserSettingsHolder
from django.core import mail
from django.core.signals import request_started
from django.db import reset_queries
from django.http import request
from django.template import Template, loader, TemplateDoesNotExist
from django.template.loaders import cached
from django.test.signals import template_rendered, setting_changed
from django.utils.encoding import force_str
from django.utils import six
from django.utils.translation import deactivate
from django.utils.unittest import skipUnless
__all__ = (
'Approximate', 'ContextList', 'get_runner', 'override_settings',
'requires_tz_support', 'setup_test_environment', 'teardown_test_environment',
)
RESTORE_LOADERS_ATTR = '_original_template_source_loaders'
TZ_SUPPORT = hasattr(time, 'tzset')
class Approximate(object):
    """Wrapper that compares equal to any value within ``places``
    decimal places of ``val`` — like assertAlmostEqual, but usable
    anywhere an ordinary equality comparison happens (e.g. nested
    inside a structure passed to assertEqual).
    """
    def __init__(self, val, places=7):
        self.val = val
        self.places = places

    def __repr__(self):
        # Show the wrapped value, not the wrapper, in assertion output.
        return repr(self.val)

    def __eq__(self, other):
        if self.val == other:
            return True
        return round(abs(self.val - other), self.places) == 0

    def __ne__(self, other):
        # Python 2 does not derive != from __eq__, so define it
        # explicitly to keep != consistent with == on both 2 and 3.
        return not self.__eq__(other)
class ContextList(list):
    """A wrapper that provides direct key access to context items contained
    in a list of context objects.
    """

    def __getitem__(self, key):
        if isinstance(key, six.string_types):
            # String keys: return the value from the first subcontext
            # that contains the key.
            for subcontext in self:
                if key in subcontext:
                    return subcontext[key]
            raise KeyError(key)
        else:
            # Non-string keys (indices, slices) behave like a plain list.
            return super(ContextList, self).__getitem__(key)

    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        return True

    def keys(self):
        """
        Flattened keys of subcontexts.
        """
        keys = set()
        # 'd' rather than 'dict' to avoid shadowing the builtin.
        for subcontext in self:
            for d in subcontext:
                keys |= set(d.keys())
        return keys
def instrumented_test_render(self, context):
    """
    An instrumented Template render method, providing a signal
    that can be intercepted by the test system Client
    """
    # Emit the signal before rendering so receivers see every template
    # and its context, even if rendering then fails.
    template_rendered.send(sender=self, template=self, context=context)
    return self.nodelist.render(context)
def setup_test_environment():
    """Perform any global pre-test setup. This involves:

    - Installing the instrumented test renderer
    - Set the email backend to the locmem email backend.
    - Setting the active locale to match the LANGUAGE_CODE setting.
    """
    # Patch Template._render so every render fires template_rendered; the
    # original is stashed for teardown_test_environment() to restore.
    Template._original_render = Template._render
    Template._render = instrumented_test_render
    # Storing previous values in the settings module itself is problematic.
    # Store them in arbitrary (but related) modules instead. See #20636.
    mail._original_email_backend = settings.EMAIL_BACKEND
    settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
    request._original_allowed_hosts = settings.ALLOWED_HOSTS
    settings.ALLOWED_HOSTS = ['*']
    # Outgoing test mail is collected here instead of being sent.
    mail.outbox = []
    # Reset any active translation so tests start from a known locale.
    deactivate()
def teardown_test_environment():
    """Perform any global post-test teardown. This involves:

    - Restoring the original test renderer
    - Restoring the email sending functions
    """
    # Undo everything setup_test_environment() did, deleting the stashed
    # originals so a stale state cannot leak into a later setup call.
    Template._render = Template._original_render
    del Template._original_render
    settings.EMAIL_BACKEND = mail._original_email_backend
    del mail._original_email_backend
    settings.ALLOWED_HOSTS = request._original_allowed_hosts
    del request._original_allowed_hosts
    del mail.outbox
# Shared deprecation message for get_warnings_state/restore_warnings_state.
warn_txt = ("get_warnings_state/restore_warnings_state functions from "
    "django.test.utils are deprecated. Use Python's warnings.catch_warnings() "
    "context manager instead.")
def get_warnings_state():
    """
    Returns an object containing the state of the warnings module
    """
    # There is no public interface for doing this, but this implementation of
    # get_warnings_state and restore_warnings_state appears to work on Python
    # 2.4 to 2.7.
    warnings.warn(warn_txt, DeprecationWarning, stacklevel=2)
    # A shallow copy is sufficient: filter entries are immutable tuples.
    return warnings.filters[:]
def restore_warnings_state(state):
    """
    Restores the state of the warnings module when passed an object that was
    returned by get_warnings_state()
    """
    warnings.warn(warn_txt, DeprecationWarning, stacklevel=2)
    # Copy again so later mutation of the caller's list has no effect.
    warnings.filters = state[:]
def get_runner(settings, test_runner_class=None):
    """Resolve and return the test runner class.

    ``test_runner_class`` is a dotted path; when omitted (or empty) the
    ``TEST_RUNNER`` setting is used instead.
    """
    if not test_runner_class:
        test_runner_class = settings.TEST_RUNNER
    parts = test_runner_class.split('.')
    # Allow for Python 2.5 relative paths
    module_name = '.'.join(parts[:-1]) if len(parts) > 1 else '.'
    module = __import__(module_name, {}, {}, force_str(parts[-1]))
    return getattr(module, parts[-1])
def setup_test_template_loader(templates_dict, use_cached_loader=False):
    """
    Changes Django to only find templates from within a dictionary (where each
    key is the template name and each value is the corresponding template
    content to return).

    Use meth:`restore_template_loaders` to restore the original loaders.
    """
    # Refuse to nest: the original loaders are stashed on the loader module
    # and a second call would overwrite (and so lose) them.
    if hasattr(loader, RESTORE_LOADERS_ATTR):
        raise Exception("loader.%s already exists" % RESTORE_LOADERS_ATTR)

    def test_template_loader(template_name, template_dirs=None):
        "A custom template loader that loads templates from a dictionary."
        try:
            return (templates_dict[template_name], "test:%s" % template_name)
        except KeyError:
            raise TemplateDoesNotExist(template_name)

    if use_cached_loader:
        # The dummy dotted path passed to cached.Loader is never imported
        # because the real loader is injected directly below.
        template_loader = cached.Loader(('test_template_loader',))
        template_loader._cached_loaders = (test_template_loader,)
    else:
        template_loader = test_template_loader

    # Stash the previous loaders, then install ours as the only one.
    setattr(loader, RESTORE_LOADERS_ATTR, loader.template_source_loaders)
    loader.template_source_loaders = (template_loader,)
    return template_loader
def restore_template_loaders():
    """
    Restores the original template loaders after
    :meth:`setup_test_template_loader` has been run.
    """
    # Raises AttributeError if setup_test_template_loader() was never called.
    loader.template_source_loaders = getattr(loader, RESTORE_LOADERS_ATTR)
    delattr(loader, RESTORE_LOADERS_ATTR)
class override_settings(object):
    """
    Acts as either a decorator, or a context manager. If it's a decorator it
    takes a function and returns a wrapped function. If it's a contextmanager
    it's used with the ``with`` statement. In either event entering/exiting
    are called before and after, respectively, the function/block is executed.
    """
    def __init__(self, **kwargs):
        # Settings to override, given as SETTING_NAME=value keyword args.
        self.options = kwargs

    def __enter__(self):
        self.enable()

    def __exit__(self, exc_type, exc_value, traceback):
        self.disable()

    def __call__(self, test_func):
        # Imported here to avoid a circular import at module load time.
        from django.test import SimpleTestCase
        if isinstance(test_func, type):
            if not issubclass(test_func, SimpleTestCase):
                raise Exception(
                    "Only subclasses of Django SimpleTestCase can be decorated "
                    "with override_settings")
            original_pre_setup = test_func._pre_setup
            original_post_teardown = test_func._post_teardown

            # Wrap the test case hooks so the overrides are active for the
            # whole lifetime of each test (pre-setup through post-teardown).
            def _pre_setup(innerself):
                self.enable()
                original_pre_setup(innerself)

            def _post_teardown(innerself):
                original_post_teardown(innerself)
                self.disable()

            test_func._pre_setup = _pre_setup
            test_func._post_teardown = _post_teardown
            return test_func
        else:
            @wraps(test_func)
            def inner(*args, **kwargs):
                with self:
                    return test_func(*args, **kwargs)
            return inner

    def enable(self):
        # Layer the overrides on top of the current settings and swap the
        # holder in; the previous holder is kept for disable() to restore.
        override = UserSettingsHolder(settings._wrapped)
        for key, new_value in self.options.items():
            setattr(override, key, new_value)
        self.wrapped = settings._wrapped
        settings._wrapped = override
        # Notify receivers (caches, connections, ...) only after the swap so
        # they observe the new values.
        for key, new_value in self.options.items():
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value)

    def disable(self):
        settings._wrapped = self.wrapped
        del self.wrapped
        # Re-send setting_changed with the restored (post-disable) values.
        for key in self.options:
            new_value = getattr(settings, key, None)
            setting_changed.send(sender=settings._wrapped.__class__,
                                 setting=key, value=new_value)
def compare_xml(want, got):
    """Tries to do a 'xml-comparison' of want and got. Plain string
    comparison doesn't always work because, for example, attribute
    ordering should not be important. Comment nodes are not considered in the
    comparison.

    Based on http://codespeak.net/svn/lxml/trunk/src/lxml/doctestcompare.py
    """
    _norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+')

    def norm_whitespace(v):
        # Collapse runs of whitespace into a single space.
        return _norm_whitespace_re.sub(' ', v)

    def child_text(element):
        # Concatenated text of the element's direct text children only.
        return ''.join([c.data for c in element.childNodes
                        if c.nodeType == Node.TEXT_NODE])

    def children(element):
        return [c for c in element.childNodes
                if c.nodeType == Node.ELEMENT_NODE]

    def norm_child_text(element):
        return norm_whitespace(child_text(element))

    def attrs_dict(element):
        # dict comparison makes the attribute check order-insensitive.
        return dict(element.attributes.items())

    def check_element(want_element, got_element):
        # Recursive structural comparison: tag, normalized text, attributes,
        # then the element children pairwise (order-sensitive).
        if want_element.tagName != got_element.tagName:
            return False
        if norm_child_text(want_element) != norm_child_text(got_element):
            return False
        if attrs_dict(want_element) != attrs_dict(got_element):
            return False
        want_children = children(want_element)
        got_children = children(got_element)
        if len(want_children) != len(got_children):
            return False
        for want, got in zip(want_children, got_children):
            if not check_element(want, got):
                return False
        return True

    def first_node(document):
        # First non-comment node; comments are ignored by the comparison.
        for node in document.childNodes:
            if node.nodeType != Node.COMMENT_NODE:
                return node

    want, got = strip_quotes(want, got)
    want = want.replace('\\n', '\n')
    got = got.replace('\\n', '\n')
    # If the string is not a complete xml document, we may need to add a
    # root element. This allow us to compare fragments, like "<foo/><bar/>"
    if not want.startswith('<?xml'):
        wrapper = '<root>%s</root>'
        want = wrapper % want
        got = wrapper % got
    # Parse the want and got strings, and compare the parsings.
    want_root = first_node(parseString(want))
    got_root = first_node(parseString(got))
    return check_element(want_root, got_root)
def strip_quotes(want, got):
    """
    Strip quotes of doctests output values:

    >>> strip_quotes("'foo'")
    "foo"
    >>> strip_quotes('"foo"')
    "foo"
    """
    def _is_quoted(s, prefix):
        # True when the stripped string is ``prefix`` followed by matching
        # single or double quotes around at least an empty body.
        s = s.strip()
        start = len(prefix)
        return (len(s) >= start + 2
                and s.startswith(prefix)
                and s[start] == s[-1]
                and s[start] in ('"', "'"))

    if _is_quoted(want, '') and _is_quoted(got, ''):
        want, got = want.strip()[1:-1], got.strip()[1:-1]
    elif _is_quoted(want, 'u') and _is_quoted(got, 'u'):
        # Python 2 unicode reprs: u'...' / u"..."
        want, got = want.strip()[2:-1], got.strip()[2:-1]
    return want, got
def str_prefix(s):
    """Interpolate ``%(_)s`` in *s* with the native text-repr prefix:
    ``''`` on Python 3 and ``'u'`` on Python 2."""
    return s % {'_': '' if six.PY3 else 'u'}
class CaptureQueriesContext(object):
    """
    Context manager that captures queries executed by the specified connection.
    """
    def __init__(self, connection):
        self.connection = connection

    def __iter__(self):
        return iter(self.captured_queries)

    def __getitem__(self, index):
        return self.captured_queries[index]

    def __len__(self):
        return len(self.captured_queries)

    @property
    def captured_queries(self):
        # Slice of connection.queries issued while the context was active.
        # final_queries stays None until __exit__, so inside the block this
        # returns the live tail of the query log.
        return self.connection.queries[self.initial_queries:self.final_queries]

    def __enter__(self):
        # Force query logging regardless of DEBUG, remembering the previous
        # setting so __exit__ can restore it.
        self.use_debug_cursor = self.connection.use_debug_cursor
        self.connection.use_debug_cursor = True
        self.initial_queries = len(self.connection.queries)
        self.final_queries = None
        # Keep the request_started signal from wiping the query log while
        # we are capturing.
        request_started.disconnect(reset_queries)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.use_debug_cursor = self.use_debug_cursor
        request_started.connect(reset_queries)
        if exc_type is not None:
            # On error, leave final_queries as None (capture is unreliable).
            return
        self.final_queries = len(self.connection.queries)
class IgnoreDeprecationWarningsMixin(object):
    """TestCase mixin that silences ``warning_class`` warnings for the
    duration of each test by entering warnings.catch_warnings() in setUp
    and exiting it in tearDown."""
    warning_class = DeprecationWarning

    def setUp(self):
        super(IgnoreDeprecationWarningsMixin, self).setUp()
        # Enter the context manager manually so the filter change below is
        # scoped to exactly one test and undone in tearDown.
        self.catch_warnings = warnings.catch_warnings()
        self.catch_warnings.__enter__()
        warnings.filterwarnings("ignore", category=self.warning_class)

    def tearDown(self):
        self.catch_warnings.__exit__(*sys.exc_info())
        super(IgnoreDeprecationWarningsMixin, self).tearDown()
class IgnorePendingDeprecationWarningsMixin(IgnoreDeprecationWarningsMixin):
    """Variant that silences PendingDeprecationWarning instead."""
    warning_class = PendingDeprecationWarning
@contextmanager
def patch_logger(logger_name, log_level):
    """
    Context manager that takes a named logger and the logging level
    and provides a simple mock-like list of messages received
    """
    messages = []
    logger = logging.getLogger(logger_name)
    original = getattr(logger, log_level)
    # Swap the level method (e.g. logger.warning) for the list's append,
    # so each logged message is recorded verbatim instead of emitted.
    setattr(logger, log_level, messages.append)
    try:
        yield messages
    finally:
        setattr(logger, log_level, original)
class TransRealMixin(object):
    """This is the only way to reset the translation machinery. Otherwise
    the test suite occasionally fails because of global state pollution
    between tests."""
    def flush_caches(self):
        # Reset trans_real's module-level caches; imported lazily so merely
        # using the mixin doesn't initialize translations.
        from django.utils.translation import trans_real
        trans_real._translations = {}
        trans_real._active = local()
        trans_real._default = None
        trans_real._accepted = {}
        trans_real._checked_languages = {}

    def tearDown(self):
        self.flush_caches()
        super(TransRealMixin, self).tearDown()
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
# Apply as a decorator: @requires_tz_support on a test or test class.
requires_tz_support = skipUnless(TZ_SUPPORT,
    "This test relies on the ability to run a program in an arbitrary "
    "time zone, but your operating system isn't able to do that.")
| apache-2.0 |
MFry/pyAlgoDataStructures | hacker_rank/WoC_24/simplified_chess_engine.py | 1 | 2391 | """
ref: https://www.hackerrank.com/contests/w24/challenges/simplified-chess-engine
"""
import unittest
def get_moves(piece, r, c, board):
    """Return the list of ``(row, col)`` squares that ``piece``, currently
    at ``(r, c)``, may move to.

    ``piece`` is a dict with keys 'type' ('n', 'b', 'r' or 'q',
    case-insensitive) and 'color'; ``board`` is a list of rows whose squares
    hold either such a dict or a falsy value ('' / None) for empty.
    Bounds checking uses len(board) for both axes, i.e. the board is
    assumed square.

    Fixes over the previous version: the sliding loop's break condition was
    inverted (it collected only friendly-occupied squares), empty squares
    terminated a slide because ``board[x][y]`` (a dict or '') was compared
    to a color string, and knights could land on friendly pieces.
    """
    def is_landable(x, y):
        """True unless (x, y) holds a piece of the mover's own color."""
        if board[x][y] and board[x][y]['color'] == piece['color']:
            return False
        return True

    def in_bounds(x, y):
        if x < 0 or y < 0:
            return False
        if x >= len(board) or y >= len(board):
            return False
        return True

    def slide(directions):
        """Walk each direction until the board edge, a friendly piece, or
        just past an enemy piece (capturable, but it blocks the ray)."""
        valid_moves = []
        for dx, dy in directions:
            x, y = r, c
            while True:
                x, y = x + dx, y + dy
                # Stop before off-board or friendly-occupied squares.
                if not in_bounds(x, y) or not is_landable(x, y):
                    break
                valid_moves.append((x, y))
                if board[x][y]:
                    # Enemy piece: capture square included, slide ends here.
                    break
        return valid_moves

    knight_moves = [(2, 1), (2, -1), (-2, 1), (-2, -1), (-1, 2), (1, 2), (1, -2), (-1, -2)]
    bishop_moves = [(1, 1), (-1, 1), (1, -1), (-1, -1)]
    rook_moves = [(1, 0), (0, 1), (-1, 0), (0, -1)]
    moved = []
    p = piece['type'].lower()
    # Knight: fixed jumps, no sliding.
    if p == 'n':
        for dx, dy in knight_moves:
            x, y = r + dx, c + dy
            if in_bounds(x, y) and is_landable(x, y):
                moved.append((x, y))
    # Bishop
    elif p == 'b':
        moved = slide(bishop_moves)
    # Rook
    elif p == 'r':
        moved = slide(rook_moves)
    # Queen
    elif p == 'q':
        moved = slide(bishop_moves + rook_moves)
    return moved
class MyTestCases(unittest.TestCase):
    def test_get_moves(self):
        """Sanity checks for get_moves() on a small hand-built board."""
        # Squares hold either a piece dict or '' for empty.  NOTE(review):
        # the rows are ragged (row 0 has only 3 columns) while check_bounds
        # assumes a square board -- confirm this is intentional.
        board = [[{'type': 'q', 'color': 'b'}, '', ''],
                 ['', '', '', ''],
                 ['', {'type': 'n', 'color': 'w'}, '', ''],
                 ['', {'type': 'q', 'color': 'w'}, '', '']]
        # Check we selected a knight
        self.assertEqual(board[2][1]['type'], 'n')
        # Knight moves, in knight_moves iteration order.
        self.assertEqual(get_moves(board[2][1], 2, 1, board), [(0, 2), (0, 0), (1, 3), (3, 3)])
        self.assertEqual(board[3][1]['type'], 'q')
        # TODO: queen moves are only printed, not asserted yet (WIP).
        print(get_moves(board[3][1], 3, 1, board))
        # self.assertEqual(get_moves(board[3][1], 3, 1, board))
| mit |
wgcv/SWW-Crashphone | lib/python2.7/site-packages/django/views/csrf.py | 107 | 4958 | from django.conf import settings
from django.http import HttpResponseForbidden
from django.template import Context, Template
from django.utils.translation import ugettext as _
# We include the template inline since we need to be able to reliably display
# this error message, especially for the sake of developers, and there isn't any
# other way of making it available independent of what is in the settings file.
# Only the text appearing with DEBUG=False is translated. Normal translation
# tags cannot be used with this inline templates as makemessages would not be
# able to discover the strings.
CSRF_FAILURE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>403 Forbidden</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
#info { background:#f6f6f6; }
#info ul { margin: 0.5em 4em; }
#info p, #summary p { padding-top:10px; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>{{ title }} <span>(403)</span></h1>
<p>{{ main }}</p>
{% if no_referer %}
<p>{{ no_referer1 }}</p>
<p>{{ no_referer2 }}</p>
{% endif %}
{% if no_cookie %}
<p>{{ no_cookie1 }}</p>
<p>{{ no_cookie2 }}</p>
{% endif %}
</div>
{% if DEBUG %}
<div id="info">
<h2>Help</h2>
{% if reason %}
<p>Reason given for failure:</p>
<pre>
{{ reason }}
</pre>
{% endif %}
<p>In general, this can occur when there is a genuine Cross Site Request Forgery, or when
<a
href='http://docs.djangoproject.com/en/dev/ref/contrib/csrf/#ref-contrib-csrf'>Django's
CSRF mechanism</a> has not been used correctly. For POST forms, you need to
ensure:</p>
<ul>
<li>Your browser is accepting cookies.</li>
<li>The view function uses <a
href='http://docs.djangoproject.com/en/dev/ref/templates/api/#subclassing-context-requestcontext'><code>RequestContext</code></a>
for the template, instead of <code>Context</code>.</li>
<li>In the template, there is a <code>{% templatetag openblock %} csrf_token
{% templatetag closeblock %}</code> template tag inside each POST form that
targets an internal URL.</li>
<li>If you are not using <code>CsrfViewMiddleware</code>, then you must use
<code>csrf_protect</code> on any views that use the <code>csrf_token</code>
template tag, as well as those that accept the POST data.</li>
</ul>
<p>You're seeing the help section of this page because you have <code>DEBUG =
True</code> in your Django settings file. Change that to <code>False</code>,
and only the initial error message will be displayed. </p>
<p>You can customize this page using the CSRF_FAILURE_VIEW setting.</p>
</div>
{% else %}
<div id="explanation">
<p><small>{{ more }}</small></p>
</div>
{% endif %}
</body>
</html>
"""
def csrf_failure(request, reason=""):
    """
    Default view used when request fails CSRF protection
    """
    # Imported locally so the middleware module (and its dependencies) is
    # only loaded when a CSRF failure actually occurs.
    from django.middleware.csrf import REASON_NO_REFERER, REASON_NO_CSRF_COOKIE
    t = Template(CSRF_FAILURE_TEMPLATE)
    # All user-visible strings are translated here; the template itself
    # deliberately contains no translation tags (see the comment above it).
    c = Context({
        'title': _("Forbidden"),
        'main': _("CSRF verification failed. Request aborted."),
        'reason': reason,
        'no_referer': reason == REASON_NO_REFERER,
        'no_referer1': _(
            "You are seeing this message because this HTTPS site requires a "
            "'Referer header' to be sent by your Web browser, but none was "
            "sent. This header is required for security reasons, to ensure "
            "that your browser is not being hijacked by third parties."),
        'no_referer2': _(
            "If you have configured your browser to disable 'Referer' headers, "
            "please re-enable them, at least for this site, or for HTTPS "
            "connections, or for 'same-origin' requests."),
        'no_cookie': reason == REASON_NO_CSRF_COOKIE,
        'no_cookie1': _(
            "You are seeing this message because this site requires a CSRF "
            "cookie when submitting forms. This cookie is required for "
            "security reasons, to ensure that your browser is not being "
            "hijacked by third parties."),
        'no_cookie2': _(
            "If you have configured your browser to disable cookies, please "
            "re-enable them, at least for this site, or for 'same-origin' "
            "requests."),
        'DEBUG': settings.DEBUG,
        'more': _("More information is available with DEBUG=True."),
    })
    return HttpResponseForbidden(t.render(c), content_type='text/html')
| apache-2.0 |
ArthurGarnier/SickRage | lib/hachoir_parser/file_system/ntfs.py | 86 | 11594 | """
New Technology File System (NTFS) file system parser.
Sources:
- The NTFS documentation
http://www.linux-ntfs.org/
- NTFS-3G driver
http://www.ntfs-3g.org/
Creation date: 3rd january 2007
Author: Victor Stinner
"""
SECTOR_SIZE = 512
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet, Enum,
UInt8, UInt16, UInt32, UInt64, TimestampWin64,
String, Bytes, Bit,
NullBits, NullBytes, PaddingBytes, RawBytes)
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_core.text_handler import textHandler, hexadecimal, filesizeHandler
from hachoir_core.tools import humanFilesize, createDict
from hachoir_parser.common.msdos import MSDOSFileAttr32
class BiosParameterBlock(FieldSet):
    """
    BIOS parameter block (bpb) structure
    """
    # Fixed 25-byte structure (hachoir sizes are expressed in bits).
    static_size = 25 * 8
    MEDIA_TYPE = {0xf8: "Hard disk"}

    def createFields(self):
        yield UInt16(self, "bytes_per_sector", "Size of a sector in bytes")
        yield UInt8(self, "sectors_per_cluster", "Size of a cluster in sectors")
        # Fields that are always zero on NTFS (FAT leftovers).
        yield NullBytes(self, "reserved_sectors", 2)
        yield NullBytes(self, "fats", 1)
        yield NullBytes(self, "root_entries", 2)
        yield NullBytes(self, "sectors", 2)
        yield Enum(UInt8(self, "media_type"), self.MEDIA_TYPE)
        yield NullBytes(self, "sectors_per_fat", 2)
        yield UInt16(self, "sectors_per_track")
        yield UInt16(self, "heads")
        yield UInt32(self, "hidden_sectors")
        yield NullBytes(self, "large_sectors", 4)

    def validate(self):
        """Return an error message for implausible sector/cluster sizes,
        or "" if the block looks valid."""
        if self["bytes_per_sector"].value not in (256, 512, 1024, 2048, 4096):
            return "Invalid sector size (%u bytes)" % \
                self["bytes_per_sector"].value
        if self["sectors_per_cluster"].value not in (1, 2, 4, 8, 16, 32, 64, 128):
            return "Invalid cluster size (%u sectors)" % \
                self["sectors_per_cluster"].value
        return ""
class MasterBootRecord(FieldSet):
    """NTFS boot sector: jump + OEM id, BPB, extended BPB and boot code."""
    static_size = 512*8

    def createFields(self):
        yield Bytes(self, "jump", 3, "Intel x86 jump instruction")
        yield String(self, "name", 8)
        yield BiosParameterBlock(self, "bios", "BIOS parameters")
        yield textHandler(UInt8(self, "physical_drive", "(0x80)"), hexadecimal)
        yield NullBytes(self, "current_head", 1)
        yield textHandler(UInt8(self, "ext_boot_sig", "Extended boot signature (0x80)"), hexadecimal)
        yield NullBytes(self, "unused", 1)
        yield UInt64(self, "nb_sectors")
        yield UInt64(self, "mft_cluster", "Cluster location of MFT data")
        yield UInt64(self, "mftmirr_cluster", "Cluster location of copy of MFT")
        yield UInt8(self, "cluster_per_mft", "MFT record size in clusters")
        yield NullBytes(self, "reserved[]", 3)
        yield UInt8(self, "cluster_per_index", "Index block size in clusters")
        yield NullBytes(self, "reserved[]", 3)
        yield textHandler(UInt64(self, "serial_number"), hexadecimal)
        yield textHandler(UInt32(self, "checksum", "Boot sector checksum"), hexadecimal)
        yield Bytes(self, "boot_code", 426)
        yield Bytes(self, "mbr_magic", 2, r"Master boot record magic number (\x55\xAA)")

    def createDescription(self):
        # Volume size = sector count from the extended BPB * sector size.
        size = self["nb_sectors"].value * self["bios/bytes_per_sector"].value
        return "NTFS Master Boot Record (%s)" % humanFilesize(size)
class MFT_Flags(FieldSet):
    """16-bit flags word of an MFT record header."""
    static_size = 16

    def createFields(self):
        yield Bit(self, "in_use")
        yield Bit(self, "is_directory")
        yield NullBits(self, "padding", 14)
class Attribute(FieldSet):
    """One MFT record attribute: a common header followed by a type-specific
    payload, parsed when a parser is registered in ATTR_INFO and kept as raw
    bytes otherwise.

    Fixes: ATTR_INFO previously listed 0x40 and 0xC0 twice (the NTFS v1.2
    entries were silently discarded by the dict literal), and ``_parser``
    was left unset for unknown attribute types, which made createFields()
    raise AttributeError.
    """
    # --- Common code ---
    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # The attribute header stores its own total size (in bytes).
        self._size = self["size"].value * 8
        attr_type = self["type"].value
        if attr_type in self.ATTR_INFO:
            self._name = self.ATTR_INFO[attr_type][0]
            self._parser = self.ATTR_INFO[attr_type][2]
        else:
            # Unknown attribute type: fall back to the raw-bytes branch of
            # createFields() instead of crashing on a missing attribute.
            self._parser = None

    def createFields(self):
        yield Enum(textHandler(UInt32(self, "type"), hexadecimal), self.ATTR_NAME)
        yield UInt32(self, "size")
        yield UInt8(self, "non_resident", "Non-resident flag")
        yield UInt8(self, "name_length", "Name length in bytes")
        yield UInt16(self, "name_offset", "Name offset")
        yield UInt16(self, "flags")
        yield textHandler(UInt16(self, "attribute_id"), hexadecimal)
        yield UInt32(self, "length_attr", "Length of the Attribute")
        yield UInt16(self, "offset_attr", "Offset of the Attribute")
        yield UInt8(self, "indexed_flag")
        yield NullBytes(self, "padding", 1)
        if self._parser:
            for field in self._parser(self):
                yield field
        else:
            # No dedicated parser: expose the payload as raw bytes.
            size = self["length_attr"].value
            if size:
                yield RawBytes(self, "data", size)
            size = (self.size - self.current_size) // 8
            if size:
                yield PaddingBytes(self, "end_padding", size)

    def createDescription(self):
        return "Attribute %s" % self["type"].display

    FILENAME_NAMESPACE = {
        0: "POSIX",
        1: "Win32",
        2: "DOS",
        3: "Win32 & DOS",
    }

    # --- Parser specific to a type ---
    def parseStandardInfo(self):
        yield TimestampWin64(self, "ctime", "File Creation")
        yield TimestampWin64(self, "atime", "File Altered")
        yield TimestampWin64(self, "mtime", "MFT Changed")
        yield TimestampWin64(self, "rtime", "File Read")
        yield MSDOSFileAttr32(self, "file_attr", "DOS File Permissions")
        yield UInt32(self, "max_version", "Maximum Number of Versions")
        yield UInt32(self, "version", "Version Number")
        yield UInt32(self, "class_id")
        yield UInt32(self, "owner_id")
        yield UInt32(self, "security_id")
        yield filesizeHandler(UInt64(self, "quota_charged", "Quota Charged"))
        yield UInt64(self, "usn", "Update Sequence Number (USN)")

    def parseFilename(self):
        yield UInt64(self, "ref", "File reference to the parent directory")
        yield TimestampWin64(self, "ctime", "File Creation")
        yield TimestampWin64(self, "atime", "File Altered")
        yield TimestampWin64(self, "mtime", "MFT Changed")
        yield TimestampWin64(self, "rtime", "File Read")
        yield filesizeHandler(UInt64(self, "alloc_size", "Allocated size of the file"))
        yield filesizeHandler(UInt64(self, "real_size", "Real size of the file"))
        yield UInt32(self, "file_flags")
        yield UInt32(self, "file_flags2", "Used by EAs and Reparse")
        yield UInt8(self, "filename_length", "Filename length in characters")
        yield Enum(UInt8(self, "filename_namespace"), self.FILENAME_NAMESPACE)
        # UTF-16 filename: two bytes per character.
        size = self["filename_length"].value * 2
        if size:
            yield String(self, "filename", size, charset="UTF-16-LE")

    def parseData(self):
        size = (self.size - self.current_size) // 8
        if size:
            yield Bytes(self, "data", size)

    def parseBitmap(self):
        # One Bit field per remaining bit of the attribute.
        size = (self.size - self.current_size)
        for index in xrange(size):
            yield Bit(self, "bit[]")

    # --- Type information ---
    # NOTE: in NTFS v1.2 type 0x40 was VOLUME_VERSION and 0xC0 was
    # SYMBOLIC_LINK; since NTFS 3.0 they are OBJECT_ID and REPARSE_POINT.
    # Both were previously listed under the same keys, so the v1.2 entries
    # were dead code (later dict entries win); only the modern ones remain.
    ATTR_INFO = {
        0x10: ('standard_info', 'STANDARD_INFORMATION ', parseStandardInfo),
        0x20: ('attr_list', 'ATTRIBUTE_LIST ', None),
        0x30: ('filename', 'FILE_NAME ', parseFilename),
        0x40: ('obj_id', 'OBJECT_ID ', None),
        0x50: ('security', 'SECURITY_DESCRIPTOR ', None),
        0x60: ('vol_name', 'VOLUME_NAME ', None),
        0x70: ('vol_info', 'VOLUME_INFORMATION ', None),
        0x80: ('data', 'DATA ', parseData),
        0x90: ('index_root', 'INDEX_ROOT ', None),
        0xA0: ('index_alloc', 'INDEX_ALLOCATION ', None),
        0xB0: ('bitmap', 'BITMAP ', parseBitmap),
        0xC0: ('reparse', 'REPARSE_POINT ', None),
        0xD0: ('ea_info', 'EA_INFORMATION ', None),
        0xE0: ('ea', 'EA ', None),
        0xF0: ('prop_set', 'PROPERTY_SET', None),
        0x100: ('log_util', 'LOGGED_UTILITY_STREAM', None),
    }
    ATTR_NAME = createDict(ATTR_INFO, 1)
class File(FieldSet):
    """One MFT file record ("FILE" record): header plus its attributes."""
    # static_size = 48*8

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # The record stores its own allocated size (in bytes).
        self._size = self["bytes_allocated"].value * 8

    def createFields(self):
        yield Bytes(self, "signature", 4, "Usually the magic is 'FILE'")
        yield UInt16(self, "usa_ofs", "Update Sequence Array offset")
        yield UInt16(self, "usa_count", "Update Sequence Array count")
        yield UInt64(self, "lsn", "$LogFile sequence number for this record")
        yield UInt16(self, "sequence_number", "Number of times this mft record has been reused")
        yield UInt16(self, "link_count", "Number of hard links")
        yield UInt16(self, "attrs_offset", "Byte offset to the first attribute")
        yield MFT_Flags(self, "flags")
        yield UInt32(self, "bytes_in_use", "Number of bytes used in this record")
        yield UInt32(self, "bytes_allocated", "Number of bytes allocated for this record")
        yield UInt64(self, "base_mft_record")
        yield UInt16(self, "next_attr_instance")
        # The below fields are specific to NTFS 3.1+ (Windows XP and above)
        yield NullBytes(self, "reserved", 2)
        yield UInt32(self, "mft_record_number", "Number of this mft record")
        # Skip to the first attribute, as indicated by the header.
        padding = self.seekByte(self["attrs_offset"].value, relative=True)
        if padding:
            yield padding
        # Attributes follow until the 0xFFFFFFFF end-of-attributes marker.
        while not self.eof:
            addr = self.absolute_address + self.current_size
            if self.stream.readBytes(addr, 4) == "\xFF\xFF\xFF\xFF":
                yield Bytes(self, "attr_end_marker", 8)
                break
            yield Attribute(self, "attr[]")
        size = self["bytes_in_use"].value - self.current_size//8
        if size:
            yield RawBytes(self, "end_rawdata", size)
        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "end_padding", size, "Unused but allocated bytes")

    def createDescription(self):
        # Enrich the description with name/size/permissions when the
        # corresponding attributes were parsed.
        text = "File"
        if "filename/filename" in self:
            text += ' "%s"' % self["filename/filename"].value
        if "filename/real_size" in self:
            text += ' (%s)' % self["filename/real_size"].display
        if "standard_info/file_attr" in self:
            text += ', %s' % self["standard_info/file_attr"].display
        return text
class NTFS(Parser):
    """Top-level NTFS parser: boot sector, then MFT records at mft_cluster."""
    MAGIC = "\xEB\x52\x90NTFS "
    PARSER_TAGS = {
        "id": "ntfs",
        "category": "file_system",
        "description": "NTFS file system",
        "min_size": 1024*8,
        "magic": ((MAGIC, 0),),
    }
    endian = LITTLE_ENDIAN
    _cluster_size = None

    def validate(self):
        """Return True if the stream looks like NTFS, else an error string."""
        if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
            return "Invalid magic string"
        err = self["mbr/bios"].validate()
        if err:
            return err
        return True

    def createFields(self):
        yield MasterBootRecord(self, "mbr")
        bios = self["mbr/bios"]
        # MFT byte offset = MFT cluster number * cluster size.
        cluster_size = bios["sectors_per_cluster"].value * bios["bytes_per_sector"].value
        offset = self["mbr/mft_cluster"].value * cluster_size
        padding = self.seekByte(offset, relative=False)
        if padding:
            yield padding
        # Arbitrary cap of 1000 MFT records; any remainder is raw bytes.
        for index in xrange(1000):
            yield File(self, "file[]")
        size = (self.size - self.current_size) // 8
        if size:
            yield RawBytes(self, "end", size)
| gpl-3.0 |
Intel-tensorflow/tensorflow | tensorflow/python/keras/applications/xception.py | 6 | 13122 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""Xception V1 model for Keras.
On ImageNet, this model gets to a top-1 validation accuracy of 0.790
and a top-5 validation accuracy of 0.945.
Reference:
- [Xception: Deep Learning with Depthwise Separable Convolutions](
https://arxiv.org/abs/1610.02357) (CVPR 2017)
"""
from tensorflow.python.keras import backend
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import VersionAwareLayers
from tensorflow.python.keras.utils import data_utils
from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.lib.io import file_io
from tensorflow.python.util.tf_export import keras_export
# Locations of the pretrained ImageNet weights, with and without the
# fully-connected classification head ("top").
TF_WEIGHTS_PATH = (
    'https://storage.googleapis.com/tensorflow/keras-applications/'
    'xception/xception_weights_tf_dim_ordering_tf_kernels.h5')
TF_WEIGHTS_PATH_NO_TOP = (
    'https://storage.googleapis.com/tensorflow/keras-applications/'
    'xception/xception_weights_tf_dim_ordering_tf_kernels_notop.h5')
# Layer namespace used throughout this module instead of importing layers
# directly.
layers = VersionAwareLayers()
@keras_export('keras.applications.xception.Xception',
              'keras.applications.Xception')
def Xception(
    include_top=True,
    weights='imagenet',
    input_tensor=None,
    input_shape=None,
    pooling=None,
    classes=1000,
    classifier_activation='softmax'):
  """Instantiates the Xception architecture.

  On ImageNet, this model gets to a top-1 validation accuracy of 0.790
  and a top-5 validation accuracy of 0.945.

  Reference:
  - [Xception: Deep Learning with Depthwise Separable Convolutions](
      https://arxiv.org/abs/1610.02357) (CVPR 2017)

  For image classification use cases, see
  [this page for detailed examples](
    https://keras.io/api/applications/#usage-examples-for-image-classification-models).

  For transfer learning use cases, make sure to read the
  [guide to transfer learning & fine-tuning](
    https://keras.io/guides/transfer_learning/).

  The default input image size for this model is 299x299.

  Note: each Keras Application expects a specific kind of input preprocessing.
  For Xception, call `tf.keras.applications.xception.preprocess_input` on your
  inputs before passing them to the model.
  `xception.preprocess_input` will scale input pixels between -1 and 1.

  Args:
    include_top: whether to include the fully-connected
      layer at the top of the network.
    weights: one of `None` (random initialization),
      'imagenet' (pre-training on ImageNet),
      or the path to the weights file to be loaded.
    input_tensor: optional Keras tensor
      (i.e. output of `layers.Input()`)
      to use as image input for the model.
    input_shape: optional shape tuple, only to be specified
      if `include_top` is False (otherwise the input shape
      has to be `(299, 299, 3)`).
      It should have exactly 3 inputs channels,
      and width and height should be no smaller than 71.
      E.g. `(150, 150, 3)` would be one valid value.
    pooling: Optional pooling mode for feature extraction
      when `include_top` is `False`.
      - `None` means that the output of the model will be
          the 4D tensor output of the
          last convolutional block.
      - `avg` means that global average pooling
          will be applied to the output of the
          last convolutional block, and thus
          the output of the model will be a 2D tensor.
      - `max` means that global max pooling will
          be applied.
    classes: optional number of classes to classify images
      into, only to be specified if `include_top` is True,
      and if no `weights` argument is specified.
    classifier_activation: A `str` or callable. The activation function to use
      on the "top" layer. Ignored unless `include_top=True`. Set
      `classifier_activation=None` to return the logits of the "top" layer.
      When loading pretrained weights, `classifier_activation` can only
      be `None` or `"softmax"`.

  Returns:
    A `keras.Model` instance.

  Raises:
    ValueError: if `weights` is not one of `None`, `'imagenet'`, or an
      existing file path, or if `weights='imagenet'` is combined with
      `include_top=True` and `classes != 1000`.
  """
  if not (weights in {'imagenet', None} or file_io.file_exists_v2(weights)):
    raise ValueError('The `weights` argument should be either '
                     '`None` (random initialization), `imagenet` '
                     '(pre-training on ImageNet), '
                     'or the path to the weights file to be loaded.')

  if weights == 'imagenet' and include_top and classes != 1000:
    raise ValueError('If using `weights` as `"imagenet"` with `include_top`'
                     ' as true, `classes` should be 1000')

  # Determine proper input shape
  input_shape = imagenet_utils.obtain_input_shape(
      input_shape,
      default_size=299,
      min_size=71,
      data_format=backend.image_data_format(),
      require_flatten=include_top,
      weights=weights)

  if input_tensor is None:
    img_input = layers.Input(shape=input_shape)
  else:
    if not backend.is_keras_tensor(input_tensor):
      img_input = layers.Input(tensor=input_tensor, shape=input_shape)
    else:
      img_input = input_tensor

  # Batch-norm axis: channels dimension depends on the data format.
  channel_axis = 1 if backend.image_data_format() == 'channels_first' else -1

  # Entry flow: two plain convolutions (block 1), then three residual
  # blocks (blocks 2-4) that downsample via max-pooling, with strided
  # 1x1 convolutions on the shortcut branch.
  x = layers.Conv2D(
      32, (3, 3),
      strides=(2, 2),
      use_bias=False,
      name='block1_conv1')(img_input)
  x = layers.BatchNormalization(axis=channel_axis, name='block1_conv1_bn')(x)
  x = layers.Activation('relu', name='block1_conv1_act')(x)
  x = layers.Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block1_conv2_bn')(x)
  x = layers.Activation('relu', name='block1_conv2_act')(x)

  residual = layers.Conv2D(
      128, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
  residual = layers.BatchNormalization(axis=channel_axis)(residual)

  x = layers.SeparableConv2D(
      128, (3, 3), padding='same', use_bias=False, name='block2_sepconv1')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv1_bn')(x)
  x = layers.Activation('relu', name='block2_sepconv2_act')(x)
  x = layers.SeparableConv2D(
      128, (3, 3), padding='same', use_bias=False, name='block2_sepconv2')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block2_sepconv2_bn')(x)

  x = layers.MaxPooling2D((3, 3),
                          strides=(2, 2),
                          padding='same',
                          name='block2_pool')(x)
  x = layers.add([x, residual])

  residual = layers.Conv2D(
      256, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
  residual = layers.BatchNormalization(axis=channel_axis)(residual)

  x = layers.Activation('relu', name='block3_sepconv1_act')(x)
  x = layers.SeparableConv2D(
      256, (3, 3), padding='same', use_bias=False, name='block3_sepconv1')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv1_bn')(x)
  x = layers.Activation('relu', name='block3_sepconv2_act')(x)
  x = layers.SeparableConv2D(
      256, (3, 3), padding='same', use_bias=False, name='block3_sepconv2')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block3_sepconv2_bn')(x)

  x = layers.MaxPooling2D((3, 3),
                          strides=(2, 2),
                          padding='same',
                          name='block3_pool')(x)
  x = layers.add([x, residual])

  residual = layers.Conv2D(
      728, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
  residual = layers.BatchNormalization(axis=channel_axis)(residual)

  x = layers.Activation('relu', name='block4_sepconv1_act')(x)
  x = layers.SeparableConv2D(
      728, (3, 3), padding='same', use_bias=False, name='block4_sepconv1')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block4_sepconv1_bn')(x)
  x = layers.Activation('relu', name='block4_sepconv2_act')(x)
  x = layers.SeparableConv2D(
      728, (3, 3), padding='same', use_bias=False, name='block4_sepconv2')(x)
  x = layers.BatchNormalization(axis=channel_axis, name='block4_sepconv2_bn')(x)

  x = layers.MaxPooling2D((3, 3),
                          strides=(2, 2),
                          padding='same',
                          name='block4_pool')(x)
  x = layers.add([x, residual])

  # Middle flow: eight identical residual blocks (blocks 5-12) at 728
  # channels with identity shortcuts (no downsampling).
  for i in range(8):
    residual = x
    prefix = 'block' + str(i + 5)

    x = layers.Activation('relu', name=prefix + '_sepconv1_act')(x)
    x = layers.SeparableConv2D(
        728, (3, 3),
        padding='same',
        use_bias=False,
        name=prefix + '_sepconv1')(x)
    x = layers.BatchNormalization(
        axis=channel_axis, name=prefix + '_sepconv1_bn')(x)
    x = layers.Activation('relu', name=prefix + '_sepconv2_act')(x)
    x = layers.SeparableConv2D(
        728, (3, 3),
        padding='same',
        use_bias=False,
        name=prefix + '_sepconv2')(x)
    x = layers.BatchNormalization(
        axis=channel_axis, name=prefix + '_sepconv2_bn')(x)
    x = layers.Activation('relu', name=prefix + '_sepconv3_act')(x)
    x = layers.SeparableConv2D(
        728, (3, 3),
        padding='same',
        use_bias=False,
        name=prefix + '_sepconv3')(x)
    x = layers.BatchNormalization(
        axis=channel_axis, name=prefix + '_sepconv3_bn')(x)

    x = layers.add([x, residual])

  # Exit flow: one final downsampling residual block (block 13) followed
  # by two separable convolutions (block 14).
  residual = layers.Conv2D(
      1024, (1, 1), strides=(2, 2), padding='same', use_bias=False)(x)
  residual = layers.BatchNormalization(axis=channel_axis)(residual)

  x = layers.Activation('relu', name='block13_sepconv1_act')(x)
  x = layers.SeparableConv2D(
      728, (3, 3), padding='same', use_bias=False, name='block13_sepconv1')(x)
  x = layers.BatchNormalization(
      axis=channel_axis, name='block13_sepconv1_bn')(x)
  x = layers.Activation('relu', name='block13_sepconv2_act')(x)
  x = layers.SeparableConv2D(
      1024, (3, 3), padding='same', use_bias=False, name='block13_sepconv2')(x)
  x = layers.BatchNormalization(
      axis=channel_axis, name='block13_sepconv2_bn')(x)

  x = layers.MaxPooling2D((3, 3),
                          strides=(2, 2),
                          padding='same',
                          name='block13_pool')(x)
  x = layers.add([x, residual])

  x = layers.SeparableConv2D(
      1536, (3, 3), padding='same', use_bias=False, name='block14_sepconv1')(x)
  x = layers.BatchNormalization(
      axis=channel_axis, name='block14_sepconv1_bn')(x)
  x = layers.Activation('relu', name='block14_sepconv1_act')(x)

  x = layers.SeparableConv2D(
      2048, (3, 3), padding='same', use_bias=False, name='block14_sepconv2')(x)
  x = layers.BatchNormalization(
      axis=channel_axis, name='block14_sepconv2_bn')(x)
  x = layers.Activation('relu', name='block14_sepconv2_act')(x)

  # Classification head, or optional global pooling for feature extraction.
  if include_top:
    x = layers.GlobalAveragePooling2D(name='avg_pool')(x)
    imagenet_utils.validate_activation(classifier_activation, weights)
    x = layers.Dense(classes, activation=classifier_activation,
                     name='predictions')(x)
  else:
    if pooling == 'avg':
      x = layers.GlobalAveragePooling2D()(x)
    elif pooling == 'max':
      x = layers.GlobalMaxPooling2D()(x)

  # Ensure that the model takes into account
  # any potential predecessors of `input_tensor`.
  if input_tensor is not None:
    inputs = layer_utils.get_source_inputs(input_tensor)
  else:
    inputs = img_input
  # Create model.
  model = training.Model(inputs, x, name='xception')

  # Load weights.
  if weights == 'imagenet':
    if include_top:
      weights_path = data_utils.get_file(
          'xception_weights_tf_dim_ordering_tf_kernels.h5',
          TF_WEIGHTS_PATH,
          cache_subdir='models',
          file_hash='0a58e3b7378bc2990ea3b43d5981f1f6')
    else:
      weights_path = data_utils.get_file(
          'xception_weights_tf_dim_ordering_tf_kernels_notop.h5',
          TF_WEIGHTS_PATH_NO_TOP,
          cache_subdir='models',
          file_hash='b0042744bf5b25fce3cb969f33bebb97')
    model.load_weights(weights_path)
  elif weights is not None:
    model.load_weights(weights)

  return model
@keras_export('keras.applications.xception.preprocess_input')
def preprocess_input(x, data_format=None):
  # Delegate to the shared ImageNet helper in 'tf' mode, which scales
  # input pixels to the [-1, 1] range expected by Xception.
  # (The public docstring is attached below from PREPROCESS_INPUT_DOC.)
  return imagenet_utils.preprocess_input(
      x, data_format=data_format, mode='tf')
@keras_export('keras.applications.xception.decode_predictions')
def decode_predictions(preds, top=5):
  # Map class-probability vectors to human-readable ImageNet labels via
  # the shared helper.  (The public docstring is copied below from
  # imagenet_utils.decode_predictions.)
  decoded = imagenet_utils.decode_predictions(preds, top=top)
  return decoded
# Attach the shared ImageNet docstrings to the module-level helpers so the
# public documentation stays consistent across keras.applications models.
preprocess_input.__doc__ = imagenet_utils.PREPROCESS_INPUT_DOC.format(
    mode='',
    ret=imagenet_utils.PREPROCESS_INPUT_RET_DOC_TF,
    error=imagenet_utils.PREPROCESS_INPUT_ERROR_DOC)
decode_predictions.__doc__ = imagenet_utils.decode_predictions.__doc__
| apache-2.0 |
Edraak/edx-platform | lms/djangoapps/discussion_api/forms.py | 30 | 3800 | """
Discussion API forms
"""
from django.core.exceptions import ValidationError
from django.forms import (
BooleanField,
CharField,
ChoiceField,
Form,
IntegerField,
NullBooleanField,
Select)
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locator import CourseLocator
from openedx.core.djangoapps.util.forms import MultiValueField, ExtendedNullBooleanField
class _PaginationForm(Form):
    """A form that includes pagination fields"""
    page = IntegerField(required=False, min_value=1)
    page_size = IntegerField(required=False, min_value=1)

    def clean_page(self):
        """Return given valid page or default of 1"""
        page = self.cleaned_data.get("page")
        if not page:
            page = 1
        return page

    def clean_page_size(self):
        """Return given valid page_size (capped at 100) or default of 10"""
        requested = self.cleaned_data.get("page_size") or 10
        return requested if requested <= 100 else 100
class ThreadListGetForm(_PaginationForm):
    """
    A form to validate query parameters in the thread list retrieval endpoint
    """
    # At most one of these filter parameters may be supplied per request;
    # clean() below enforces the mutual exclusivity.
    EXCLUSIVE_PARAMS = ["topic_id", "text_search", "following"]
    course_id = CharField()
    topic_id = MultiValueField(required=False)
    text_search = CharField(required=False)
    following = ExtendedNullBooleanField(required=False)
    view = ChoiceField(
        choices=[(choice, choice) for choice in ["unread", "unanswered"]],
        required=False,
    )
    order_by = ChoiceField(
        choices=[(choice, choice) for choice in ["last_activity_at", "comment_count", "vote_count"]],
        required=False
    )
    order_direction = ChoiceField(
        choices=[(choice, choice) for choice in ["asc", "desc"]],
        required=False
    )

    def clean_order_by(self):
        """Return a default choice"""
        return self.cleaned_data.get("order_by") or "last_activity_at"

    def clean_order_direction(self):
        """Return a default choice"""
        return self.cleaned_data.get("order_direction") or "desc"

    def clean_course_id(self):
        """Validate course_id and return it parsed as a CourseLocator"""
        value = self.cleaned_data["course_id"]
        try:
            return CourseLocator.from_string(value)
        except InvalidKeyError:
            raise ValidationError("'{}' is not a valid course id".format(value))

    def clean_following(self):
        """Validate following"""
        value = self.cleaned_data["following"]
        if value is False:
            # following=False has no meaning for this endpoint: clients may
            # either omit the parameter or pass true.
            raise ValidationError("The value of the 'following' parameter must be true.")
        else:
            return value

    def clean(self):
        # Cross-field validation: reject requests combining more than one
        # of the mutually exclusive filter parameters.
        cleaned_data = super(ThreadListGetForm, self).clean()
        exclusive_params_count = sum(
            1 for param in self.EXCLUSIVE_PARAMS if cleaned_data.get(param)
        )
        if exclusive_params_count > 1:
            raise ValidationError(
                "The following query parameters are mutually exclusive: {}".format(
                    ", ".join(self.EXCLUSIVE_PARAMS)
                )
            )
        return cleaned_data
class ThreadActionsForm(Form):
    """
    A form to handle fields in thread creation/update that require separate
    interactions with the comments service.
    """
    following = BooleanField(required=False)
    voted = BooleanField(required=False)
    abuse_flagged = BooleanField(required=False)
class CommentListGetForm(_PaginationForm):
    """
    A form to validate query parameters in the comment list retrieval endpoint
    """
    thread_id = CharField()
    # NOTE(review): presumably tri-state (true/false/absent) -- the exact
    # semantics depend on ExtendedNullBooleanField; verify against its
    # implementation.
    endorsed = ExtendedNullBooleanField(required=False)
class CommentActionsForm(Form):
    """
    A form to handle fields in comment creation/update that require separate
    interactions with the comments service.
    """
    voted = BooleanField(required=False)
    abuse_flagged = BooleanField(required=False)
| agpl-3.0 |
jburwell/ansible-modules-extras | source_control/bzr.py | 38 | 6431 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, André Paramés <git@andreparames.com>
# Based on the Git module by Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = u'''
---
module: bzr
author: André Paramés
version_added: "1.1"
short_description: Deploy software (or files) from bzr branches
description:
- Manage I(bzr) branches to deploy files or software.
options:
name:
required: true
aliases: [ 'parent' ]
description:
- SSH or HTTP protocol address of the parent branch.
dest:
required: true
description:
- Absolute path of where the branch should be cloned to.
version:
required: false
default: "head"
description:
- What version of the branch to clone. This can be the
bzr revno or revid.
force:
required: false
default: "yes"
choices: [ 'yes', 'no' ]
description:
- If C(yes), any modified files in the working
tree will be discarded.
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to bzr executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
'''
EXAMPLES = '''
# Example bzr checkout from Ansible Playbooks
- bzr: name=bzr+ssh://foosball.example.org/path/to/branch dest=/srv/checkout version=22
'''
import re
class Bzr(object):
    """Thin wrapper around the ``bzr`` command-line tool.

    Exposes just the branch operations this module needs: branching
    (cloning), local-change detection, reverting, pulling and switching
    to a specific revision.
    """

    def __init__(self, module, parent, dest, version, bzr_path):
        self.module = module        # AnsibleModule: runs commands, reports errors
        self.parent = parent        # URL (or path) of the parent branch
        self.dest = dest            # absolute path of the local checkout
        self.version = version      # bzr revno/revid, or 'head'
        self.bzr_path = bzr_path    # path to the bzr executable

    def _command(self, args_list, cwd=None, **kwargs):
        """Run bzr with the given arguments and return (rc, out, err)."""
        (rc, out, err) = self.module.run_command([self.bzr_path] + args_list, cwd=cwd, **kwargs)
        return (rc, out, err)

    def get_version(self):
        '''samples the version of the bzr branch'''
        cmd = "%s revno" % self.bzr_path
        rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
        revno = stdout.strip()
        return revno

    def clone(self):
        '''makes a new bzr branch if it does not already exist'''
        dest_dirname = os.path.dirname(self.dest)
        try:
            os.makedirs(dest_dirname)
        except OSError:
            # Parent directory already exists (or cannot be created, in
            # which case the branch command below fails with its own,
            # more useful, error message).
            pass
        if self.version.lower() != 'head':
            args_list = ["branch", "-r", self.version, self.parent, self.dest]
        else:
            args_list = ["branch", self.parent, self.dest]
        return self._command(args_list, check_rc=True, cwd=dest_dirname)

    def has_local_mods(self):
        """Return True if the working tree contains uncommitted changes.

        Unversioned files (lines starting with '??' in ``bzr status -S``
        output) are not considered local modifications.
        """
        cmd = "%s status -S" % self.bzr_path
        rc, stdout, stderr = self.module.run_command(cmd, cwd=self.dest)
        lines = stdout.splitlines()
        # Use a list comprehension rather than filter() so len() works on
        # Python 3 too, where filter() returns a lazy iterator.
        lines = [c for c in lines if not re.search('^\\?\\?.*$', c)]
        return len(lines) > 0

    def reset(self, force):
        '''
        Resets the index and working tree to head.
        Discards any changes to tracked files in the working
        tree since that commit.
        '''
        if not force and self.has_local_mods():
            self.module.fail_json(msg="Local modifications exist in branch (force=no).")
        return self._command(["revert"], check_rc=True, cwd=self.dest)

    def fetch(self):
        '''updates branch from remote sources'''
        if self.version.lower() != 'head':
            (rc, out, err) = self._command(["pull", "-r", self.version], cwd=self.dest)
        else:
            (rc, out, err) = self._command(["pull"], cwd=self.dest)
        if rc != 0:
            self.module.fail_json(msg="Failed to pull")
        return (rc, out, err)

    def switch_version(self):
        '''once pulled, switch to a particular revno or revid'''
        if self.version.lower() != 'head':
            args_list = ["revert", "-r", self.version]
        else:
            args_list = ["revert"]
        return self._command(args_list, check_rc=True, cwd=self.dest)
# ===========================================
def main():
    """Module entry point: branch a new checkout or update an existing one."""
    module = AnsibleModule(
        argument_spec = dict(
            dest=dict(required=True),
            name=dict(required=True, aliases=['parent']),
            version=dict(default='head'),
            force=dict(default='yes', type='bool'),
            executable=dict(default=None),
        )
    )
    dest = os.path.abspath(os.path.expanduser(module.params['dest']))
    parent = module.params['name']
    version = module.params['version']
    force = module.params['force']
    bzr_path = module.params['executable'] or module.get_bin_path('bzr', True)
    # Presence of this file is how an existing checkout is detected.
    bzrconfig = os.path.join(dest, '.bzr', 'branch', 'branch.conf')
    rc, out, err, status = (0, None, None, None)
    bzr = Bzr(module, parent, dest, version, bzr_path)
    # if there is no bzr configuration, do a branch operation
    # else pull and switch the version
    before = None
    local_mods = False
    if not os.path.exists(bzrconfig):
        (rc, out, err) = bzr.clone()
    else:
        # else do a pull
        local_mods = bzr.has_local_mods()
        before = bzr.get_version()
        (rc, out, err) = bzr.reset(force)
        if rc != 0:
            module.fail_json(msg=err)
        (rc, out, err) = bzr.fetch()
        if rc != 0:
            module.fail_json(msg=err)
    # switch to version specified regardless of whether
    # we cloned or pulled
    (rc, out, err) = bzr.switch_version()
    # determine if we changed anything
    after = bzr.get_version()
    changed = False
    if before != after or local_mods:
        changed = True
    module.exit_json(changed=changed, before=before, after=after)
# import module snippets
from ansible.module_utils.basic import *

# Only invoke the module when the file is executed directly (Ansible runs
# module files as __main__); the guard keeps the module importable for
# testing without side effects.
if __name__ == '__main__':
    main()
| gpl-3.0 |
wujuguang/sentry | tests/sentry/web/frontend/test_organization_audit_log.py | 25 | 1571 | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuditLogEntry, AuditLogEntryEvent
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationAuditLogPermissionTest(PermissionTestCase):
    """
    Access-control checks for the organization audit log view.
    """
    def setUp(self):
        super(OrganizationAuditLogPermissionTest, self).setUp()
        # URL under test, scoped to the organization created by the base class.
        self.path = reverse('sentry-organization-audit-log', args=[self.organization.slug])

    def test_teamless_admin_cannot_load(self):
        self.assert_teamless_admin_cannot_access(self.path)

    def test_org_member_cannot_load(self):
        self.assert_org_member_cannot_access(self.path)

    def test_org_admin_can_load(self):
        self.assert_org_admin_can_access(self.path)
class OrganizationAuditLogTest(TestCase):
    """
    Rendering test for the organization audit log page.
    """
    def test_renders_with_context(self):
        organization = self.create_organization(name='foo', owner=self.user)
        team = self.create_team(organization=organization)
        # Created as part of the fixture; not directly asserted on below.
        project = self.create_project(team=team)
        path = reverse('sentry-organization-audit-log', args=[organization.slug])
        # Seed a single audit log entry so the queryset assertion is meaningful.
        AuditLogEntry.objects.create(
            organization=organization,
            actor=self.user,
            event=AuditLogEntryEvent.ORG_ADD,
        )
        self.login_as(self.user)
        resp = self.client.get(path)
        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/organization-audit-log.html')
        assert resp.context['organization'] == organization
        assert len(resp.context['audit_log_queryset']) == 1
| bsd-3-clause |
ksooklall/deep_learning_foundation | transfer-learning/tensorflow_vgg/utils.py | 145 | 1972 | import skimage
import skimage.io
import skimage.transform
import numpy as np
# synset = [l.strip() for l in open('synset.txt').readlines()]


# returns image of shape [224, 224, 3]
# [height, width, depth]
def load_image(path):
    """Load an image, center-crop it to a square, and resize to 224x224.

    Pixel values are scaled to the [0, 1] range.  The crop keeps the
    largest centered square of the original image, so the content that
    survives the crop keeps its aspect ratio through the resize.
    """
    # load image
    img = skimage.io.imread(path)
    img = img / 255.0
    assert (0 <= img).all() and (img <= 1.0).all()
    # print "Original Image Shape: ", img.shape
    # we crop image from center
    short_edge = min(img.shape[:2])
    yy = int((img.shape[0] - short_edge) / 2)
    xx = int((img.shape[1] - short_edge) / 2)
    crop_img = img[yy: yy + short_edge, xx: xx + short_edge]
    # resize to 224, 224
    resized_img = skimage.transform.resize(crop_img, (224, 224), mode='constant')
    return resized_img
def print_prob(prob, file_path):
    """Print the top-1 and top-5 labels for a probability vector.

    ``file_path`` names a synset file with one label per line, indexed
    by class id.  Returns the top-1 label string.
    """
    with open(file_path) as synset_file:
        synset = [line.strip() for line in synset_file]
    # Class indices sorted by descending probability.
    ranked = np.argsort(prob)[::-1]
    top1 = synset[ranked[0]]
    print(("Top1: ", top1, prob[ranked[0]]))
    top5 = [(synset[ranked[i]], prob[ranked[i]]) for i in range(5)]
    print(("Top5: ", top5))
    return top1
def load_image2(path, height=None, width=None):
    """Load an image from ``path`` and optionally resize it.

    Pixel values are scaled to [0, 1].  If only one of ``height``/``width``
    is given, the other dimension is derived so the original aspect ratio
    is preserved; if neither is given, the original size is kept.
    """
    # load image
    img = skimage.io.imread(path)
    img = img / 255.0
    if height is not None and width is not None:
        ny = height
        nx = width
    elif height is not None:
        ny = height
        # Floor division keeps the derived dimension an int on Python 3
        # as well (plain "/" would produce a float, which
        # skimage.transform.resize rejects as an output shape); on
        # Python 2 it matches the old truncating integer division.
        nx = img.shape[1] * ny // img.shape[0]
    elif width is not None:
        nx = width
        ny = img.shape[0] * nx // img.shape[1]
    else:
        ny = img.shape[0]
        nx = img.shape[1]
    return skimage.transform.resize(img, (ny, nx), mode='constant')
def test():
    """Smoke test: load a sample image, resize it to a 300px height while
    preserving aspect ratio, and write the result back out."""
    img = skimage.io.imread("./test_data/starry_night.jpg")
    ny = 300
    # Floor division keeps nx an integer on Python 3; plain "/" would
    # yield a float, which resize() rejects as an output dimension.
    nx = img.shape[1] * ny // img.shape[0]
    img = skimage.transform.resize(img, (ny, nx), mode='constant')
    skimage.io.imsave("./test_data/test/output.jpg", img)


if __name__ == "__main__":
    test()
| mit |
ESOedX/edx-platform | pavelib/paver_tests/test_paver_quality.py | 1 | 14003 | """
Tests for paver quality tasks
"""
from __future__ import absolute_import, print_function
import os
import shutil
import tempfile
import textwrap
import unittest
import six
from ddt import data, ddt, file_data, unpack
from mock import MagicMock, mock_open, patch
from path import Path as path
from paver.easy import BuildFailure
import pavelib.quality
from pavelib.paver_tests.utils import PaverTestCase, fail_on_eslint
# mock_open() must patch the builtin ``open`` under its module's name,
# which differs between interpreters: ``__builtin__`` on Python 2 versus
# ``builtins`` on Python 3.
if six.PY2:
    OPEN_BUILTIN = '__builtin__.open'
else:
    OPEN_BUILTIN = 'builtins.open'
@ddt
class TestPaverQualityViolations(unittest.TestCase):
    """
    For testing the paver violations-counting tasks
    """
    def setUp(self):
        super(TestPaverQualityViolations, self).setUp()
        # Scratch report file, removed again after each test.
        self.f = tempfile.NamedTemporaryFile(delete=False)
        self.f.close()
        self.addCleanup(os.remove, self.f.name)

    def test_pylint_parser_other_string(self):
        # Arbitrary non-violation text must count as zero violations.
        with open(self.f.name, 'w') as f:
            f.write("hello")
        num = pavelib.quality._count_pylint_violations(f.name)  # pylint: disable=protected-access
        self.assertEqual(num, 0)

    def test_pylint_parser_pep8(self):
        # Pep8 violations should be ignored.
        with open(self.f.name, 'w') as f:
            f.write("foo/hello/test.py:304:15: E203 whitespace before ':'")
        num = pavelib.quality._count_pylint_violations(f.name)  # pylint: disable=protected-access
        self.assertEqual(num, 0)

    @file_data('pylint_test_list.json')
    def test_pylint_parser_count_violations(self, value):
        """
        Tests:
        - Different types of violations
        - One violation covering multiple lines
        """
        with open(self.f.name, 'w') as f:
            f.write(value)
        num = pavelib.quality._count_pylint_violations(f.name)  # pylint: disable=protected-access
        self.assertEqual(num, 1)

    def test_pep8_parser(self):
        # Every line of the report counts as one pep8 violation.
        with open(self.f.name, 'w') as f:
            f.write("hello\nhithere")
        num = len(pavelib.quality._pep8_violations(f.name))  # pylint: disable=protected-access
        self.assertEqual(num, 2)
@ddt
class TestPaverQualityOptions(unittest.TestCase):
    """
    Tests the paver pylint command-line options parsing.
    """
    @data(
        # (input options dict, expected (lower_limit, upper_limit, errors_only, systems))
        ({'limit': '5500'}, (-1, 5500, False, pavelib.quality.ALL_SYSTEMS.split(','))),
        ({'limit': '1000:5500'}, (1000, 5500, False, pavelib.quality.ALL_SYSTEMS.split(','))),
        ({'limit': '1:2:3:4:5'}, (1, 2, False, pavelib.quality.ALL_SYSTEMS.split(','))),
        ({'system': 'lms,cms'}, (-1, -1, False, ['lms', 'cms'])),
        (
            {'limit': '2000:5000', 'errors': True, 'system': 'lms,cms,openedx'},
            (2000, 5000, True, ['lms', 'cms', 'openedx'])
        ),
    )
    @unpack
    def test_pylint_parser_other_string(self, options, expected_values):
        class PaverOptions(object):
            """
            Simple options class to mimick paver's Namespace object.
            """
            def __init__(self, d):
                self.__dict__ = d
        paver_options = PaverOptions(options)
        returned_values = pavelib.quality._parse_pylint_options(paver_options)  # pylint: disable=protected-access
        self.assertEqual(returned_values, expected_values)
class TestPaverReportViolationsCounts(unittest.TestCase):
    """
    For testing utility functions for getting counts from reports for
    run_eslint, run_complexity, run_xsslint, and run_xsscommitlint.
    """

    def setUp(self):
        super(TestPaverReportViolationsCounts, self).setUp()

        # Temporary file infrastructure
        self.f = tempfile.NamedTemporaryFile(delete=False)
        self.f.close()

        # Cleanup various mocks and tempfiles
        self.addCleanup(os.remove, self.f.name)

    def test_get_eslint_violations_count(self):
        with open(self.f.name, 'w') as f:
            f.write("3000 violations found")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "eslint")  # pylint: disable=protected-access
        self.assertEqual(actual_count, 3000)

    def test_get_eslint_violations_no_number_found(self):
        with open(self.f.name, 'w') as f:
            f.write("Not expected string regex")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "eslint")  # pylint: disable=protected-access
        self.assertEqual(actual_count, None)

    def test_get_eslint_violations_count_truncated_report(self):
        """
        A truncated report (i.e. last line is just a violation)
        """
        with open(self.f.name, 'w') as f:
            f.write("foo/bar/js/fizzbuzz.js: line 45, col 59, Missing semicolon.")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "eslint")  # pylint: disable=protected-access
        self.assertEqual(actual_count, None)

    def test_complexity_value(self):
        # The complexity report ends with a float, which must be preserved.
        with open(self.f.name, 'w') as f:
            f.write("Average complexity: A (1.93953443446)")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "python_complexity")  # pylint: disable=protected-access
        self.assertEqual(actual_count, 1.93953443446)

    def test_truncated_complexity_report(self):
        with open(self.f.name, 'w') as f:
            f.write("M 110:4 FooBar.default - A")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "python_complexity")  # pylint: disable=protected-access
        self.assertEqual(actual_count, None)

    def test_no_complexity_report(self):
        # A missing report file is a build failure, not a silent None.
        with self.assertRaises(BuildFailure):
            pavelib.quality._get_count_from_last_line("non-existent-file", "python_complexity")  # pylint: disable=protected-access

    def test_generic_value(self):
        """
        Default behavior is to look for an integer appearing at head of line
        """
        with open(self.f.name, 'w') as f:
            f.write("5.777 good to see you")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo")  # pylint: disable=protected-access
        self.assertEqual(actual_count, 5)

    def test_generic_value_none_found(self):
        """
        Default behavior is to look for an integer appearing at head of line
        """
        with open(self.f.name, 'w') as f:
            f.write("hello 5.777 good to see you")
        actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo")  # pylint: disable=protected-access
        self.assertEqual(actual_count, None)

    def test_get_xsslint_counts_happy(self):
        """
        Test happy path getting violation counts from xsslint report.
        """
        report = textwrap.dedent("""
            test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
            javascript-concat-html: 310 violations
            javascript-escape: 7 violations
            2608 violations total
        """)
        with open(self.f.name, 'w') as f:
            f.write(report)
        counts = pavelib.quality._get_xsslint_counts(self.f.name)  # pylint: disable=protected-access
        self.assertDictEqual(counts, {
            'rules': {
                'javascript-concat-html': 310,
                'javascript-escape': 7,
            },
            'total': 2608,
        })

    def test_get_xsslint_counts_bad_counts(self):
        """
        Test getting violation counts from truncated and malformed xsslint
        report.
        """
        report = textwrap.dedent("""
            javascript-concat-html: violations
        """)
        with open(self.f.name, 'w') as f:
            f.write(report)
        counts = pavelib.quality._get_xsslint_counts(self.f.name)  # pylint: disable=protected-access
        self.assertDictEqual(counts, {
            'rules': {},
            'total': None,
        })

    def test_get_xsscommitlint_count_happy(self):
        """
        Test happy path getting violation count from xsscommitlint report.
        """
        report = textwrap.dedent("""
            Linting lms/templates/navigation.html:
            2 violations total
            Linting scripts/tests/templates/test.underscore:
            3 violations total
        """)
        with open(self.f.name, 'w') as f:
            f.write(report)
        count = pavelib.quality._get_xsscommitlint_count(self.f.name)  # pylint: disable=protected-access
        self.assertEqual(count, 5)

    def test_get_xsscommitlint_count_bad_counts(self):
        """
        Test getting violation count from truncated xsscommitlint report.
        """
        report = textwrap.dedent("""
            Linting lms/templates/navigation.html:
        """)
        with open(self.f.name, 'w') as f:
            f.write(report)
        count = pavelib.quality._get_xsscommitlint_count(self.f.name)  # pylint: disable=protected-access
        self.assertIsNone(count)

    def test_get_xsscommitlint_count_no_files(self):
        """
        Test getting violation count from xsscommitlint report where no files were
        linted.
        """
        report = textwrap.dedent("""
            No files linted.
        """)
        with open(self.f.name, 'w') as f:
            f.write(report)
        count = pavelib.quality._get_xsscommitlint_count(self.f.name)  # pylint: disable=protected-access
        self.assertEqual(count, 0)
class TestPrepareReportDir(unittest.TestCase):
    """
    Tests the report directory preparation
    """
    def setUp(self):
        super(TestPrepareReportDir, self).setUp()
        self.test_dir = tempfile.mkdtemp()
        self.test_file = tempfile.NamedTemporaryFile(delete=False, dir=self.test_dir)
        self.addCleanup(os.removedirs, self.test_dir)

    def test_report_dir_with_files(self):
        # _prepare_report_dir should empty out a directory containing files.
        self.assertTrue(os.path.exists(self.test_file.name))
        pavelib.quality._prepare_report_dir(path(self.test_dir))  # pylint: disable=protected-access
        self.assertFalse(os.path.exists(self.test_file.name))

    def test_report_dir_without_files(self):
        # An already-empty directory should remain empty.
        os.remove(self.test_file.name)
        pavelib.quality._prepare_report_dir(path(self.test_dir))  # pylint: disable=protected-access
        self.assertEqual(os.listdir(path(self.test_dir)), [])
class TestPaverRunQuality(PaverTestCase):
    """
    For testing the paver run_quality task
    """

    def setUp(self):
        super(TestPaverRunQuality, self).setUp()

        # mock the @needs decorator to skip it
        patcher = patch('pavelib.quality.sh')
        self._mock_paver_sh = patcher.start()
        self.addCleanup(patcher.stop)

        # Point the report directory at a throwaway temp dir for the test run.
        self.report_dir = tempfile.mkdtemp()
        report_dir_patcher = patch('pavelib.utils.envs.Env.REPORT_DIR', path(self.report_dir))
        report_dir_patcher.start()
        self.addCleanup(shutil.rmtree, self.report_dir)
        self.addCleanup(report_dir_patcher.stop)

    @patch(OPEN_BUILTIN, mock_open())
    def test_failure_on_diffquality_pylint(self):
        """
        If diff-quality fails on pylint, the paver task should also fail, but
        only after runnning diff-quality with eslint
        """
        # Underlying sh call must fail when it is running the pylint diff-quality task
        _mock_pylint_violations = MagicMock(return_value=(10000, ['some error']))
        with patch('pavelib.quality._get_pylint_violations', _mock_pylint_violations):
            with patch('pavelib.quality._parse_pylint_options', return_value=(0, 1000, 0, 0)):
                with self.assertRaises(SystemExit):
                    pavelib.quality.run_quality("")

        # Assert that _get_pylint_violations (which calls "pylint") is called once
        self.assertEqual(_mock_pylint_violations.call_count, 1)
        # Assert that sh was called twice- once for diff quality with pylint
        # and once for diff quality with eslint. This means that in the event
        # of a diff-quality pylint failure, eslint is still called.
        self.assertEqual(self._mock_paver_sh.call_count, 2)

    @patch(OPEN_BUILTIN, mock_open())
    def test_failure_on_diffquality_eslint(self):
        """
        If diff-quality fails on eslint, the paver task should also fail
        """
        # Underlying sh call must fail when it is running the eslint diff-quality task
        self._mock_paver_sh.side_effect = fail_on_eslint
        _mock_pylint_violations = MagicMock(return_value=(0, []))
        with patch('pavelib.quality._get_pylint_violations', _mock_pylint_violations):
            with self.assertRaises(SystemExit):
                pavelib.quality.run_quality("")
                self.assertRaises(BuildFailure)
        print(self._mock_paver_sh.mock_calls)

        # Test that pylint is called
        _mock_pylint_violations.assert_called_once_with(clean=False)
        # Assert that sh was called four times - once to get the comparison commit hash,
        # once to get the current commit hash, once for diff quality with pylint,
        # and once for diff quality with eslint
        self.assertEqual(self._mock_paver_sh.call_count, 4)

    @patch(OPEN_BUILTIN, mock_open())
    def test_other_exception(self):
        """
        If diff-quality fails for an unknown reason on the first run, then
        pylint should not be run
        """
        self._mock_paver_sh.side_effect = [Exception('unrecognized failure!'), 0]
        with self.assertRaises(SystemExit):
            pavelib.quality.run_quality("")
            self.assertRaises(Exception)
        # Test that pylint is NOT called by counting calls
        self.assertEqual(self._mock_paver_sh.call_count, 1)

    @patch(OPEN_BUILTIN, mock_open())
    def test_no_diff_quality_failures(self):
        # Assert nothing is raised
        pavelib.quality.run_quality("")
        # And assert that sh was called 8 times:
        # 6 for pylint on each of the system directories
        # 1 for diff_quality for pylint
        # 1 for diff_quality for eslint
        self.assertEqual(self._mock_paver_sh.call_count, 8)
| agpl-3.0 |
cneill/designate | designate/tests/test_context.py | 8 | 2410 | # Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from oslo_log import log as logging
from designate.tests import TestCase
from designate import context
from designate import exceptions
LOG = logging.getLogger(__name__)
class TestDesignateContext(TestCase):
    """Exercise DesignateContext copying, elevation and privileged flags."""

    @staticmethod
    def _make_context():
        # All tests use the same plain (non-admin) context.
        return context.DesignateContext(user='12345', tenant='54321')

    def test_deepcopy(self):
        """A deep copy serializes to the same dict as the original."""
        original = self._make_context()
        duplicate = original.deepcopy()
        self.assertEqual(original.to_dict(), duplicate.to_dict())

    def test_elevated(self):
        """elevated() yields an admin context and leaves the source untouched."""
        plain = self._make_context()
        elevated = plain.elevated()
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertEqual(0, len(plain.roles))

    def test_all_tenants(self):
        """An elevated context may enable the all_tenants flag."""
        plain = self._make_context()
        elevated = plain.elevated()
        elevated.all_tenants = True
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertTrue(elevated.all_tenants)

    def test_all_tenants_policy_failure(self):
        """A non-admin context is forbidden from setting all_tenants."""
        plain = self._make_context()
        with testtools.ExpectedException(exceptions.Forbidden):
            plain.all_tenants = True

    def test_edit_managed_records(self):
        """An elevated context may enable edit_managed_records."""
        plain = self._make_context()
        elevated = plain.elevated()
        elevated.edit_managed_records = True
        self.assertFalse(plain.is_admin)
        self.assertTrue(elevated.is_admin)
        self.assertTrue(elevated.edit_managed_records)

    def test_edit_managed_records_failure(self):
        """A non-admin context is forbidden from setting edit_managed_records."""
        plain = self._make_context()
        with testtools.ExpectedException(exceptions.Forbidden):
            plain.edit_managed_records = True
| apache-2.0 |
tcwicklund/django | django/contrib/admindocs/middleware.py | 477 | 1198 | from django import http
from django.conf import settings
class XViewMiddleware(object):
    """
    Attaches an X-View header to internal HEAD requests; the admindocs
    system reads that header to discover which view served a page.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        """
        For a HEAD request from an internal IP or an active staff user,
        short-circuit with an empty response whose X-View header names the
        view function.  The documentation module uses this to look up the
        view for an arbitrary page.  Returns None for all other requests.
        """
        assert hasattr(request, 'user'), (
            "The XView middleware requires authentication middleware to be "
            "installed. Edit your MIDDLEWARE_CLASSES setting to insert "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'.")
        if request.method != 'HEAD':
            return None
        # Keep the original short-circuit order: the (possibly lazy) user
        # object is only touched when the address is not internal.
        is_internal = request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
        if is_internal or (request.user.is_active and request.user.is_staff):
            response = http.HttpResponse()
            response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
            return response
        return None
| bsd-3-clause |
seisman/HinetPy | docs/conf.py | 1 | 2654 | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import datetime
import HinetPy
# -- Project information -----------------------------------------------------
# Copyright year tracks the build date automatically.
year = datetime.date.today().year
project = "HinetPy"
author = "Dongdong Tian"
copyright = f"2014-{year}, {author}"
# The full version, including alpha/beta/rc tags
# (taken straight from the installed package so docs never drift).
version = HinetPy.__version__
release = version
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.githubpages",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
    "sphinx_rtd_theme",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# autodoc options
autodoc_member_order = "bysource"
autoclass_content = "both"
napoleon_numpy_docstring = True
napoleon_use_admonition_for_notes = True
napoleon_use_admonition_for_examples = True
napoleon_use_admonition_for_references = True
# intersphinx configurations
# NOTE(review): this is the legacy intersphinx form (base URL as the key);
# Sphinx >= 6 expects {"python": ("https://docs.python.org/3/", None)} --
# confirm the pinned Sphinx version before modernizing.
intersphinx_mapping = {"https://docs.python.org/3/": None}
# Chinese translation
locale_dirs = ["locale/"] # path is example but recommended.
gettext_compact = False # optional.
| mit |
Scalr/libcloud | libcloud/common/softlayer.py | 58 | 2938 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Softlayer connection
"""
from libcloud.common.base import ConnectionUserAndKey
from libcloud.common.xmlrpc import XMLRPCResponse, XMLRPCConnection
from libcloud.common.types import InvalidCredsError, LibcloudError
class SoftLayerException(LibcloudError):
    """
    Generic exception raised by the SoftLayer driver.
    """
    pass
class SoftLayerObjectDoesntExist(LibcloudError):
    """
    Raised when a requested SoftLayer object does not exist.
    """
    pass
class SoftLayerResponse(XMLRPCResponse):
    """
    XML-RPC response class mapping SoftLayer fault names to exceptions.
    """
    defaultExceptionCls = SoftLayerException
    # Faults whose name starts with one of these keys raise the mapped
    # exception instead of the default SoftLayerException.
    exceptions = {
        'SoftLayer_Account': InvalidCredsError,
        'SoftLayer_Exception_ObjectNotFound': SoftLayerObjectDoesntExist
    }
class SoftLayerConnection(XMLRPCConnection, ConnectionUserAndKey):
    """
    Connection class for the SoftLayer XML-RPC v3 endpoint.
    """
    responseCls = SoftLayerResponse
    host = 'api.softlayer.com'
    endpoint = '/xmlrpc/v3'

    def request(self, service, method, *args, **kwargs):
        """
        Invoke ``method`` of the given SoftLayer ``service``.

        Recognized keyword arguments:
        id - object id used to build the init-parameters header
        object_mask - optional object mask describing fields to return
        """
        headers = {}
        headers.update(self._get_auth_headers())
        headers.update(self._get_init_params(service, kwargs.get('id')))
        # Fix: the object mask header was previously merged twice with
        # identical content; merging it once is sufficient.
        headers.update(
            self._get_object_mask(service, kwargs.get('object_mask')))
        args = ({'headers': headers}, ) + args
        endpoint = '%s/%s' % (self.endpoint, service)
        return super(SoftLayerConnection, self).request(method, *args,
                                                        **{'endpoint':
                                                           endpoint})

    def _get_auth_headers(self):
        """Return the authentication header sent with every request."""
        return {
            'authenticate': {
                'username': self.user_id,
                'apiKey': self.key
            }
        }

    def _get_init_params(self, service, id):
        """Return the per-service init-parameters header, or {} when id is None."""
        if id is not None:
            return {
                '%sInitParameters' % service: {'id': id}
            }
        else:
            return {}

    def _get_object_mask(self, service, mask):
        """Return the per-service object-mask header, or {} when mask is None."""
        if mask is not None:
            return {
                '%sObjectMask' % service: {'mask': mask}
            }
        else:
            return {}
| apache-2.0 |
weimingtom/python-for-android | python3-alpha/python3-src/Lib/tkinter/__init__.py | 45 | 155000 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import tkinter
from tkinter.constants import *
tk = tkinter.Tk()
frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision$"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
from tkinter import _fix
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
def _flatten(seq):
"""Internal function."""
res = ()
for item in seq:
if isinstance(item, (tuple, list)):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if isinstance(cnfs, dict):
return cnfs
elif isinstance(cnfs, (type(None), str)):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError) as msg:
print("_cnfmerge: fallback due to:", msg)
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
    """Container for the properties of an event.
    Instances of this type are generated if one of the following events occurs:
    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.
    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):
    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
    Enter, KeyPress, KeyRelease,
    Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
    (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
    (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    # No methods: the attributes listed above are attached dynamically
    # when an event is delivered to a bound callback.
    pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.
    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root
    _support_default_root = 0
    global _default_root
    _default_root = None
    # Remove the module-level name entirely, so later attempts to rely on
    # an implicit default root raise NameError instead of finding None.
    del _default_root
def _tkerror(err):
    """Internal function. No-op handler for Tcl errors (ERR is ignored)."""
    pass
def _exit(code='0'):
    """Internal function. Calling it will throw the exception SystemExit."""
    raise SystemExit(code)
# Counter used by Variable to generate unique Tcl names ("PY_VAR<n>").
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.
    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Default assigned in __init__ when the Tcl variable does not exist yet.
    _default = ""
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable
        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).
        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        global _varnum
        if not master:
            master = _default_root
        self._master = master
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # Generate a unique Tcl-side name of the form PY_VAR<n>.
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.set(value)
        elif not self._tk.call("info", "exists", self._name):
            # Only initialize when the Tcl variable does not already exist,
            # so a pre-existing value is retained.
            self.set(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        self._tk.globalunsetvar(self._name)
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.
        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.
        Return the name of the callback.
        """
        # The Python callback is registered as a Tcl command on the master.
        cbname = self._master._register(callback)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.
        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        # Also drop the registered Tcl command so it cannot leak.
        self._master.deletecommand(cbname)
    def trace_vinfo(self):
        """Return all trace callback information."""
        return [self._tk.split(x) for x in self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name))]
    def __eq__(self, other):
        """Comparison for equality (==).
        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        # NOTE(review): defining __eq__ without __hash__ leaves instances
        # unhashable under Python 3 -- confirm this is intended.
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """Tcl variable holder whose get() always yields a str."""
    _default = ""

    def __init__(self, master=None, value=None, name=None):
        """Create a string-valued Tcl variable.

        MASTER is the owning widget; VALUE is the initial value (defaults
        to ""); NAME is an explicit Tcl name (defaults to PY_VARnum).
        If NAME refers to an existing Tcl variable and VALUE is omitted,
        the existing value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return value of variable as string."""
        value = self._tk.globalgetvar(self._name)
        # Tcl may hand back a non-string object; coerce it in that case.
        return value if isinstance(value, str) else str(value)
class IntVar(Variable):
    """Tcl variable holder whose get() yields an int."""
    _default = 0

    def __init__(self, master=None, value=None, name=None):
        """Create an integer-valued Tcl variable.

        MASTER is the owning widget; VALUE is the initial value (defaults
        to 0); NAME is an explicit Tcl name (defaults to PY_VARnum).
        If NAME refers to an existing Tcl variable and VALUE is omitted,
        the existing value is kept.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to value, converting booleans to integers."""
        coerced = int(value) if isinstance(value, bool) else value
        return Variable.set(self, coerced)

    def get(self):
        """Return the value of the variable as an integer."""
        return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
    """Tcl variable holder whose get() yields a float."""
    _default = 0.0

    def __init__(self, master=None, value=None, name=None):
        """Create a float-valued Tcl variable.

        MASTER is the owning widget; VALUE is the initial value (defaults
        to 0.0); NAME is an explicit Tcl name (defaults to PY_VARnum).
        If NAME refers to an existing Tcl variable and VALUE is omitted,
        the existing value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a float."""
        raw = self._tk.globalgetvar(self._name)
        return getdouble(raw)
class BooleanVar(Variable):
    """Tcl variable holder whose get() yields a bool."""
    _default = False

    def __init__(self, master=None, value=None, name=None):
        """Create a boolean-valued Tcl variable.

        MASTER is the owning widget; VALUE is the initial value (defaults
        to False); NAME is an explicit Tcl name (defaults to PY_VARnum).
        If NAME refers to an existing Tcl variable and VALUE is omitted,
        the existing value is kept.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a bool."""
        raw = self._tk.globalgetvar(self._name)
        # Let the Tcl interpreter interpret "true"/"false"/0/1 etc.
        return self._tk.getboolean(raw)
def mainloop(n=0):
    """Run the main loop of Tcl."""
    # Delegates to the implicitly-created default root window.
    _default_root.tk.mainloop(n)
# Conversion helpers: Tcl results are converted with plain int()/float().
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0."""
    return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
    def destroy(self):
        """Internal function.
        Delete all Tcl commands created for
        this widget in the Tcl interpreter."""
        if self._tclCommands is not None:
            for name in self._tclCommands:
                #print '- Tkinter: deleted command', name
                self.tk.deletecommand(name)
            self._tclCommands = None
    def deletecommand(self, name):
        """Internal function.
        Delete the Tcl command provided in NAME."""
        #print '- Tkinter: deleted command', name
        self.tk.deletecommand(name)
        # Also drop the name from our book-keeping list; tolerate names
        # that were never tracked there.
        try:
            self._tclCommands.remove(name)
        except ValueError:
            pass
    def tk_strictMotif(self, boolean=None):
        """Set Tcl internal variable, whether the look and feel
        should adhere to Motif.
        A parameter of 1 means adhere to Motif (e.g. no color
        change if mouse passes over slider).
        Returns the set value."""
        return self.tk.getboolean(self.tk.call(
            'set', 'tk_strictMotif', boolean))
    def tk_bisque(self):
        """Change the color scheme to light brown as used in Tk 3.6 and before."""
        self.tk.call('tk_bisque')
    def tk_setPalette(self, *args, **kw):
        """Set a new color scheme for all widget elements.
        A single color as argument will cause that all colors of Tk
        widget elements are derived from this.
        Alternatively several keyword parameters and its associated
        colors can be given. The following keywords are valid:
        activeBackground, foreground, selectColor,
        activeForeground, highlightBackground, selectBackground,
        background, highlightColor, selectForeground,
        disabledForeground, insertBackground, troughColor."""
        # Positional colors and keyword pairs are passed to Tcl as one
        # flat argument list.
        self.tk.call(('tk_setPalette',)
              + _flatten(args) + _flatten(kw.items()))
    def tk_menuBar(self, *args):
        """Do not use. Needed in Tk 3.6 and earlier."""
        pass # obsolete since Tk 4.0
    def wait_variable(self, name='PY_VAR'):
        """Wait until the variable is modified.
        A parameter of type IntVar, StringVar, DoubleVar or
        BooleanVar must be given."""
        # "tkwait" enters a local Tcl event loop until the change happens.
        self.tk.call('tkwait', 'variable', name)
    waitvar = wait_variable # XXX b/w compat
    def wait_window(self, window=None):
        """Wait until a WIDGET is destroyed.
        If no parameter is given self is used."""
        if window is None:
            window = self
        self.tk.call('tkwait', 'window', window._w)
    def wait_visibility(self, window=None):
        """Wait until the visibility of a WIDGET changes
        (e.g. it appears).
        If no parameter is given self is used."""
        if window is None:
            window = self
        self.tk.call('tkwait', 'visibility', window._w)
    def setvar(self, name='PY_VAR', value='1'):
        """Set Tcl variable NAME to VALUE."""
        self.tk.setvar(name, value)
    def getvar(self, name='PY_VAR'):
        """Return value of Tcl variable NAME."""
        return self.tk.getvar(name)
    # Plain int()/float() double as Tcl-to-Python converters on this class.
    getint = int
    getdouble = float
    def getboolean(self, s):
        """Return a boolean value for Tcl boolean values true and false given as parameter."""
        return self.tk.getboolean(s)
    def focus_set(self):
        """Direct input focus to this widget.
        If the application currently does not have the focus
        this widget will get the focus if the application gets
        the focus through the window manager."""
        self.tk.call('focus', self._w)
    focus = focus_set # XXX b/w compat?
    def focus_force(self):
        """Direct input focus to this widget even if the
        application does not have the focus. Use with
        caution!"""
        self.tk.call('focus', '-force', self._w)
    def focus_get(self):
        """Return the widget which has currently the focus in the
        application.
        Use focus_displayof to allow working with several
        displays. Return None if application does not have
        the focus."""
        name = self.tk.call('focus')
        # Tcl reports "none" (or an empty string) when nothing has focus.
        if name == 'none' or not name: return None
        return self._nametowidget(name)
    def focus_displayof(self):
        """Return the widget which has currently the focus on the
        display where this widget is located.
        Return None if the application does not have the focus."""
        name = self.tk.call('focus', '-displayof', self._w)
        if name == 'none' or not name: return None
        return self._nametowidget(name)
    def focus_lastfor(self):
        """Return the widget which would have the focus if top level
        for this widget gets the focus from the window manager."""
        name = self.tk.call('focus', '-lastfor', self._w)
        if name == 'none' or not name: return None
        return self._nametowidget(name)
    def tk_focusFollowsMouse(self):
        """The widget under mouse will get automatically focus. Can not
        be disabled easily."""
        self.tk.call('tk_focusFollowsMouse')
    def tk_focusNext(self):
        """Return the next widget in the focus order which follows
        widget which has currently the focus.
        The focus order first goes to the next child, then to
        the children of the child recursively and then to the
        next sibling which is higher in the stacking order. A
        widget is omitted if it has the takefocus resource set
        to 0."""
        name = self.tk.call('tk_focusNext', self._w)
        # An empty result means there is no next widget.
        if not name: return None
        return self._nametowidget(name)
    def tk_focusPrev(self):
        """Return previous widget in the focus order. See tk_focusNext for details."""
        name = self.tk.call('tk_focusPrev', self._w)
        if not name: return None
        return self._nametowidget(name)
    def after(self, ms, func=None, *args):
        """Call function once after given time.
        MS specifies the time in milliseconds. FUNC gives the
        function which shall be called. Additional parameters
        are given as parameters to the function call. Return
        identifier to cancel scheduling with after_cancel."""
        if not func:
            # No callback: this is a plain blocking sleep inside Tcl.
            # I'd rather use time.sleep(ms*0.001)
            self.tk.call('after', ms)
        else:
            def callit():
                try:
                    func(*args)
                finally:
                    # Deregister the one-shot Tcl command once it fired,
                    # ignoring the case where it is already gone.
                    try:
                        self.deletecommand(name)
                    except TclError:
                        pass
            name = self._register(callit)
            return self.tk.call('after', ms, name)
    def after_idle(self, func, *args):
        """Call FUNC once if the Tcl main loop has no event to
        process.
        Return an identifier to cancel the scheduling with
        after_cancel."""
        # Scheduling with the special "idle" time runs when the loop is idle.
        return self.after('idle', func, *args)
    def after_cancel(self, id):
        """Cancel scheduling of function identified with ID.
        Identifier returned by after or after_idle must be
        given as first parameter."""
        # Look up the registered script so its Tcl command can be removed
        # as well; a TclError means the timer already fired or was cancelled.
        try:
            data = self.tk.call('after', 'info', id)
            # In Tk 8.3, splitlist returns: (script, type)
            # In Tk 8.4, splitlist may return (script, type) or (script,)
            script = self.tk.splitlist(data)[0]
            self.deletecommand(script)
        except TclError:
            pass
        self.tk.call('after', 'cancel', id)
    def bell(self, displayof=0):
        """Ring a display's bell."""
        self.tk.call(('bell',) + self._displayof(displayof))
    # Clipboard handling:
    def clipboard_get(self, **kw):
        """Retrieve data from the clipboard on window's display.
        The window keyword defaults to the root window of the Tkinter
        application.
        The type keyword specifies the form in which the data is
        to be returned and should be an atom name such as STRING
        or FILE_NAME. Type defaults to STRING.
        This command is equivalent to:
        selection_get(CLIPBOARD)
        """
        return self.tk.call(('clipboard', 'get') + self._options(kw))
    def clipboard_clear(self, **kw):
        """Clear the data in the Tk clipboard.
        A widget specified for the optional displayof keyword
        argument specifies the target display."""
        # Default the target display to this widget's window.
        if 'displayof' not in kw: kw['displayof'] = self._w
        self.tk.call(('clipboard', 'clear') + self._options(kw))
    def clipboard_append(self, string, **kw):
        """Append STRING to the Tk clipboard.
        A widget specified at the optional displayof keyword
        argument specifies the target display. The clipboard
        can be retrieved with selection_get."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        # "--" ends option parsing so STRING is never mistaken for an option.
        self.tk.call(('clipboard', 'append') + self._options(kw)
              + ('--', string))
    # XXX grab current w/o window argument
    def grab_current(self):
        """Return widget which has currently the grab in this application
        or None."""
        name = self.tk.call('grab', 'current', self._w)
        if not name: return None
        return self._nametowidget(name)
    def grab_release(self):
        """Release grab for this widget if currently set."""
        self.tk.call('grab', 'release', self._w)
    def grab_set(self):
        """Set grab for this widget.
        A grab directs all events to this and descendant
        widgets in the application."""
        self.tk.call('grab', 'set', self._w)
    def grab_set_global(self):
        """Set global grab for this widget.
        A global grab directs all events to this and
        descendant widgets on the display. Use with caution -
        other applications do not get events anymore."""
        self.tk.call('grab', 'set', '-global', self._w)
    def grab_status(self):
        """Return None, "local" or "global" if this widget has
        no, a local or a global grab."""
        status = self.tk.call('grab', 'status', self._w)
        # Normalize Tcl's "none" answer to Python's None.
        if status == 'none': status = None
        return status
    def option_add(self, pattern, value, priority = None):
        """Set a VALUE (second parameter) for an option
        PATTERN (first parameter).
        An optional third parameter gives the numeric priority
        (defaults to 80)."""
        # priority may be None, in which case Tcl applies its default (80).
        self.tk.call('option', 'add', pattern, value, priority)
    def option_clear(self):
        """Clear the option database.
        It will be reloaded if option_add is called."""
        self.tk.call('option', 'clear')
    def option_get(self, name, className):
        """Return the value for an option NAME for this widget
        with CLASSNAME.
        Values with higher priority override lower values."""
        return self.tk.call('option', 'get', self._w, name, className)
    def option_readfile(self, fileName, priority = None):
        """Read file FILENAME into the option database.
        An optional second parameter gives the numeric
        priority."""
        self.tk.call('option', 'readfile', fileName, priority)
    def selection_clear(self, **kw):
        """Clear the current X selection."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        self.tk.call(('selection', 'clear') + self._options(kw))
    def selection_get(self, **kw):
        """Return the contents of the current X selection.
        A keyword parameter selection specifies the name of
        the selection and defaults to PRIMARY. A keyword
        parameter displayof specifies a widget on the display
        to use."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        return self.tk.call(('selection', 'get') + self._options(kw))
    def selection_handle(self, command, **kw):
        """Specify a function COMMAND to call if the X
        selection owned by this widget is queried by another
        application.
        This function must return the contents of the
        selection. The function will be called with the
        arguments OFFSET and LENGTH which allows the chunking
        of very long selections. The following keyword
        parameters can be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        # Register the Python callable as a Tcl command first.
        name = self._register(command)
        self.tk.call(('selection', 'handle') + self._options(kw)
              + (self._w, name))
    def selection_own(self, **kw):
        """Become owner of X selection.
        A keyword parameter selection specifies the name of
        the selection (default PRIMARY)."""
        self.tk.call(('selection', 'own') +
                     self._options(kw) + (self._w,))
    def selection_own_get(self, **kw):
        """Return owner of X selection.
        The following keyword parameter can
        be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        name = self.tk.call(('selection', 'own') + self._options(kw))
        if not name: return None
        return self._nametowidget(name)
    def send(self, interp, cmd, *args):
        """Send Tcl command CMD to different interpreter INTERP to be executed."""
        return self.tk.call(('send', interp, cmd) + args)
    def lower(self, belowThis=None):
        """Lower this widget in the stacking order."""
        self.tk.call('lower', self._w, belowThis)
    def tkraise(self, aboveThis=None):
        """Raise this widget in the stacking order."""
        self.tk.call('raise', self._w, aboveThis)
    # "raise" is a Python keyword, hence the tkraise/lift spellings.
    lift = tkraise
    def colormodel(self, value=None):
        """Useless. Not implemented in Tk."""
        return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
    def winfo_class(self):
        """Return window class name of this widget."""
        return self.tk.call('winfo', 'class', self._w)
    def winfo_colormapfull(self):
        """Return true if at the last color request the colormap was full."""
        return self.tk.getboolean(
            self.tk.call('winfo', 'colormapfull', self._w))
    def winfo_containing(self, rootX, rootY, displayof=0):
        """Return the widget which is at the root coordinates ROOTX, ROOTY."""
        args = ('winfo', 'containing') \
               + self._displayof(displayof) + (rootX, rootY)
        name = self.tk.call(args)
        # Empty reply: no window of this application is at those coordinates.
        if not name: return None
        return self._nametowidget(name)
    def winfo_depth(self):
        """Return the number of bits per pixel."""
        return getint(self.tk.call('winfo', 'depth', self._w))
    def winfo_exists(self):
        """Return true if this widget exists."""
        return getint(
            self.tk.call('winfo', 'exists', self._w))
    def winfo_fpixels(self, number):
        """Return the number of pixels for the given distance NUMBER
        (e.g. "3c") as float."""
        return getdouble(self.tk.call(
            'winfo', 'fpixels', self._w, number))
    def winfo_geometry(self):
        """Return geometry string for this widget in the form "widthxheight+X+Y"."""
        return self.tk.call('winfo', 'geometry', self._w)
    def winfo_height(self):
        """Return height of this widget."""
        return getint(
            self.tk.call('winfo', 'height', self._w))
    def winfo_id(self):
        """Return identifier ID for this widget."""
        # NOTE(review): this uses self.tk.getint rather than the module-level
        # getint used by the sibling winfo_* methods — presumably because the
        # id Tk returns may not be parseable by plain int(); confirm before
        # "normalizing" it.
        return self.tk.getint(
            self.tk.call('winfo', 'id', self._w))
    def winfo_interps(self, displayof=0):
        """Return the name of all Tcl interpreters for this display."""
        args = ('winfo', 'interps') + self._displayof(displayof)
        return self.tk.splitlist(self.tk.call(args))
    def winfo_ismapped(self):
        """Return true if this widget is mapped."""
        return getint(
            self.tk.call('winfo', 'ismapped', self._w))
    def winfo_manager(self):
        """Return the window manager name for this widget."""
        return self.tk.call('winfo', 'manager', self._w)
    def winfo_name(self):
        """Return the name of this widget."""
        return self.tk.call('winfo', 'name', self._w)
    def winfo_parent(self):
        """Return the name of the parent of this widget."""
        return self.tk.call('winfo', 'parent', self._w)
    def winfo_pathname(self, id, displayof=0):
        """Return the pathname of the widget given by ID."""
        args = ('winfo', 'pathname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_pixels(self, number):
        """Rounded integer value of winfo_fpixels."""
        return getint(
            self.tk.call('winfo', 'pixels', self._w, number))
    def winfo_pointerx(self):
        """Return the x coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointerx', self._w))
    def winfo_pointerxy(self):
        """Return a tuple of x and y coordinates of the pointer on the root window."""
        return self._getints(
            self.tk.call('winfo', 'pointerxy', self._w))
    def winfo_pointery(self):
        """Return the y coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointery', self._w))
    def winfo_reqheight(self):
        """Return requested height of this widget."""
        return getint(
            self.tk.call('winfo', 'reqheight', self._w))
    def winfo_reqwidth(self):
        """Return requested width of this widget."""
        return getint(
            self.tk.call('winfo', 'reqwidth', self._w))
    def winfo_rgb(self, color):
        """Return tuple of decimal values for red, green, blue for
        COLOR in this widget."""
        return self._getints(
            self.tk.call('winfo', 'rgb', self._w, color))
    def winfo_rootx(self):
        """Return x coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rootx', self._w))
    def winfo_rooty(self):
        """Return y coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rooty', self._w))
    def winfo_screen(self):
        """Return the screen name of this widget."""
        return self.tk.call('winfo', 'screen', self._w)
    def winfo_screencells(self):
        """Return the number of the cells in the colormap of the screen
        of this widget."""
        return getint(
            self.tk.call('winfo', 'screencells', self._w))
    def winfo_screendepth(self):
        """Return the number of bits per pixel of the root window of the
        screen of this widget."""
        return getint(
            self.tk.call('winfo', 'screendepth', self._w))
    def winfo_screenheight(self):
        """Return the number of pixels of the height of the screen of this widget
        in pixel."""
        return getint(
            self.tk.call('winfo', 'screenheight', self._w))
    def winfo_screenmmheight(self):
        """Return the number of pixels of the height of the screen of
        this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmheight', self._w))
    def winfo_screenmmwidth(self):
        """Return the number of pixels of the width of the screen of
        this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmwidth', self._w))
    def winfo_screenvisual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the default
        colormodel of this screen."""
        return self.tk.call('winfo', 'screenvisual', self._w)
    def winfo_screenwidth(self):
        """Return the number of pixels of the width of the screen of
        this widget in pixel."""
        return getint(
            self.tk.call('winfo', 'screenwidth', self._w))
    def winfo_server(self):
        """Return information of the X-Server of the screen of this widget in
        the form "XmajorRminor vendor vendorVersion"."""
        return self.tk.call('winfo', 'server', self._w)
    def winfo_toplevel(self):
        """Return the toplevel widget of this widget."""
        return self._nametowidget(self.tk.call(
            'winfo', 'toplevel', self._w))
    def winfo_viewable(self):
        """Return true if the widget and all its higher ancestors are mapped."""
        return getint(
            self.tk.call('winfo', 'viewable', self._w))
    def winfo_visual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the
        colormodel of this widget."""
        return self.tk.call('winfo', 'visual', self._w)
    def winfo_visualid(self):
        """Return the X identifier for the visual for this widget."""
        return self.tk.call('winfo', 'visualid', self._w)
    def winfo_visualsavailable(self, includeids=0):
        """Return a list of all visuals available for the screen
        of this widget.
        Each item in the list consists of a visual name (see winfo_visual), a
        depth and if INCLUDEIDS=1 is given also the X identifier."""
        data = self.tk.split(
            self.tk.call('winfo', 'visualsavailable', self._w,
                         includeids and 'includeids' or None))
        if isinstance(data, str):
            # A single visual is returned as one flat string rather than a
            # list of items; re-split so the result is uniformly a list.
            data = [self.tk.split(data)]
        return [self.__winfo_parseitem(x) for x in data]
    def __winfo_parseitem(self, t):
        """Internal function. Convert the numeric fields of one
        (name, depth[, id]) visual tuple to int."""
        return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
    def __winfo_getint(self, x):
        """Internal function. int() with base auto-detection (handles hex ids)."""
        return int(x, 0)
    # Virtual-root queries fall back to the plain screen values when the
    # window manager provides no virtual root window.
    def winfo_vrootheight(self):
        """Return the height of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
        height of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootheight', self._w))
    def winfo_vrootwidth(self):
        """Return the width of the virtual root window associated with this
        widget in pixel. If there is no virtual root window return the
        width of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootwidth', self._w))
    def winfo_vrootx(self):
        """Return the x offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrootx', self._w))
    def winfo_vrooty(self):
        """Return the y offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrooty', self._w))
    def winfo_width(self):
        """Return the width of this widget."""
        return getint(
            self.tk.call('winfo', 'width', self._w))
    def winfo_x(self):
        """Return the x coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'x', self._w))
    def winfo_y(self):
        """Return the y coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'y', self._w))
    def update(self):
        """Enter event loop until all pending events have been processed by Tcl."""
        self.tk.call('update')
    def update_idletasks(self):
        """Enter event loop until all idle callbacks have been called. This
        will update the display of windows but not process events caused by
        the user."""
        self.tk.call('update', 'idletasks')
    def bindtags(self, tagList=None):
        """Set or get the list of bindtags for this widget.
        With no argument return the list of all bindtags associated with
        this widget. With a list of strings as argument the bindtags are
        set to this list. The bindtags determine in which order events are
        processed (see bind)."""
        if tagList is None:
            return self.tk.splitlist(
                self.tk.call('bindtags', self._w))
        else:
            # Setter form returns None.
            self.tk.call('bindtags', self._w, tagList)
    def _bind(self, what, sequence, func, add, needcleanup=1):
        """Internal function. Common implementation for bind/bind_all/
        bind_class: WHAT is the Tcl command prefix (e.g. ('bind', self._w)).
        A string FUNC is installed verbatim as the Tcl script; a callable is
        registered and wrapped so that returning "break" stops further
        processing; with no FUNC the current binding(s) are returned."""
        if isinstance(func, str):
            self.tk.call(what + (sequence, func))
        elif func:
            funcid = self._register(func, self._substitute,
                                    needcleanup)
            # '+' prefixes the script so Tk appends instead of replacing;
            # the generated Tcl returns "break" handling to the event loop.
            cmd = ('%sif {"[%s %s]" == "break"} break\n'
                   %
                   (add and '+' or '',
                    funcid, self._subst_format_str))
            self.tk.call(what + (sequence, cmd))
            return funcid
        elif sequence:
            return self.tk.call(what + (sequence,))
        else:
            return self.tk.splitlist(self.tk.call(what))
    def bind(self, sequence=None, func=None, add=None):
        """Bind to this widget at event SEQUENCE a call to function FUNC.
        SEQUENCE is a string of concatenated event
        patterns. An event pattern is of the form
        <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
        of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5 Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease Visibility, Destroy,
        Leave and DETAIL is the button number for ButtonPress,
        ButtonRelease and DETAIL is the Keysym for KeyPress and
        KeyRelease. Examples are
        <Control-Button-1> for pressing Control and mouse button 1 or
        <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
        An event pattern can also be a virtual event of the form
        <<AString>> where AString can be arbitrary. This
        event can be generated by event_generate.
        If events are concatenated they must appear shortly
        after each other.
        FUNC will be called if the event sequence occurs with an
        instance of Event as argument. If the return value of FUNC is
        "break" no further bound function is invoked.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function.
        Bind will return an identifier to allow deletion of the bound function with
        unbind without memory leak.
        If FUNC or SEQUENCE is omitted the bound function or list
        of bound events are returned."""
        # Delegate to the shared implementation, targeting this widget.
        return self._bind(('bind', self._w), sequence, func, add)
    def unbind(self, sequence, funcid=None):
        """Unbind for this widget for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call('bind', self._w, sequence, '')
        if funcid:
            # Also delete the registered Tcl command to avoid a leak.
            self.deletecommand(funcid)
    def bind_all(self, sequence=None, func=None, add=None):
        """Bind to all widgets at an event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function. See bind for the return value."""
        return self._bind(('bind', 'all'), sequence, func, add, 0)
    def unbind_all(self, sequence):
        """Unbind for all widgets for event SEQUENCE all functions."""
        self.tk.call('bind', 'all' , sequence, '')
    def bind_class(self, className, sequence=None, func=None, add=None):
        """Bind to widgets with bindtag CLASSNAME at event
        SEQUENCE a call of function FUNC. An additional
        boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or
        whether it will replace the previous function. See bind for
        the return value."""
        return self._bind(('bind', className), sequence, func, add, 0)
    def unbind_class(self, className, sequence):
        """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
        all functions."""
        self.tk.call('bind', className , sequence, '')
    def mainloop(self, n=0):
        """Call the mainloop of Tk."""
        self.tk.mainloop(n)
    def quit(self):
        """Quit the Tcl interpreter. All widgets will be destroyed."""
        self.tk.quit()
    def _getints(self, string):
        """Internal function. Convert a Tcl list string to a tuple of ints;
        an empty/false STRING yields None."""
        if string:
            return tuple(map(getint, self.tk.splitlist(string)))
    def _getdoubles(self, string):
        """Internal function. Convert a Tcl list string to a tuple of floats;
        an empty/false STRING yields None."""
        if string:
            return tuple(map(getdouble, self.tk.splitlist(string)))
    def _getboolean(self, string):
        """Internal function. Convert a Tcl boolean string; an empty/false
        STRING yields None."""
        if string:
            return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
    def _options(self, cnf, kw = None):
        """Internal function. Flatten the configuration dict(s) into a tuple
        of alternating '-option', value pairs for a Tcl call. Callables are
        registered as Tcl commands; lists/tuples of ints and strings are
        joined into one Tcl list string."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        res = ()
        for k, v in cnf.items():
            if v is not None:
                # Trailing '_' lets callers pass reserved words (e.g. class_).
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    v = self._register(v)
                elif isinstance(v, (tuple, list)):
                    nv = []
                    for item in v:
                        if isinstance(item, int):
                            nv.append(str(item))
                        elif isinstance(item, str):
                            # Brace-quote items containing spaces so Tcl
                            # treats each as a single list element.
                            nv.append(('{%s}' if ' ' in item else '%s') % item)
                        else:
                            break
                    else:
                        # for/else: only joins when no non-int/str item
                        # caused a break; otherwise v is passed unchanged.
                        v = ' '.join(nv)
                res = res + ('-'+k, v)
        return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
    def _register(self, func, subst=None, needcleanup=1):
        """Return a newly created Tcl function. If this
        function is called, the Python function FUNC will
        be executed. An optional function SUBST can
        be given which will be executed before FUNC."""
        f = CallWrapper(func, subst, self).__call__
        # The command name is the wrapper's id, plus FUNC's name when
        # available, to keep names unique yet recognizable in Tcl.
        name = repr(id(f))
        try:
            func = func.__func__
        except AttributeError:
            pass
        try:
            name = name + func.__name__
        except AttributeError:
            pass
        self.tk.createcommand(name, f)
        if needcleanup:
            # Track the command so it can be deleted when the widget dies.
            if self._tclCommands is None:
                self._tclCommands = []
            self._tclCommands.append(name)
        return name
    register = _register
def _root(self):
"""Internal function."""
w = self
while w.master: w = w.master
return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
    def _substitute(self, *args):
        """Internal function. Convert the Tcl %-substitution strings (in
        _subst_format order) into a single Event instance; returns the
        original args unchanged if the count does not match."""
        if len(args) != len(self._subst_format): return args
        getboolean = self.tk.getboolean
        getint = int
        def getint_event(s):
            """Tk changed behavior in 8.4.2, returning "??" rather more often."""
            try:
                return int(s)
            except ValueError:
                return s
        nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
        # Missing: (a, c, d, m, o, v, B, R)
        e = Event()
        # serial field: valid for all events
        # number of button: ButtonPress and ButtonRelease events only
        # height field: Configure, ConfigureRequest, Create,
        # ResizeRequest, and Expose events only
        # keycode field: KeyPress and KeyRelease events only
        # time field: "valid for events that contain a time field"
        # width field: Configure, ConfigureRequest, Create, ResizeRequest,
        # and Expose events only
        # x field: "valid for events that contain a x field"
        # y field: "valid for events that contain a y field"
        # keysym as decimal: KeyPress and KeyRelease events only
        # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease, and Motion events
        e.serial = getint(nsign)
        e.num = getint_event(b)
        try: e.focus = getboolean(f)
        except TclError: pass
        e.height = getint_event(h)
        e.keycode = getint_event(k)
        e.state = getint_event(s)
        e.time = getint_event(t)
        e.width = getint_event(w)
        e.x = getint_event(x)
        e.y = getint_event(y)
        e.char = A
        try: e.send_event = getboolean(E)
        except TclError: pass
        e.keysym = K
        e.keysym_num = getint_event(N)
        e.type = T
        try:
            e.widget = self._nametowidget(W)
        except KeyError:
            # Window is not one of ours (e.g. a menu); keep the raw name.
            e.widget = W
        e.x_root = getint_event(X)
        e.y_root = getint_event(Y)
        try:
            e.delta = getint(D)
        except ValueError:
            e.delta = 0
        return (e,)
    def _report_exception(self):
        """Internal function. Forward the exception currently being handled
        to the root window's report_callback_exception hook."""
        import sys
        exc, val, tb = sys.exc_info()
        root = self._root()
        root.report_callback_exception(exc, val, tb)
    def _configure(self, cmd, cnf, kw):
        """Internal function. Query or set widget options via Tcl CMD:
        no options -> dict of all options; a single option name -> that
        option's description tuple; otherwise apply the options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        if cnf is None:
            cnf = {}
            # Each Tcl item starts with '-optionname'; strip the dash.
            for x in self.tk.split(
                self.tk.call(_flatten((self._w, cmd)))):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        if isinstance(cnf, str):
            x = self.tk.split(
                self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
            return (x[0][1:],) + x[1:]
        self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.
        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        return self._configure('configure', cnf, kw)
    config = configure
    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        return self.tk.call(self._w, 'cget', '-' + key)
    __getitem__ = cget
    def __setitem__(self, key, value):
        """Set resource KEY to VALUE, i.e. widget[key] = value."""
        self.configure({key: value})
    def keys(self):
        """Return a list of all resource names of this widget."""
        return [x[0][1:] for x in
                self.tk.split(self.tk.call(self._w, 'configure'))]
    def __str__(self):
        """Return the window path name of this widget."""
        return self._w
    # Pack methods that apply to the master
    # _noarg_ is a unique sentinel: it distinguishes "no argument given"
    # from any real flag value, including 0/False/None.
    _noarg_ = ['_noarg_']
    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.
        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
    def pack_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return [self._nametowidget(x) for x in
                self.tk.splitlist(
                    self.tk.call('pack', 'slaves', self._w))]
    slaves = pack_slaves
# Place method that applies to the master
def place_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return [self._nametowidget(x) for x in
self.tk.splitlist(
self.tk.call(
'place', 'slaves', self._w))]
    # Grid methods that apply to the master
    def grid_bbox(self, column=None, row=None, col2=None, row2=None):
        """Return a tuple of integer coordinates for the bounding
        box of this widget controlled by the geometry manager grid.
        If COLUMN, ROW is given the bounding box applies from
        the cell with row and column 0 to the specified
        cell. If COL2 and ROW2 are given the bounding box
        starts at that cell.
        The returned integers specify the offset of the upper left
        corner in the master widget and the width and height.
        """
        args = ('grid', 'bbox', self._w)
        if column is not None and row is not None:
            args = args + (column, row)
        if col2 is not None and row2 is not None:
            args = args + (col2, row2)
        # Returns None when Tk reports an empty bounding box.
        return self._getints(self.tk.call(*args)) or None
    bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
"""Internal function."""
if isinstance(cnf, str) and not kw:
if cnf[-1:] == '_':
cnf = cnf[:-1]
if cnf[:1] != '-':
cnf = '-'+cnf
options = (cnf,)
else:
options = self._options(cnf, kw)
if not options:
res = self.tk.call('grid',
command, self._w, index)
words = self.tk.splitlist(res)
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if not value:
value = None
elif '.' in value:
value = getdouble(value)
else:
value = getint(value)
dict[key] = value
return dict
res = self.tk.call(
('grid', command, self._w, index)
+ options)
if len(options) == 1:
if not res: return None
# In Tk 7.5, -width can be a float
if '.' in res: return getdouble(res)
return getint(res)
    # NOTE(review): the shared cnf={} defaults below appear safe — cnf is
    # only merged, never mutated, by _grid_configure/_options — but confirm
    # before relying on that.
    def grid_columnconfigure(self, index, cnf={}, **kw):
        """Configure column INDEX of a grid.
        Valid resources are minsize (minimum size of the column),
        weight (how much does additional space propagate to this column)
        and pad (how much space to let additionally)."""
        return self._grid_configure('columnconfigure', index, cnf, kw)
    columnconfigure = grid_columnconfigure
    def grid_location(self, x, y):
        """Return a tuple of column and row which identify the cell
        at which the pixel at position X and Y inside the master
        widget is located."""
        return self._getints(
            self.tk.call(
                'grid', 'location', self._w, x, y)) or None
    def grid_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.
        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given, the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'grid', 'propagate', self._w))
        else:
            self.tk.call('grid', 'propagate', self._w, flag)
    def grid_rowconfigure(self, index, cnf={}, **kw):
        """Configure row INDEX of a grid.
        Valid resources are minsize (minimum size of the row),
        weight (how much does additional space propagate to this row)
        and pad (how much space to let additionally)."""
        return self._grid_configure('rowconfigure', index, cnf, kw)
    rowconfigure = grid_rowconfigure
    def grid_size(self):
        """Return a tuple of the number of column and rows in the grid."""
        return self._getints(
            self.tk.call('grid', 'size', self._w)) or None
    size = grid_size
    def grid_slaves(self, row=None, column=None):
        """Return a list of all slaves of this widget
        in its packing order."""
        args = ()
        if row is not None:
            args = args + ('-row', row)
        if column is not None:
            args = args + ('-column', column)
        return [self._nametowidget(x) for x in
                self.tk.splitlist(self.tk.call(
                    ('grid', 'slaves', self._w) + args))]
    # Support for the "event" command, new in Tk 4.2.
    # By Case Roole.
    def event_add(self, virtual, *sequences):
        """Bind a virtual event VIRTUAL (of the form <<Name>>)
        to an event SEQUENCE such that the virtual event is triggered
        whenever SEQUENCE occurs."""
        args = ('event', 'add', virtual) + sequences
        self.tk.call(args)
    def event_delete(self, virtual, *sequences):
        """Unbind a virtual event VIRTUAL from SEQUENCE."""
        args = ('event', 'delete', virtual) + sequences
        self.tk.call(args)
    def event_generate(self, sequence, **kw):
        """Generate an event SEQUENCE. Additional
        keyword arguments specify parameter of the event
        (e.g. x, y, rootx, rooty)."""
        args = ('event', 'generate', self._w, sequence)
        # Translate keyword arguments into '-option value' pairs for Tcl.
        for k, v in kw.items():
            args = args + ('-%s' % k, str(v))
        self.tk.call(args)
    def event_info(self, virtual=None):
        """Return a list of all virtual events or the information
        about the SEQUENCE bound to the virtual event VIRTUAL."""
        return self.tk.splitlist(
            self.tk.call('event', 'info', virtual))
    # Image related commands
    def image_names(self):
        """Return a list of all existing image names."""
        # Returns the raw Tcl result (a string); callers needing a Python
        # list must split it themselves.
        return self.tk.call('image', 'names')
    def image_types(self):
        """Return a list of all available image types (e.g. photo, bitmap)."""
        return self.tk.call('image', 'types')
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""
    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget
    def __call__(self, *args):
        """Apply first function SUBST to arguments, then FUNC."""
        try:
            if self.subst:
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit:
            # Re-raise unchanged. The previous `raise SystemExit(msg)`
            # wrapped the caught exception instance itself as the new exit
            # code (nesting SystemExit inside SystemExit) and discarded the
            # original traceback.
            raise
        except:
            # Any other callback exception is reported through the widget's
            # root window rather than killing the Tk mainloop.
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        reply = self.tk.call(self._w, 'xview', *args)
        if args:
            return None
        return self._getdoubles(reply)

    def xview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total width of the canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view according to NUMBER which is measured in "units"
        or "pages" (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        reply = self.tk.call(self._w, 'yview', *args)
        if args:
            return None
        return self._getdoubles(reply)

    def yview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total height of the canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view according to NUMBER which is measured in
        "units" or "pages" (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
"""Provides functions for the communication with the window manager."""
def wm_aspect(self,
minNumer=None, minDenom=None,
maxNumer=None, maxDenom=None):
"""Instruct the window manager to set the aspect ratio (width/height)
of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
of the actual values if no argument is given."""
return self._getints(
self.tk.call('wm', 'aspect', self._w,
minNumer, minDenom,
maxNumer, maxDenom))
aspect = wm_aspect
def wm_attributes(self, *args):
"""This subcommand returns or sets platform specific attributes
The first form returns a list of the platform specific flags and
their values. The second form returns the value for the specific
option. The third form sets one or more of the values. The values
are as follows:
On Windows, -disabled gets or sets whether the window is in a
disabled state. -toolwindow gets or sets the style of the window
to toolwindow (as defined in the MSDN). -topmost gets or sets
whether this is a topmost window (displays above all other
windows).
On Macintosh, XXXXX
On Unix, there are currently no special attribute values.
"""
args = ('wm', 'attributes', self._w) + args
return self.tk.call(args)
attributes=wm_attributes
def wm_client(self, name=None):
"""Store NAME in WM_CLIENT_MACHINE property of this widget. Return
current value."""
return self.tk.call('wm', 'client', self._w, name)
client = wm_client
def wm_colormapwindows(self, *wlist):
"""Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
of this widget. This list contains windows whose colormaps differ from their
parents. Return current list of widgets if WLIST is empty."""
if len(wlist) > 1:
wlist = (wlist,) # Tk needs a list of windows here
args = ('wm', 'colormapwindows', self._w) + wlist
return [self._nametowidget(x) for x in self.tk.call(args)]
colormapwindows = wm_colormapwindows
def wm_command(self, value=None):
"""Store VALUE in WM_COMMAND property. It is the command
which shall be used to invoke the application. Return current
command if VALUE is None."""
return self.tk.call('wm', 'command', self._w, value)
command = wm_command
def wm_deiconify(self):
"""Deiconify this widget. If it was never mapped it will not be mapped.
On Windows it will raise this widget and give it the focus."""
return self.tk.call('wm', 'deiconify', self._w)
deiconify = wm_deiconify
def wm_focusmodel(self, model=None):
"""Set focus model to MODEL. "active" means that this widget will claim
the focus itself, "passive" means that the window manager shall give
the focus. Return current focus model if MODEL is None."""
return self.tk.call('wm', 'focusmodel', self._w, model)
focusmodel = wm_focusmodel
def wm_frame(self):
"""Return identifier for decorative frame of this widget if present."""
return self.tk.call('wm', 'frame', self._w)
frame = wm_frame
def wm_geometry(self, newGeometry=None):
"""Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given."""
return self.tk.call('wm', 'geometry', self._w, newGeometry)
geometry = wm_geometry
def wm_grid(self,
baseWidth=None, baseHeight=None,
widthInc=None, heightInc=None):
"""Instruct the window manager that this widget shall only be
resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
number of grid units requested in Tk_GeometryRequest."""
return self._getints(self.tk.call(
'wm', 'grid', self._w,
baseWidth, baseHeight, widthInc, heightInc))
grid = wm_grid
def wm_group(self, pathName=None):
"""Set the group leader widgets for related widgets to PATHNAME. Return
the group leader of this widget if None is given."""
return self.tk.call('wm', 'group', self._w, pathName)
group = wm_group
def wm_iconbitmap(self, bitmap=None, default=None):
"""Set bitmap for the iconified widget to BITMAP. Return
the bitmap if None is given.
Under Windows, the DEFAULT parameter can be used to set the icon
for the widget and any descendents that don't have an icon set
explicitly. DEFAULT can be the relative path to a .ico file
(example: root.iconbitmap(default='myicon.ico') ). See Tk
documentation for more information."""
if default:
return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
else:
return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
iconbitmap = wm_iconbitmap
def wm_iconify(self):
"""Display widget as icon."""
return self.tk.call('wm', 'iconify', self._w)
iconify = wm_iconify
def wm_iconmask(self, bitmap=None):
"""Set mask for the icon bitmap of this widget. Return the
mask if None is given."""
return self.tk.call('wm', 'iconmask', self._w, bitmap)
iconmask = wm_iconmask
def wm_iconname(self, newName=None):
"""Set the name of the icon for this widget. Return the name if
None is given."""
return self.tk.call('wm', 'iconname', self._w, newName)
iconname = wm_iconname
def wm_iconposition(self, x=None, y=None):
"""Set the position of the icon of this widget to X and Y. Return
a tuple of the current values of X and X if None is given."""
return self._getints(self.tk.call(
'wm', 'iconposition', self._w, x, y))
iconposition = wm_iconposition
def wm_iconwindow(self, pathName=None):
"""Set widget PATHNAME to be displayed instead of icon. Return the current
value if None is given."""
return self.tk.call('wm', 'iconwindow', self._w, pathName)
iconwindow = wm_iconwindow
def wm_maxsize(self, width=None, height=None):
"""Set max WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'maxsize', self._w, width, height))
maxsize = wm_maxsize
def wm_minsize(self, width=None, height=None):
"""Set min WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'minsize', self._w, width, height))
minsize = wm_minsize
def wm_overrideredirect(self, boolean=None):
"""Instruct the window manager to ignore this widget
if BOOLEAN is given with 1. Return the current value if None
is given."""
return self._getboolean(self.tk.call(
'wm', 'overrideredirect', self._w, boolean))
overrideredirect = wm_overrideredirect
def wm_positionfrom(self, who=None):
"""Instruct the window manager that the position of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'positionfrom', self._w, who)
positionfrom = wm_positionfrom
def wm_protocol(self, name=None, func=None):
    """Bind the function FUNC to the window-manager protocol NAME,
    e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"; return the function
    currently bound to NAME when FUNC is None."""
    # Callables must be registered as Tcl commands; anything else
    # (e.g. an already-registered command name) is passed through.
    command = self._register(func) if hasattr(func, '__call__') else func
    return self.tk.call('wm', 'protocol', self._w, name, command)
protocol = wm_protocol
def wm_resizable(self, width=None, height=None):
    """Tell the window manager whether this window may be resized in
    WIDTH and/or HEIGHT. Both values are booleans."""
    cmd = ('wm', 'resizable', self._w, width, height)
    return self.tk.call(*cmd)
resizable = wm_resizable
def wm_sizefrom(self, who=None):
    """Tell the window manager whether the size of this widget is
    defined by the user (WHO is "user") or by its own policy (WHO is
    "program")."""
    cmd = ('wm', 'sizefrom', self._w, who)
    return self.tk.call(*cmd)
sizefrom = wm_sizefrom
def wm_state(self, newstate=None):
    """Query or set the state of this widget: one of normal, icon,
    iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
    cmd = ('wm', 'state', self._w, newstate)
    return self.tk.call(*cmd)
state = wm_state
def wm_title(self, string=None):
    """Set the title of this widget; return the current title when
    STRING is None."""
    cmd = ('wm', 'title', self._w, string)
    return self.tk.call(*cmd)
title = wm_title
def wm_transient(self, master=None):
    """Tell the window manager that this widget is transient with
    regard to the widget MASTER."""
    cmd = ('wm', 'transient', self._w, master)
    return self.tk.call(*cmd)
transient = wm_transient
def wm_withdraw(self):
    """Withdraw this widget from the screen so that it is unmapped and
    forgotten by the window manager. Re-display it with wm_deiconify."""
    cmd = ('wm', 'withdraw', self._w)
    return self.tk.call(*cmd)
withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    # The root window always has the Tcl widget path name '.'.
    _w = '.'
    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import sys, os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            # Only the standard Python suffixes are stripped; any other
            # extension stays part of the profile base name.
            if ext not in ('.py', '.pyc', '.pyo'):
                baseName = baseName + ext
        interactive = 0
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        self.readprofile(baseName, className)
    def loadtk(self):
        """Initialize the Tk subsystem for this interpreter if it has
        not been loaded yet (see the useTk constructor argument)."""
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()
    def _loadtk(self):
        # Internal: finish initialization once the Tk subsystem exists.
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)"
                               % (_tkinter.TK_VERSION, tk_version))
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \
                               % (_tkinter.TCL_VERSION, tcl_version))
        if TkVersion < 4.0:
            raise RuntimeError("Tk 4.0 or higher is required; found Tk %s"
                               % str(TkVersion))
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here, _ register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        # The first Tk instance created becomes the process-wide default
        # root (used by widgets constructed without an explicit master).
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)
    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        # Iterate over a copy: each child.destroy() removes its entry
        # from self.children while we loop.
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        global _default_root
        if _support_default_root and _default_root is self:
            _default_root = None
    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls exec on the contents of BASENAME.py and
        CLASSNAME.py if such a file exists in the home directory."""
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        # NOTE: 'dir' shadows the builtin here; it is the globals dict
        # for the exec'd profile scripts, pre-seeded with 'self' and all
        # public tkinter names.
        dir = {'self': self}
        exec('from tkinter import *', dir)
        # Class-wide profile files are processed before the per-program
        # (baseName) ones.
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            exec(open(class_py).read(), dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            exec(open(base_py).read(), dir)
    def report_callback_exception(self, exc, val, tb):
        """Internal function. It reports exception on sys.stderr."""
        import traceback, sys
        sys.stderr.write("Exception in Tkinter callback\n")
        # Mirror the interactive interpreter so post-mortem tools
        # (e.g. pdb.pm()) can inspect the failed callback.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)
    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tk instance that by default does not initialize the Tk
    subsystem (useTk=0), i.e. a plain Tcl interpreter wrapper."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Mix-in that provides the pack_* methods for every widget."""

    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
        """
        args = ('pack', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    pack = configure = config = pack_configure

    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget

    def pack_info(self):
        """Return a dictionary with the packing options of this widget;
        values that name a widget are converted to widget objects."""
        words = self.tk.splitlist(
            self.tk.call('pack', 'info', self._w))
        options = {}
        # Tk reports a flat "-option value -option value ..." list.
        for key, value in zip(words[::2], words[1::2]):
            if value[:1] == '.':
                value = self._nametowidget(value)
            options[key[1:]] = value
        return options
    info = pack_info
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Mix-in that provides the place_* methods for every widget."""

    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        args = ('place', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    place = configure = config = place_configure

    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget

    def place_info(self):
        """Return a dictionary with the placing options of this widget;
        values that name a widget are converted to widget objects."""
        words = self.tk.splitlist(
            self.tk.call('place', 'info', self._w))
        options = {}
        # Tk reports a flat "-option value -option value ..." list.
        for key, value in zip(words[::2], words[1::2]):
            if value[:1] == '.':
                value = self._nametowidget(value)
            options[key[1:]] = value
        return options
    info = place_info
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Mix-in that provides the grid_* methods for every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)

    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        args = ('grid', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure

    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget

    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)

    def grid_info(self):
        """Return a dictionary with the grid options of this widget;
        values that name a widget are converted to widget objects."""
        words = self.tk.splitlist(
            self.tk.call('grid', 'info', self._w))
        options = {}
        # Tk reports a flat "-option value -option value ..." list.
        for key, value in zip(words[::2], words[1::2]):
            if value[:1] == '.':
                value = self._nametowidget(value)
            options[key[1:]] = value
        return options
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        if _support_default_root:
            global _default_root
            # Widgets created without a master attach to the default
            # root, creating it on demand.
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # Generate a unique Tcl name from the object's id.
            name = repr(id(self))
        self._name = name
        # Build the full Tcl widget path from the master's path.
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A name clash destroys the previous widget of the same name.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Options keyed by a class (e.g. Pack) are pulled out of cnf and
        # applied after creation through that class's configure method.
        classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)]
        for k, v in classes:
            del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        # Iterate over a copy: destroying a child mutates self.children.
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.
    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    # All behavior comes from BaseWidget plus the geometry-manager mix-ins.
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        extra = ()
        # These resources can only be passed at creation time, so they
        # are extracted from cnf and handed to Tk as extra arguments.
        for wmkey in ('screen', 'class_', 'class', 'visual', 'colormap'):
            if wmkey not in cnf:
                continue
            val = cnf.pop(wmkey)
            # Keys such as 'class_' carry a trailing underscore because
            # the bare name is not usable as a Python keyword argument.
            opt = '-' + (wmkey[:-1] if wmkey[-1] == '_' else wmkey)
            extra = extra + (opt, val)
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        root = self._root()
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)
    def tkButtonEnter(self, *dummy):
        """Internal: invoke the Tk library procedure tkButtonEnter."""
        self.tk.call('tkButtonEnter', self._w)
    def tkButtonLeave(self, *dummy):
        """Internal: invoke the Tk library procedure tkButtonLeave."""
        self.tk.call('tkButtonLeave', self._w)
    def tkButtonDown(self, *dummy):
        """Internal: invoke the Tk library procedure tkButtonDown."""
        self.tk.call('tkButtonDown', self._w)
    def tkButtonUp(self, *dummy):
        """Internal: invoke the Tk library procedure tkButtonUp."""
        self.tk.call('tkButtonUp', self._w)
    def tkButtonInvoke(self, *dummy):
        """Internal: invoke the Tk library procedure tkButtonInvoke."""
        self.tk.call('tkButtonInvoke', self._w)
    def flash(self):
        """Flash the button.

        This is accomplished by redisplaying
        the button several times, alternating between active and
        normal colors. At the end of the flash the button is left
        in the same normal/active state as when the command was
        invoked. This command is ignored if the button's state is
        disabled.
        """
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Invoke the command associated with the button.

        The return value is the return value from the command,
        or an empty string if there is no command associated with
        the button. This command is ignored if the button's state
        is disabled.
        """
        return self.tk.call(self._w, 'invoke')
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
    """Return the Tk index string that denotes the end position."""
    return 'end'
def AtInsert(*args):
    """Return the Tk index string for the insertion cursor, with any
    truthy arguments appended as space-separated modifiers."""
    parts = ['insert']
    parts.extend(arg for arg in args if arg)
    return ' '.join(parts)
def AtSelFirst():
    """Return the Tk index string for the first selected character."""
    return 'sel.first'
def AtSelLast():
    """Return the Tk index string for the last selected character."""
    return 'sel.last'
def At(x, y=None):
    """Return a Tk coordinate index string '@x' or '@x,y'."""
    if y is None:
        return '@%r' % (x,)
    return '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
    """Canvas widget to display graphical elements like lines or text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a canvas widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, closeenough,
        confine, cursor, height, highlightbackground, highlightcolor,
        highlightthickness, insertbackground, insertborderwidth,
        insertofftime, insertontime, insertwidth, offset, relief,
        scrollregion, selectbackground, selectborderwidth, selectforeground,
        state, takefocus, width, xscrollcommand, xscrollincrement,
        yscrollcommand, yscrollincrement."""
        Widget.__init__(self, master, 'canvas', cnf, kw)
    def addtag(self, *args):
        """Internal function."""
        self.tk.call((self._w, 'addtag') + args)
    def addtag_above(self, newtag, tagOrId):
        """Add tag NEWTAG to all items above TAGORID."""
        self.addtag(newtag, 'above', tagOrId)
    def addtag_all(self, newtag):
        """Add tag NEWTAG to all items."""
        self.addtag(newtag, 'all')
    def addtag_below(self, newtag, tagOrId):
        """Add tag NEWTAG to all items below TAGORID."""
        self.addtag(newtag, 'below', tagOrId)
    def addtag_closest(self, newtag, x, y, halo=None, start=None):
        """Add tag NEWTAG to item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closests). If START is specified the next below this tag is taken."""
        self.addtag(newtag, 'closest', x, y, halo, start)
    def addtag_enclosed(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items in the rectangle defined
        by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
    def addtag_overlapping(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
    def addtag_withtag(self, newtag, tagOrId):
        """Add tag NEWTAG to all items with TAGORID."""
        self.addtag(newtag, 'withtag', tagOrId)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses all items with tags specified as arguments.
        Return None if Tk reports no bounding box."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tag_unbind(self, tagOrId, sequence, funcid=None):
        """Unbind for all items with TAGORID for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'bind', tagOrId, sequence, '')
        # Also deregister the underlying Tcl command if its id is known.
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
        """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.

        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'bind', tagOrId),
                          sequence, func, add)
    def canvasx(self, screenx, gridspacing=None):
        """Return the canvas x coordinate of pixel position SCREENX rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasx', screenx, gridspacing))
    def canvasy(self, screeny, gridspacing=None):
        """Return the canvas y coordinate of pixel position SCREENY rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasy', screeny, gridspacing))
    def coords(self, *args):
        """Return a list of coordinates (as floats) for the item given
        in ARGS."""
        # XXX Should use _flatten on args
        return [getdouble(x) for x in
                self.tk.splitlist(
                    self.tk.call((self._w, 'coords') + args))]
    def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
        """Internal function."""
        args = _flatten(args)
        cnf = args[-1]
        # The last positional argument may be a configuration dict or
        # tuple; if so, strip it from the coordinate arguments.
        if isinstance(cnf, (dict, tuple)):
            args = args[:-1]
        else:
            cnf = {}
        # Tk returns the new item's numeric id.
        return getint(self.tk.call(
            self._w, 'create', itemType,
            *(args + self._options(cnf, kw))))
    def create_arc(self, *args, **kw):
        """Create arc shaped region with coordinates x1,y1,x2,y2."""
        return self._create('arc', args, kw)
    def create_bitmap(self, *args, **kw):
        """Create bitmap with coordinates x1,y1."""
        return self._create('bitmap', args, kw)
    def create_image(self, *args, **kw):
        """Create image item with coordinates x1,y1."""
        return self._create('image', args, kw)
    def create_line(self, *args, **kw):
        """Create line with coordinates x1,y1,...,xn,yn."""
        return self._create('line', args, kw)
    def create_oval(self, *args, **kw):
        """Create oval with coordinates x1,y1,x2,y2."""
        return self._create('oval', args, kw)
    def create_polygon(self, *args, **kw):
        """Create polygon with coordinates x1,y1,...,xn,yn."""
        return self._create('polygon', args, kw)
    def create_rectangle(self, *args, **kw):
        """Create rectangle with coordinates x1,y1,x2,y2."""
        return self._create('rectangle', args, kw)
    def create_text(self, *args, **kw):
        """Create text with coordinates x1,y1."""
        return self._create('text', args, kw)
    def create_window(self, *args, **kw):
        """Create window with coordinates x1,y1,x2,y2."""
        return self._create('window', args, kw)
    def dchars(self, *args):
        """Delete characters of text items identified by tag or id in ARGS (possibly
        several times) from FIRST to LAST character (including)."""
        self.tk.call((self._w, 'dchars') + args)
    def delete(self, *args):
        """Delete items identified by all tag or ids contained in ARGS."""
        self.tk.call((self._w, 'delete') + args)
    def dtag(self, *args):
        """Delete tag or id given as last arguments in ARGS from items
        identified by first argument in ARGS."""
        self.tk.call((self._w, 'dtag') + args)
    def find(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'find') + args)) or ()
    def find_above(self, tagOrId):
        """Return items above TAGORID."""
        return self.find('above', tagOrId)
    def find_all(self):
        """Return all items."""
        return self.find('all')
    def find_below(self, tagOrId):
        """Return all items below TAGORID."""
        return self.find('below', tagOrId)
    def find_closest(self, x, y, halo=None, start=None):
        """Return item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closests). If START is specified the next below this tag is taken."""
        return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items in rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items with TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return tags associated with the first item specified in ARGS."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must be first."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return position of cursor as integer in item specified in ARGS."""
        return getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
    def itemcget(self, tagOrId, option):
        """Return the resource value for an OPTION for item TAGORID."""
        return self.tk.call(
            (self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    itemconfig = itemconfigure
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS
        (optional below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower
    def move(self, *args):
        """Move an item TAGORID given in ARGS."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                            self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS
        (optional above another item)."""
        self.tk.call((self._w, 'raise') + args)
    lift = tkraise = tag_raise
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item TAGORID to index."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection, or None."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID, or None."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.
        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource."""
        # Returns whatever Tk reports as the command's result.
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
    def toggle(self):
        """Toggle the button."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows to display simple text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return position of cursor."""
        return getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate (an entry only scrolls
        horizontally)."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the difference
        between X and the coordinate given in scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        cnf = _cnfmerge((cnf, kw))
        extra = ()
        # The widget class can only be set at creation time; accept both
        # the 'class_' keyword escape and the plain 'class' key, in that
        # order of preference.
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf.pop(key))
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            height, state, width
        """
        # 'label' is the Tk widget class name used to create the widget.
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
"""Listbox widget which can display a list of strings."""
def __init__(self, master=None, cnf={}, **kw):
    """Construct a listbox widget with the parent MASTER.
    Valid resource names: background, bd, bg, borderwidth, cursor,
    exportselection, fg, font, foreground, height, highlightbackground,
    highlightcolor, highlightthickness, relief, selectbackground,
    selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
    width, xscrollcommand, yscrollcommand, listvariable."""
    # 'listbox' is the Tk widget class name used to create the widget.
    Widget.__init__(self, master, 'listbox', cnf, kw)
def activate(self, index):
    """Activate item identified by INDEX."""
    # INDEX is passed through to Tk unchanged.
    self.tk.call(self._w, 'activate', index)
def bbox(self, *args):
    """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
    which encloses the item identified by index in ARGS.
    Return None when Tk reports an empty bounding box."""
    return self._getints(
        self.tk.call((self._w, 'bbox') + args)) or None
def curselection(self):
"""Return list of indices of currently selected item."""
# XXX Ought to apply self._getints()...
return self.tk.splitlist(self.tk.call(
self._w, 'curselection'))
def delete(self, first, last=None):
"""Delete items from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self, first, last=None):
"""Get list of items from FIRST to LAST (not included)."""
if last:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first)
def index(self, index):
"""Return index of item identified with INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def insert(self, index, *elements):
"""Insert ELEMENTS at INDEX."""
self.tk.call((self._w, 'insert', index) + elements)
def nearest(self, y):
"""Get index of item which is nearest to y coordinate Y."""
return getint(self.tk.call(
self._w, 'nearest', y))
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the listbox to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def see(self, index):
"""Scroll such that INDEX is visible."""
self.tk.call(self._w, 'see', index)
def selection_anchor(self, index):
"""Set the fixed end oft the selection to INDEX."""
self.tk.call(self._w, 'selection', 'anchor', index)
select_anchor = selection_anchor
def selection_clear(self, first, last=None):
"""Clear the selection from FIRST to LAST (not included)."""
self.tk.call(self._w,
'selection', 'clear', first, last)
select_clear = selection_clear
def selection_includes(self, index):
"""Return 1 if INDEX is part of the selection."""
return self.tk.getboolean(self.tk.call(
self._w, 'selection', 'includes', index))
select_includes = selection_includes
def selection_set(self, first, last=None):
"""Set the selection from FIRST to LAST (not included) without
changing the currently selected elements."""
self.tk.call(self._w, 'selection', 'set', first, last)
select_set = selection_set
def size(self):
"""Return the number of elements in the listbox."""
return getint(self.tk.call(self._w, 'size'))
def itemcget(self, index, option):
"""Return the resource value for an ITEM and an OPTION."""
return self.tk.call(
(self._w, 'itemcget') + (index, '-'+option))
def itemconfigure(self, index, cnf=None, **kw):
"""Configure resources of an ITEM.
The values for resources are specified as keyword arguments.
To get an overview about the allowed keyword arguments
call the method without arguments.
Valid resource names: background, bg, foreground, fg,
selectbackground, selectforeground."""
return self._configure(('itemconfigure', index), cnf, kw)
itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.

        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_bindForTraversal(self):
        pass # obsolete since Tk 4.0
    def tk_mbPost(self):
        self.tk.call('tk_mbPost', self._w)
    def tk_mbUnpost(self):
        self.tk.call('tk_mbUnpost')
    def tk_traverseToMenu(self, char):
        self.tk.call('tk_traverseToMenu', self._w, char)
    def tk_traverseWithinMenu(self, char):
        self.tk.call('tk_traverseWithinMenu', self._w, char)
    def tk_getMenuButtons(self):
        return self.tk.call('tk_getMenuButtons', self._w)
    def tk_nextMenu(self, count):
        self.tk.call('tk_nextMenu', count)
    def tk_nextMenuEntry(self, count):
        self.tk.call('tk_nextMenuEntry', count)
    def tk_invokeMenu(self):
        self.tk.call('tk_invokeMenu', self._w)
    def tk_firstMenu(self):
        self.tk.call('tk_firstMenu', self._w)
    def tk_mbButtonDown(self):
        self.tk.call('tk_mbButtonDown', self._w)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function.  Append an entry of ITEMTYPE to the menu."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        # NOTE: cnf and kw are mutually exclusive here; kw is
        # ignored whenever a non-empty cnf dict is passed.
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radiobutton menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function.  Insert an entry of ITEMTYPE at INDEX."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radiobutton menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1
        # Before the entries disappear, unregister any Tcl command
        # callbacks attached to them so they do not leak in the
        # interpreter.  Unresolvable indices yield an empty range.
        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of an menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure  # backward-compatible alias
    def index(self, index):
        """Return the index of a menu item identified by INDEX, or None."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a menubutton widget with the parent MASTER."""
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text. Obsolete since Label does it too."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a message widget with the parent MASTER."""
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)

    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')

    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Toggle the button and invoke a command if given as resource."""
        return self.tk.call(self._w, 'invoke')

    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.

        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Get the current value as integer or float."""
        value = self.tk.call(self._w, 'get')
        # Prefer an int when the value parses as one; fall back to float.
        try:
            return getint(value)
        except ValueError:
            return getdouble(value)
    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.

        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index):
        """Display the element at INDEX with activebackground and activerelief.

        INDEX can be "arrow1", "slider" or "arrow2"."""
        self.tk.call(self._w, 'activate', index)
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels."""
        return getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))
    def fraction(self, x, y):
        """Return the fractional value which corresponds to a slider
        position of X,Y."""
        return getdouble(self.tk.call(self._w, 'fraction', x, y))
    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1", "slider", "arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, *args):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
    """Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand,

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap,

        """
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, *args):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the index in ARGS.

        Returns None if the character is not visible."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tk_textSelectTo(self, index):
        self.tk.call('tk_textSelectTo', self._w, index)
    def tk_textBackspace(self):
        self.tk.call('tk_textBackspace', self._w)
    def tk_textIndexCloser(self, a, b, c):
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)
    def tk_textResetAnchor(self, index):
        self.tk.call('tk_textResetAnchor', self._w, index)
    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'debug', boolean))
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included)."""
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned is filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                # Register the Python callable as a Tcl command; remember
                # its name so it can be unregistered afterwards.
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Always clean up the temporary Tcl command.
            if func_name:
                self.deletecommand(func_name)

    ## new in tk8.4
    def edit(self, *args):
        """Internal method

        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:

        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo

        """
        return self.tk.call(self._w, 'edit', *args)

    def edit_modified(self, arg=None):
        """Get or Set the modified flag

        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)

    def edit_redo(self):
        """Redo the last undone edit

        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")

    def edit_reset(self):
        """Clears the undo and redo stacks
        """
        return self.edit("reset")

    def edit_separator(self):
        """Inserts a separator (boundary) on the undo stack.

        Does nothing when the undo option is false
        """
        return self.edit("separator")

    def edit_undo(self):
        """Undoes the last edit action

        If the undo option is true. An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false
        """
        return self.edit("undo")

    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included)."""
        return self.tk.call(self._w, 'get', index1, index2)
    # (Image commands are new in 8.0)
    def image_cget(self, index, option):
        """Return the value of OPTION of an embedded image at INDEX."""
        # Normalize the option name to Tk form: leading dash,
        # trailing underscore (used to escape keywords) stripped.
        if option[:1] != "-":
            option = "-" + option
        if option[-1:] == "_":
            option = option[:-1]
        return self.tk.call(self._w, "image", "cget", index, option)
    def image_configure(self, index, cnf=None, **kw):
        """Configure an embedded image at INDEX."""
        return self._configure(('image', 'configure', index), cnf, kw)
    def image_create(self, index, cnf={}, **kw):
        """Create an embedded image at INDEX."""
        return self.tk.call(
                 self._w, "image", "create", index,
                 *self._options(cnf, kw))
    def image_names(self):
        """Return all names of embedded images in this widget."""
        return self.tk.call(self._w, "image", "names")
    def index(self, index):
        """Return the index in the form line.char for INDEX."""
        return str(self.tk.call(self._w, 'index', index))
    def insert(self, index, chars, *args):
        """Insert CHARS before the characters at INDEX. An additional
        tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
        self.tk.call((self._w, 'insert', index, chars) + args)
    def mark_gravity(self, markName, direction=None):
        """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
        Return the current value if None is given for DIRECTION."""
        return self.tk.call(
            (self._w, 'mark', 'gravity', markName, direction))
    def mark_names(self):
        """Return all mark names."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'mark', 'names'))
    def mark_set(self, markName, index):
        """Set mark MARKNAME before the character at INDEX."""
        self.tk.call(self._w, 'mark', 'set', markName, index)
    def mark_unset(self, *markNames):
        """Delete all marks in MARKNAMES."""
        self.tk.call((self._w, 'mark', 'unset') + markNames)
    def mark_next(self, index):
        """Return the name of the next mark after INDEX, or None."""
        return self.tk.call(self._w, 'mark', 'next', index) or None
    def mark_previous(self, index):
        """Return the name of the previous mark before INDEX, or None."""
        return self.tk.call(self._w, 'mark', 'previous', index) or None
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the text to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def search(self, pattern, index, stopindex=None,
               forwards=None, backwards=None, exact=None,
               regexp=None, nocase=None, count=None, elide=None):
        """Search PATTERN beginning from INDEX until STOPINDEX.
        Return the index of the first character of a match or an
        empty string."""
        args = [self._w, 'search']
        if forwards: args.append('-forwards')
        if backwards: args.append('-backwards')
        if exact: args.append('-exact')
        if regexp: args.append('-regexp')
        if nocase: args.append('-nocase')
        if elide: args.append('-elide')
        if count: args.append('-count'); args.append(count)
        # '--' terminates option parsing in case the pattern itself
        # starts with a dash.
        if pattern and pattern[0] == '-': args.append('--')
        args.append(pattern)
        args.append(index)
        if stopindex: args.append(stopindex)
        return str(self.tk.call(tuple(args)))
    def see(self, index):
        """Scroll such that the character at INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def tag_add(self, tagName, index1, *args):
        """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
        Additional pairs of indices may follow in ARGS."""
        self.tk.call(
            (self._w, 'tag', 'add', tagName, index1) + args)
    def tag_unbind(self, tagName, sequence, funcid=None):
        """Unbind for all characters with TAGNAME for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagName, sequence, func, add=None):
        """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.

        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'tag', 'bind', tagName),
                  sequence, func, add)
    def tag_cget(self, tagName, option):
        """Return the value of OPTION for tag TAGNAME."""
        # Normalize the option name to Tk form (see image_cget).
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'tag', 'cget', tagName, option)
    def tag_configure(self, tagName, cnf=None, **kw):
        """Configure a tag TAGNAME."""
        return self._configure(('tag', 'configure', tagName), cnf, kw)
    tag_config = tag_configure  # backward-compatible alias
    def tag_delete(self, *tagNames):
        """Delete all tags in TAGNAMES."""
        self.tk.call((self._w, 'tag', 'delete') + tagNames)
    def tag_lower(self, tagName, belowThis=None):
        """Change the priority of tag TAGNAME such that it is lower
        than the priority of BELOWTHIS."""
        self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
    def tag_names(self, index=None):
        """Return a list of all tag names."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'tag', 'names', index))
    def tag_nextrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched forward from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'nextrange', tagName, index1, index2))
    def tag_prevrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched backwards from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'prevrange', tagName, index1, index2))
    def tag_raise(self, tagName, aboveThis=None):
        """Change the priority of tag TAGNAME such that it is higher
        than the priority of ABOVETHIS."""
        self.tk.call(
            self._w, 'tag', 'raise', tagName, aboveThis)
    def tag_ranges(self, tagName):
        """Return a list of ranges of text which have tag TAGNAME."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'ranges', tagName))
    def tag_remove(self, tagName, index1, index2=None):
        """Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
        self.tk.call(
            self._w, 'tag', 'remove', tagName, index1, index2)
    def window_cget(self, index, option):
        """Return the value of OPTION of an embedded window at INDEX."""
        # Normalize the option name to Tk form (see image_cget).
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'window', 'cget', index, option)
    def window_configure(self, index, cnf=None, **kw):
        """Configure an embedded window at INDEX."""
        return self._configure(('window', 'configure', index), cnf, kw)
    window_config = window_configure  # backward-compatible alias
    def window_create(self, index, cnf={}, **kw):
        """Create a window at INDEX."""
        self.tk.call(
              (self._w, 'window', 'create', index)
              + self._options(cnf, kw))
    def window_names(self):
        """Return all names of embedded windows in this widget."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'window', 'names'))
    def yview_pickplace(self, *what):
        """Obsolete function, use see."""
        self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""

    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command.

        Raises TclError if any keyword argument other than 'command'
        is given."""
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.pop('command', None)
        if kwargs:
            # Use next(iter(...)) rather than kwargs.keys()[0]: dict views
            # are not indexable under Python 3, so the old form raised a
            # TypeError here instead of the intended TclError.
            raise TclError('unknown option -' + next(iter(kwargs)))
        # One menu entry per value; each entry sets the variable (and
        # fires the callback) through a _setit wrapper.
        menu.add_command(label=value,
                 command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                     command=_setit(variable, v, callback))
        self["menu"] = menu

    def __getitem__(self, name):
        # Expose the internal menu through widget['menu'].
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)

    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        # Drop the reference cycle with the child menu.
        self.__menu = None
class Image:
    """Base class for images."""
    # Class-wide counter used to generate unique image names.
    _last_id = 0
    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        """Create an image of type IMGTYPE (e.g. 'photo' or 'bitmap')
        named NAME on the interpreter of MASTER (default root)."""
        self.name = None
        if not master:
            master = _default_root
            if not master:
                raise RuntimeError('Too early to create image')
        self.tk = master.tk
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if hasattr(v, '__call__'):
                # NOTE(review): _register is not defined on Image itself in
                # this file; presumably callable options are never passed or
                # this relies on a method provided elsewhere -- confirm.
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name
    def __str__(self): return self.name
    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass
    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)
    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)
    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # Trailing underscore escapes option names that clash
                # with Python keywords.
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)
    config = configure  # backward-compatible alias
    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))
    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)
    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""
    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.

        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)
    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')
    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)
    # XXX config
    def __getitem__(self, key):
        # Overrides Image.__getitem__ to return the bare value
        # rather than the full 'configure' tuple.
        return self.tk.call(self.name, 'cget', '-' + key)
    # XXX copy -from, -to, ...?
    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        destImage = PhotoImage()
        self.tk.call(destImage, 'copy', self.name)
        return destImage
    def zoom(self, x, y=''):
        """Return a new PhotoImage with the same image as this widget
        but zoom it with X and Y.

        If Y is omitted it defaults to X (uniform zoom)."""
        destImage = PhotoImage()
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-zoom', x, y)
        return destImage
    def subsample(self, x, y=''):
        """Return a new PhotoImage based on the same image as this widget
        but use only every Xth or Yth pixel.

        If Y is omitted it defaults to X."""
        destImage = PhotoImage()
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-subsample', x, y)
        return destImage
    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)
    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            # Accept both (x, y) and ('-to', x, y) forms for TO.
            if to[0] == '-to':
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)
    # XXX read
    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap."""

    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.

        Valid resource names: background, data, file, foreground,
        maskdata, maskfile."""
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names():
    """Return the names of all images known to the default Tk interpreter."""
    return _default_root.tk.call('image', 'names')
def image_types():
    """Return the image types supported by the default Tk interpreter."""
    return _default_root.tk.call('image', 'types')
class Spinbox(Widget, XView):
"""spinbox widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a spinbox widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, background, borderwidth,
            cursor, exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, justify, relief,
            repeatdelay, repeatinterval,
            selectbackground, selectborderwidth,
            selectforeground, takefocus, textvariable,
            xscrollcommand.

        WIDGET-SPECIFIC OPTIONS

            buttonbackground, buttoncursor,
            buttondownrelief, buttonuprelief,
            command, disabledbackground,
            disabledforeground, format, from,
            invalidcommand, increment,
            readonlybackground, state, to,
            validate, validatecommand, values,
            width, wrap,
        """
        Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self.tk.call(self._w, 'bbox', index)
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Returns the spinbox's string"""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Alter the position of the insertion cursor.
The insertion cursor will be displayed just before
the character given by index. Returns an empty string
"""
return self.tk.call(self._w, 'icursor', index)
def identify(self, x, y):
"""Returns the name of the widget at position x, y
Return value is one of: none, buttondown, buttonup, entry
"""
return self.tk.call(self._w, 'identify', x, y)
def index(self, index):
"""Returns the numerical index corresponding to index
"""
return self.tk.call(self._w, 'index', index)
def insert(self, index, s):
"""Insert string s at index
Returns an empty string.
"""
return self.tk.call(self._w, 'insert', index, s)
def invoke(self, element):
    """Cause the specified element to be invoked.

    The element may be 'buttondown' or 'buttonup',
    triggering the action associated with it.
    """
    return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
    """Internal function: forward a 'scan' subcommand to Tk.

    Returns the Tk result converted to a tuple of ints, or an
    empty tuple when Tk returns nothing.
    """
    return self._getints(
        self.tk.call((self._w, 'scan') + args)) or ()
def scan_mark(self, x):
    """Record x and the current view in the spinbox window;

    used in conjunction with later scan dragto commands.
    Typically this command is associated with a mouse button
    press in the widget. It returns an empty string.
    """
    return self.scan("mark", x)
def scan_dragto(self, x):
    """Compute the difference between the given x argument
    and the x argument to the last scan mark command.

    It then adjusts the view left or right by 10 times the
    difference in x-coordinates. This command is typically
    associated with mouse motion events in the widget, to
    produce the effect of dragging the spinbox at high speed
    through the window. The return value is an empty string.
    """
    return self.scan("dragto", x)
def selection(self, *args):
    """Internal function: forward a 'selection' subcommand to Tk.

    Returns the Tk result converted to a tuple of ints, or an
    empty tuple when Tk returns nothing.
    """
    return self._getints(
        self.tk.call((self._w, 'selection') + args)) or ()
def selection_adjust(self, index):
    """Locate the end of the selection nearest to the character
    given by index,

    Then adjust that end of the selection to be at index
    (i.e including but not going beyond index). The other
    end of the selection is made the anchor point for future
    select to commands. If the selection isn't currently in
    the spinbox, then a new selection is created to include
    the characters between index and the most recent selection
    anchor point, inclusive. Returns an empty string.
    """
    return self.selection("adjust", index)
def selection_clear(self):
    """Clear the selection.

    If the selection isn't in this widget then the
    command has no effect. Returns an empty string.
    """
    return self.selection("clear")
def selection_element(self, element=None):
    """Set or get the currently selected element.

    If a spinbutton element is specified, it will be
    displayed depressed.
    """
    # NOTE(review): element=None is forwarded to Tk as-is; Tk then
    # treats the call as a query rather than a set.
    return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget.

    A frame with an optional label drawn around its border; the
    label may be plain text or another widget (labelwidget).
    """
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS

            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS

            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget.

    A container that lays out child panes separated by movable
    sashes; every method below is a thin wrapper around the Tk
    'panedwindow' widget command.
    """
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS

            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)
    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The child argument is the name of the child widget
        followed by pairs of arguments that specify how to
        manage the windows. The possible options and values
        are the ones accepted by the paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))
    def remove(self, child):
        """Remove the pane containing child from the panedwindow.

        All geometry management options for child will be forgotten.
        """
        # Tk calls this subcommand 'forget'; 'remove' is the
        # primary Python name with 'forget' kept as an alias below.
        self.tk.call(self._w, 'forget', child)
    forget = remove
    def identify(self, x, y):
        """Identify the panedwindow component at point x, y.

        If the point is over a sash or a sash handle, the result
        is a two element list containing the index of the sash or
        handle, and a word indicating whether it is over a sash
        or a handle, such as {0 sash} or {2 handle}. If the point
        is over any other part of the panedwindow, the result is
        an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)
    def proxy(self, *args):
        """Internal function: forward a 'proxy' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()
    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location.
        """
        return self.proxy("coord")
    def proxy_forget(self):
        """Remove the proxy from the display.
        """
        return self.proxy("forget")
    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates.
        """
        return self.proxy("place", x, y)
    def sash(self, *args):
        """Internal function: forward a 'sash' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()
    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by index.

        Index must be an integer between 0 and 1 less than the
        number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        pathName sash dragto index x y This command computes the
        difference between the given coordinates and the coordinates
        given to the last sash coord command for the given sash. It then
        moves that sash the computed difference. The return value is the
        empty string.
        """
        return self.sash("coord", index)
    def sash_mark(self, index):
        """Record x and y for the sash given by index;

        Used in conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)
    def sash_place(self, index, x, y):
        """Place the sash given by index at the given coordinates.
        """
        return self.sash("place", index, x, y)
    def panecget(self, child, option):
        """Query a management option for window.

        Option may be any value allowed by the paneconfigure subcommand.
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))
    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window.

        If no option is specified, returns a list describing all
        of the available options for pathName.  If option is
        specified with no value, then the command returns a list
        describing the one named option (this list will be identical
        to the corresponding sublist of the value returned if no
        option is specified). If one or more option-value pairs are
        specified, then the command modifies the given widget
        option(s) to have the given value(s); in this case the
        command returns an empty string. The following options
        are supported:

        after window
            Insert the window after the window specified. window
            should be the name of a window already managed by pathName.
        before window
            Insert the window before the window specified. window
            should be the name of a window already managed by pathName.
        height size
            Specify a height for the window. The height will be the
            outer dimension of the window including its border, if
            any. If size is an empty string, or if -height is not
            specified, then the height requested internally by the
            window will be used initially; the height may later be
            adjusted by the movement of sashes in the panedwindow.
            Size may be any value accepted by Tk_GetPixels.
        minsize n
            Specifies that the size of the window cannot be made
            less than n. This constraint only affects the size of
            the widget in the paned dimension -- the x dimension
            for horizontal panedwindows, the y dimension for
            vertical panedwindows. May be any value accepted by
            Tk_GetPixels.
        padx n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the X-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        pady n
            Specifies a non-negative value indicating how much
            extra space to leave on each side of the window in
            the Y-direction. The value may have any of the forms
            accepted by Tk_GetPixels.
        sticky style
            If a window's pane is larger than the requested
            dimensions of the window, this option may be used
            to position (or stretch) the window within its pane.
            Style is a string that contains zero or more of the
            characters n, s, e or w. The string can optionally
            contains spaces or commas, but they are ignored. Each
            letter refers to a side (north, south, east, or west)
            that the window will "stick" to. If both n and s
            (or e and w) are specified, the window will be
            stretched to fill the entire height (or width) of
            its cavity.
        width size
            Specify a width for the window. The width will be
            the outer dimension of the window including its
            border, if any. If size is an empty string, or
            if -width is not specified, then the width requested
            internally by the window will be used initially; the
            width may later be adjusted by the movement of sashes
            in the panedwindow. Size may be any value accepted by
            Tk_GetPixels.
        """
        # Query-all form: build a dict of {option: option-tuple}.
        if cnf is None and not kw:
            cnf = {}
            for x in self.tk.split(
                self.tk.call(self._w,
                             'paneconfigure', tagOrId)):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        # Query-one form: a single option name was passed as cnf.
        if isinstance(cnf, str) and not kw:
            x = self.tk.split(self.tk.call(
                self._w, 'paneconfigure', tagOrId, '-'+cnf))
            return (x[0][1:],) + x[1:]
        # Modify form: apply the given option-value pairs.
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))
    paneconfig = paneconfigure
    def panes(self):
        """Return an ordered list of the child panes."""
        return self.tk.call(self._w, 'panes')
######################################################################
# Extensions:
class Studbutton(Button):
    # Non-standard extension widget: requires a Tk build that
    # provides a 'studbutton' widget command.  Note that it calls
    # Widget.__init__ directly, bypassing Button.__init__, and
    # wires up the standard button event bindings by hand.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        self.bind('<Any-Enter>',       self.tkButtonEnter)
        self.bind('<Any-Leave>',       self.tkButtonLeave)
        self.bind('<1>',               self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
    # Non-standard extension widget: requires a Tk build that
    # provides a 'tributton' widget command.  Like Studbutton it
    # bypasses Button.__init__ and installs bindings manually.
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        self.bind('<Any-Enter>',       self.tkButtonEnter)
        self.bind('<Any-Leave>',       self.tkButtonLeave)
        self.bind('<1>',               self.tkButtonDown)
        self.bind('<ButtonRelease-1>', self.tkButtonUp)
        # Hide the textual label by drawing foreground and active
        # background in the current background colour.
        self['fg']               = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    """Interactive smoke test: show a label and two buttons."""
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        # Unicode is only rendered correctly from Tcl/Tk 8.1 on.
        text += "\nThis should be a cedilla: \xe7"
    label = Label(root, text=text)
    label.pack()
    # Clicking wraps the button's own label in brackets each time.
    test = Button(root, text="Click me!",
                  command=lambda root=root: root.test.configure(
                      text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()

if __name__ == '__main__':
    _test()
| apache-2.0 |
opennvm/nvm-fast-swap | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Shared state used by the trace callbacks and the handle_* post-
# processing functions below.  All buffers are bounded by
# buffer_budget; the of_count_* counters record dropped entries.
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed

# options (set in trace_begin from sys.argv)
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple; every per-event tuple starts with
# these six common fields, followed by event-specific values.
EINFO_IDX_NAME=   0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU=    2
EINFO_IDX_TIME=   3
EINFO_IDX_PID=    4
EINFO_IDX_COMM=   5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    """Return the interval from src to dst, converted from
    nanoseconds to milliseconds (may be negative)."""
    delta_nsec = dst - src
    return delta_nsec / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
    # Honour the "dev=" filter: skip entries for other devices.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    # One line per packet: device, length, enqueue time, time spent
    # in the Qdisc (queue -> xmit) and in the driver (xmit -> free).
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
    show_hunk = 0
    irq_list = hunk['irq_list']
    # All times are printed relative to the first irq entry.
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # First: the hard irqs that raised this NET_RX softirq, with any
    # netif_rx events recorded inside them.
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    # Then: the softirq itself and the per-skb events inside it.
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' is set when the skb was copied to user space;
            # 'handle' when it was freed (kfree_skb / consume_skb).
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                            event['comm_t'])
            print PF_JOINT
def trace_begin():
    """Parse script options from sys.argv into the module globals.

    Recognised options: 'tx', 'rx', 'dev=<name>', 'debug'.  When
    neither tx nor rx is requested, both charts are enabled.
    """
    global show_tx
    global show_rx
    global dev
    global debug
    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg =='rx':
            show_rx = 1
        elif arg.find('dev=',0, 4) >= 0:
            # 'dev=' must appear at the start of the argument.
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0  and show_rx == 0:
        show_tx = 1
        show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a correspoinding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    # Only NET_RX softirqs are of interest to this script.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    # NOTE(review): decodes vec via the "irq__softirq_entry" symbol
    # table — the vec encoding appears shared across softirq events;
    # confirm against the perf event definitions.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    # NOTE(review): decodes vec via the "irq__softirq_entry" symbol
    # table — the vec encoding appears shared across softirq events;
    # confirm against the perf event definitions.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    # Buffer the event; processing happens in trace_end().
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Push a new irq record onto this cpu's irq stack."""
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
    """Close the irq record on top of this cpu's stack.

    The record is kept only when a NET_RX softirq was raised inside
    it (i.e. it acquired an 'event_list'); otherwise it is dropped.
    """
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        # Mismatched entry/exit (lost events): discard silently.
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
    """Record a NET_RX raise inside the current irq record."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    # Annotate the irq currently on top of this cpu's stack.
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
    """Open a fresh NET_RX softirq context for this cpu."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
    """Close the NET_RX softirq for this cpu and emit a receive hunk.

    Combines the irq records stacked for the cpu with the events
    collected during the softirq; both are consumed (deleted).
    """
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        # Incomplete picture (missing irq or softirq side): drop.
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information realted to a NET_RX softirq
    receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
    """Append a napi_poll event to the cpu's open softirq context."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)
def handle_netif_rx(event_info):
    """Record a netif_rx call inside the current irq record."""
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    # Annotate the irq currently on top of this cpu's stack.
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
    """Record a received skb in the softirq context and rx buffer.

    The same record object is shared between the softirq event list
    and rx_skb_list, so later annotations (copy/free) show up in the
    printed hunk.  rx_skb_list is bounded by buffer_budget.
    """
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
    """Record a packet entering the Qdisc (dev_queue_xmit)."""
    global of_count_tx_queue_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    # Bounded buffer: drop the oldest entry when over budget.
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
    """Move a successfully transmitted skb from the Qdisc buffer to
    the xmit buffer, stamping its xmit time."""
    global of_count_tx_xmit_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                # Bounded buffer: drop the oldest entry when over budget.
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return
def handle_kfree_skb(event_info):
    """Resolve a kfree_skb against the pending packet buffers.

    A packet freed while still in the Qdisc was dropped before
    transmission and is simply discarded; one freed after xmit
    completes the tx record; a received one is annotated with the
    freeing context and removed from the rx buffer.
    """
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                    'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
def handle_consume_skb(event_info):
    """Complete a tx record for a normally-consumed (not dropped) skb."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
def handle_skb_copy_datagram_iovec(event_info):
    """Annotate a received skb with the process that copied it to
    user space, and remove it from the rx matching buffer."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                    'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
| gpl-2.0 |
emiquelito/django-cms-2.0 | mptt/tests/testcases.py | 9 | 13673 | import re
from django.test import TestCase
from mptt.exceptions import InvalidMove
from mptt.tests import doctests
from mptt.tests.models import Category, Genre
def get_tree_details(nodes):
    """Create pertinent tree details for the given list of nodes.

    Each node becomes one line of the form
    ``pk parent tree_id level left right`` ('-' for a missing
    parent), using the MPTT attribute names declared on the
    model's ``_meta``.
    """
    opts = nodes[0]._meta
    rows = []
    for node in nodes:
        parent_pk = getattr(node, '%s_id' % opts.parent_attr) or '-'
        rows.append('%s %s %s %s %s %s' % (
            node.pk,
            parent_pk,
            getattr(node, opts.tree_id_attr),
            getattr(node, opts.level_attr),
            getattr(node, opts.left_attr),
            getattr(node, opts.right_attr)))
    return '\n'.join(rows)
# Compiled once at module level: matches the leading whitespace of
# every line in a multi-line string.
leading_whitespace_re = re.compile(r'^\s+', re.MULTILINE)

def tree_details(text):
    """Strip leading whitespace from every line of ``text``.

    This lets expected tree structures be written as indented
    triple-quoted strings for readability while still comparing
    equal to the output of ``get_tree_details``.
    """
    stripped = leading_whitespace_re.sub('', text)
    return stripped
# genres.json defines the following tree structure
#
# 1 - 1 0 1 16 action
# 2 1 1 1 2 9 +-- platformer
# 3 2 1 2 3 4 | |-- platformer_2d
# 4 2 1 2 5 6 | |-- platformer_3d
# 5 2 1 2 7 8 | +-- platformer_4d
# 6 1 1 1 10 15 +-- shmup
# 7 6 1 2 11 12 |-- shmup_vertical
# 8 6 1 2 13 14 +-- shmup_horizontal
# 9 - 2 0 1 6 rpg
# 10 9 2 1 2 3 |-- arpg
# 11 9 2 1 4 5 +-- trpg
class ReparentingTestCase(TestCase):
    """
    Test that trees are in the appropriate state after reparenting and
    that reparented items have the correct tree attributes defined,
    should they be required for use after a save.

    Each expected value is the full ``pk parent tree_id level left
    right`` dump produced by ``get_tree_details``.
    """
    fixtures = ['genres.json']

    def test_new_root_from_subtree(self):
        # Detaching 'shmup' should create a brand-new third tree.
        shmup = Genre.objects.get(id=6)
        shmup.parent = None
        shmup.save()
        self.assertEqual(get_tree_details([shmup]), '6 - 3 0 1 6')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 10
                                         2 1 1 1 2 9
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 2 1 2 7 8
                                         9 - 2 0 1 6
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5
                                         6 - 3 0 1 6
                                         7 6 3 1 2 3
                                         8 6 3 1 4 5"""))

    def test_new_root_from_leaf_with_siblings(self):
        platformer_2d = Genre.objects.get(id=3)
        platformer_2d.parent = None
        platformer_2d.save()
        self.assertEqual(get_tree_details([platformer_2d]), '3 - 3 0 1 2')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 14
                                         2 1 1 1 2 7
                                         4 2 1 2 3 4
                                         5 2 1 2 5 6
                                         6 1 1 1 8 13
                                         7 6 1 2 9 10
                                         8 6 1 2 11 12
                                         9 - 2 0 1 6
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5
                                         3 - 3 0 1 2"""))

    def test_new_child_from_root(self):
        # Moving a whole root under another root merges the trees.
        action = Genre.objects.get(id=1)
        rpg = Genre.objects.get(id=9)
        action.parent = rpg
        action.save()
        self.assertEqual(get_tree_details([action]), '1 9 2 1 6 21')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""9 - 2 0 1 22
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5
                                         1 9 2 1 6 21
                                         2 1 2 2 7 14
                                         3 2 2 3 8 9
                                         4 2 2 3 10 11
                                         5 2 2 3 12 13
                                         6 1 2 2 15 20
                                         7 6 2 3 16 17
                                         8 6 2 3 18 19"""))

    def test_move_leaf_to_other_tree(self):
        shmup_horizontal = Genre.objects.get(id=8)
        rpg = Genre.objects.get(id=9)
        shmup_horizontal.parent = rpg
        shmup_horizontal.save()
        self.assertEqual(get_tree_details([shmup_horizontal]), '8 9 2 1 6 7')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 14
                                         2 1 1 1 2 9
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 2 1 2 7 8
                                         6 1 1 1 10 13
                                         7 6 1 2 11 12
                                         9 - 2 0 1 8
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5
                                         8 9 2 1 6 7"""))

    def test_move_subtree_to_other_tree(self):
        shmup = Genre.objects.get(id=6)
        trpg = Genre.objects.get(id=11)
        shmup.parent = trpg
        shmup.save()
        self.assertEqual(get_tree_details([shmup]), '6 11 2 2 5 10')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 10
                                         2 1 1 1 2 9
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 2 1 2 7 8
                                         9 - 2 0 1 12
                                         10 9 2 1 2 3
                                         11 9 2 1 4 11
                                         6 11 2 2 5 10
                                         7 6 2 3 6 7
                                         8 6 2 3 8 9"""))

    def test_move_child_up_level(self):
        shmup_horizontal = Genre.objects.get(id=8)
        action = Genre.objects.get(id=1)
        shmup_horizontal.parent = action
        shmup_horizontal.save()
        self.assertEqual(get_tree_details([shmup_horizontal]), '8 1 1 1 14 15')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 16
                                         2 1 1 1 2 9
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 2 1 2 7 8
                                         6 1 1 1 10 13
                                         7 6 1 2 11 12
                                         8 1 1 1 14 15
                                         9 - 2 0 1 6
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5"""))

    def test_move_subtree_down_level(self):
        shmup = Genre.objects.get(id=6)
        platformer = Genre.objects.get(id=2)
        shmup.parent = platformer
        shmup.save()
        self.assertEqual(get_tree_details([shmup]), '6 2 1 2 9 14')
        self.assertEqual(get_tree_details(Genre.tree.all()),
                         tree_details("""1 - 1 0 1 16
                                         2 1 1 1 2 15
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 2 1 2 7 8
                                         6 2 1 2 9 14
                                         7 6 1 3 10 11
                                         8 6 1 3 12 13
                                         9 - 2 0 1 6
                                         10 9 2 1 2 3
                                         11 9 2 1 4 5"""))

    def test_invalid_moves(self):
        # A node may not be made a child of itself
        action = Genre.objects.get(id=1)
        action.parent = action
        platformer = Genre.objects.get(id=2)
        platformer.parent = platformer
        self.assertRaises(InvalidMove, action.save)
        self.assertRaises(InvalidMove, platformer.save)

        # A node may not be made a child of any of its descendants
        platformer_4d = Genre.objects.get(id=5)
        action.parent = platformer_4d
        platformer.parent = platformer_4d
        self.assertRaises(InvalidMove, action.save)
        self.assertRaises(InvalidMove, platformer.save)

        # New parent is still set when an error occurs
        # (assertEqual used for consistency with the other tests;
        # assertEquals is a deprecated alias)
        self.assertEqual(action.parent, platformer_4d)
        self.assertEqual(platformer.parent, platformer_4d)
# categories.json defines the following tree structure:
#
# 1 - 1 0 1 20 games
# 2 1 1 1 2 7 +-- wii
# 3 2 1 2 3 4 | |-- wii_games
# 4 2 1 2 5 6 | +-- wii_hardware
# 5 1 1 1 8 13 +-- xbox360
# 6 5 1 2 9 10 | |-- xbox360_games
# 7 5 1 2 11 12 | +-- xbox360_hardware
# 8 1 1 1 14 19 +-- ps3
# 9 8 1 2 15 16 |-- ps3_games
# 10 8 1 2 17 18 +-- ps3_hardware
class DeletionTestCase(TestCase):
    """
    Tests that the tree structure is maintained appropriately in various
    deletion scenarios.

    Each expected value is the full ``pk parent tree_id level left
    right`` dump produced by ``get_tree_details``.
    """
    fixtures = ['categories.json']

    def test_delete_root_node(self):
        # Add a few other roots to verify that they aren't affected
        Category(name='Preceding root').insert_at(Category.objects.get(id=1),
                                                  'left', commit=True)
        Category(name='Following root').insert_at(Category.objects.get(id=1),
                                                  'right', commit=True)
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""11 - 1 0 1 2
                                         1 - 2 0 1 20
                                         2 1 2 1 2 7
                                         3 2 2 2 3 4
                                         4 2 2 2 5 6
                                         5 1 2 1 8 13
                                         6 5 2 2 9 10
                                         7 5 2 2 11 12
                                         8 1 2 1 14 19
                                         9 8 2 2 15 16
                                         10 8 2 2 17 18
                                         12 - 3 0 1 2"""),
                         'Setup for test produced unexpected result')

        Category.objects.get(id=1).delete()
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""11 - 1 0 1 2
                                         12 - 3 0 1 2"""))

    def test_delete_last_node_with_siblings(self):
        Category.objects.get(id=9).delete()
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""1 - 1 0 1 18
                                         2 1 1 1 2 7
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 1 1 1 8 13
                                         6 5 1 2 9 10
                                         7 5 1 2 11 12
                                         8 1 1 1 14 17
                                         10 8 1 2 15 16"""))

    def test_delete_last_node_with_descendants(self):
        Category.objects.get(id=8).delete()
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""1 - 1 0 1 14
                                         2 1 1 1 2 7
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 1 1 1 8 13
                                         6 5 1 2 9 10
                                         7 5 1 2 11 12"""))

    def test_delete_node_with_siblings(self):
        Category.objects.get(id=6).delete()
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""1 - 1 0 1 18
                                         2 1 1 1 2 7
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         5 1 1 1 8 11
                                         7 5 1 2 9 10
                                         8 1 1 1 12 17
                                         9 8 1 2 13 14
                                         10 8 1 2 15 16"""))

    def test_delete_node_with_descendants_and_siblings(self):
        """
        Regression test for Issue 23 - we used to use pre_delete, which
        resulted in tree cleanup being performed for every node being
        deleted, rather than just the node on which ``delete()`` was
        called.
        """
        Category.objects.get(id=5).delete()
        self.assertEqual(get_tree_details(Category.tree.all()),
                         tree_details("""1 - 1 0 1 14
                                         2 1 1 1 2 7
                                         3 2 1 2 3 4
                                         4 2 1 2 5 6
                                         8 1 1 1 8 13
                                         9 8 1 2 9 10
                                         10 8 1 2 11 12"""))
class IntraTreeMovementTestCase(TestCase):
    # Placeholder: tests for moving nodes within a single tree are not
    # written yet.
    pass
class InterTreeMovementTestCase(TestCase):
    # Placeholder: tests for moving nodes between trees are not written yet.
    pass
class PositionedInsertionTestCase(TestCase):
    # Placeholder: tests for insert_at() positioning are not written yet.
    pass
| bsd-3-clause |
gilbertw/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/palmos.py | 96 | 3019 | """ Python Character Mapping Codec for PalmOS 3.5.
Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless one-shot codec: delegates to the C-level charmap helpers
    # using the module-level encoding_map/decoding_map tables.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so each chunk is encoded
        # independently; [0] drops the "characters consumed" count.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless; [0] drops the "bytes consumed" count.
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support comes entirely from the Codec/StreamWriter bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support comes entirely from the Codec/StreamReader bases.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry registered for the 'palmos' encoding."""
    return codecs.CodecInfo(
        name='palmos',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# Start from the identity byte->codepoint mapping, then patch in the
# PalmOS-specific assignments in the 0x80-0x9F range.
decoding_map = codecs.make_identity_dict(range(256))
# The PalmOS character set is mostly iso-8859-1 with some differences.
decoding_map.update({
    0x0080: 0x20ac, # EURO SIGN
    0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
    0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
    0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
    0x0085: 0x2026, # HORIZONTAL ELLIPSIS
    0x0086: 0x2020, # DAGGER
    0x0087: 0x2021, # DOUBLE DAGGER
    0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
    0x0089: 0x2030, # PER MILLE SIGN
    0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
    0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
    0x008d: 0x2666, # BLACK DIAMOND SUIT
    0x008e: 0x2663, # BLACK CLUB SUIT
    0x008f: 0x2665, # BLACK HEART SUIT
    0x0090: 0x2660, # BLACK SPADE SUIT
    0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
    0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
    0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
    0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
    0x0095: 0x2022, # BULLET
    0x0096: 0x2013, # EN DASH
    0x0097: 0x2014, # EM DASH
    0x0098: 0x02dc, # SMALL TILDE
    0x0099: 0x2122, # TRADE MARK SIGN
    0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON
    0x009c: 0x0153, # LATIN SMALL LIGATURE OE
    0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
})

### Encoding Map
# Inverse of the decoding map, built once at import time.
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
nitin-cherian/LifeLongLearning | Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/pygments/lexers/stata.py | 27 | 3627 | # -*- coding: utf-8 -*-
"""
pygments.lexers.stata
~~~~~~~~~~~~~~~~~~~~~
Lexer for Stata
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
from pygments.lexers._stata_builtins import builtins_base, builtins_functions
__all__ = ['StataLexer']
class StataLexer(RegexLexer):
    """
    For `Stata <http://www.stata.com/>`_ do files.

    .. versionadded:: 2.2
    """
    # Syntax based on
    # - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
    # - http://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
    # - http://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim

    name = 'Stata'
    aliases = ['stata', 'do']
    filenames = ['*.do', '*.ado']
    mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']

    # NOTE(review): the 'operators' and 'format' states below are defined
    # but never include()d from 'root' (or any other state), so they are
    # currently unreachable — confirm whether they should be wired in.
    tokens = {
        'root': [
            include('comments'),
            include('vars-strings'),
            include('numbers'),
            include('keywords'),
            (r'.', Text),
        ],
        # Global and local macros; regular and special strings
        'vars-strings': [
            (r'\$[\w{]', Name.Variable.Global, 'var_validglobal'),
            (r'`\w{0,31}\'', Name.Variable),
            (r'"', String, 'string_dquote'),
            (r'`"', String, 'string_mquote'),
        ],
        # For either string type, highlight macros as macros
        'string_dquote': [
            (r'"', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),
            (r'\$', Name.Variable.Global, 'var_validglobal'),
            (r'`', Name.Variable, 'var_validlocal'),
            (r'[^$`"\\]+', String),
            (r'[$"\\]', String),
        ],
        'string_mquote': [
            (r'"\'', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape),
            (r'\$', Name.Variable.Global, 'var_validglobal'),
            (r'`', Name.Variable, 'var_validlocal'),
            (r'[^$`"\\]+', String),
            (r'[$"\\]', String),
        ],
        'var_validglobal': [
            (r'\{\w{0,32}\}', Name.Variable.Global, '#pop'),
            (r'\w{1,32}', Name.Variable.Global, '#pop'),
        ],
        'var_validlocal': [
            (r'\w{0,31}\'', Name.Variable, '#pop'),
        ],
        # * only OK at line start, // OK anywhere
        'comments': [
            (r'^\s*\*.*$', Comment),
            (r'//.*', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
        ],
        # Built in functions and statements
        'keywords': [
            (words(builtins_functions, prefix = r'\b', suffix = r'\('),
             Name.Function),
            (words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
             Keyword),
        ],
        # http://www.stata.com/help.cgi?operators
        'operators': [
            (r'-|==|<=|>=|<|>|&|!=', Operator),
            (r'\*|\+|\^|/|!|~|==|~=', Operator)
        ],
        # Stata numbers
        'numbers': [
            # decimal number
            (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
             Number),
        ],
        # Stata formats
        'format': [
            (r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Variable),
            (r'%(21x|16H|16L|8H|8L)', Name.Variable),
            (r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg).{0,32}', Name.Variable),
            (r'%[-~]?\d{1,4}s', Name.Variable),
        ]
    }
| mit |
OnionIoT/i2c-exp-driver | examples/pwm-exp.py | 1 | 1269 | from OmegaExpansion import pwmExp
import time
import sys
# check the arguments
# Optional single CLI argument selects the PWM channel (default 0).
channel = 0
if len(sys.argv) == 2:
    channel = int(sys.argv[1])

print('>> Using channel %d'%channel)

# Silence library debug output.
pwmExp.setVerbosity(0)

# check initialization
# should return 0 if the PWM Expansion has just been plugged in
ret = pwmExp.checkInit()
print("checking if initialized: %s"%ret)

# initialize the pwm-exp
ret = pwmExp.driverInit()
print("driverInit return: %s"%ret)
if (ret != 0):
    exit()

ret = pwmExp.setFrequency(50)
print("setFrequency return: %s"%ret)
if (ret != 0):
    exit()

# check initialization
# should return 1 since the Expansion was initialized above
ret = pwmExp.checkInit()
print("checking if initialized: %s"%ret)

time.sleep(1)

# set channel 0
# setupDriver(channel, duty_cycle, delay) — drive 9% duty cycle.
ret = pwmExp.setupDriver(channel, 9, 0)
print("setupDriver return: %s"%ret)
if (ret != 0):
    exit()
time.sleep(2)

# set channel 0
ret = pwmExp.setupDriver(channel, 5, 0)
print("setupDriver return: %s"%ret)
if (ret != 0):
    exit()
time.sleep(2)

# change the frequency
ret = pwmExp.setFrequency(60)
print("setFrequency return: %s"%ret)
if (ret != 0):
    exit()
time.sleep(2)

# set channel 0
ret = pwmExp.setupDriver(channel, 6.55, 0)
print("setupDriver return: %s"%ret)
if (ret != 0):
    exit()
time.sleep(2)

print("Done")
| agpl-3.0 |
carvalhomb/tsmells | guess/src/Lib/xml/dom/html/HTMLAppletElement.py | 2 | 3614 | ########################################################################
#
# File Name: HTMLAppletElement
#
# Documentation: http://docs.4suite.com/4DOM/HTMLAppletElement.html
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLAppletElement(HTMLElement):
    # Auto-generated DOM wrapper for the HTML APPLET element (see the
    # "DO NOT EDIT" notice in the file header): one _get/_set pair per
    # DOM attribute, each delegating to get/setAttribute.
    def __init__(self, ownerDocument, nodeName="APPLET"):
        HTMLElement.__init__(self, ownerDocument, nodeName)

    ### Attribute Methods ###

    def _get_align(self):
        # ALIGN is normalised to a capitalised value per the 4DOM convention.
        return string.capitalize(self.getAttribute("ALIGN"))

    def _set_align(self, value):
        self.setAttribute("ALIGN", value)

    def _get_alt(self):
        return self.getAttribute("ALT")

    def _set_alt(self, value):
        self.setAttribute("ALT", value)

    def _get_archive(self):
        return self.getAttribute("ARCHIVE")

    def _set_archive(self, value):
        self.setAttribute("ARCHIVE", value)

    def _get_code(self):
        return self.getAttribute("CODE")

    def _set_code(self, value):
        self.setAttribute("CODE", value)

    def _get_codeBase(self):
        return self.getAttribute("CODEBASE")

    def _set_codeBase(self, value):
        self.setAttribute("CODEBASE", value)

    def _get_height(self):
        return self.getAttribute("HEIGHT")

    def _set_height(self, value):
        self.setAttribute("HEIGHT", value)

    def _get_hspace(self):
        return self.getAttribute("HSPACE")

    def _set_hspace(self, value):
        self.setAttribute("HSPACE", value)

    def _get_name(self):
        return self.getAttribute("NAME")

    def _set_name(self, value):
        self.setAttribute("NAME", value)

    def _get_object(self):
        return self.getAttribute("OBJECT")

    def _set_object(self, value):
        self.setAttribute("OBJECT", value)

    def _get_vspace(self):
        return self.getAttribute("VSPACE")

    def _set_vspace(self, value):
        self.setAttribute("VSPACE", value)

    def _get_width(self):
        return self.getAttribute("WIDTH")

    def _set_width(self, value):
        self.setAttribute("WIDTH", value)

    ### Attribute Access Mappings ###

    # Property dispatch tables consumed by the HTMLElement machinery:
    # attribute name -> unbound accessor.
    _readComputedAttrs = HTMLElement._readComputedAttrs.copy()
    _readComputedAttrs.update({
        "align" : _get_align,
        "alt" : _get_alt,
        "archive" : _get_archive,
        "code" : _get_code,
        "codeBase" : _get_codeBase,
        "height" : _get_height,
        "hspace" : _get_hspace,
        "name" : _get_name,
        "object" : _get_object,
        "vspace" : _get_vspace,
        "width" : _get_width
        })

    _writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
    _writeComputedAttrs.update({
        "align" : _set_align,
        "alt" : _set_alt,
        "archive" : _set_archive,
        "code" : _set_code,
        "codeBase" : _set_codeBase,
        "height" : _set_height,
        "hspace" : _set_hspace,
        "name" : _set_name,
        "object" : _set_object,
        "vspace" : _set_vspace,
        "width" : _set_width
        })

    # Read-only attributes are the inherited ones plus any readable
    # attribute that has no setter here.
    _readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
                            HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
| gpl-2.0 |
mrphrazer/miasm | miasm/ir/translators/z3_ir.py | 2 | 10225 | from builtins import map
from builtins import range
import imp
import logging
# Raise an ImportError if z3 is not available WITHOUT actually importing it
imp.find_module("z3")
from miasm.ir.translators.translator import Translator
log = logging.getLogger("translator_z3")
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter("[%(levelname)-8s]: %(message)s"))
log.addHandler(console_handler)
log.setLevel(logging.WARNING)
class Z3Mem(object):
    """Memory abstraction for TranslatorZ3. Memory elements are only accessed,
    never written. To give a concrete value for a given memory cell in a solver,
    add "mem32.get(address, size) == <value>" constraints to your equation.
    The endianness of memory accesses is handled accordingly to the "endianness"
    attribute.

    Note: Will have one memory space for each addressing size used.
    For example, if memory is accessed via 32 bits values and 16 bits values,
    these access will not occur in the same address space.
    """

    def __init__(self, endianness="<", name="mem"):
        """Initializes a Z3Mem object with a given @name and @endianness.
        @endianness: Endianness of memory representation. '<' for little endian,
            '>' for big endian.
        @name: name of memory Arrays generated. They will be named
            name+str(address size) (for example mem32, mem16...).
        """
        # Import z3 only on demand
        global z3
        import z3
        if endianness not in ['<', '>']:
            raise ValueError("Endianness should be '>' (big) or '<' (little)")
        self.endianness = endianness
        self.mems = {}  # Address size -> memory z3.Array
        self.name = name

    def get_mem_array(self, size):
        """Returns a z3 Array used internally to represent memory for addresses
        of size @size.
        @size: integer, size in bit of addresses in the memory to get.
        Return a z3 Array: BitVecSort(size) -> BitVecSort(8).
        """
        try:
            mem = self.mems[size]
        except KeyError:
            # Lazy instantiation
            self.mems[size] = z3.Array(self.name + str(size),
                                       z3.BitVecSort(size),
                                       z3.BitVecSort(8))
            mem = self.mems[size]
        return mem

    def __getitem__(self, addr):
        """One byte memory access. Different address sizes with the same value
        will result in different memory accesses.
        @addr: a z3 BitVec, the address to read.
        Return a z3 BitVec of size 8 bits representing a memory access.
        """
        size = addr.size()
        mem = self.get_mem_array(size)
        return mem[addr]

    def get(self, addr, size):
        """ Memory access at address @addr of size @size.
        @addr: a z3 BitVec, the address to read.
        @size: int, size of the read in bits.
        Return a z3 BitVec of size @size representing a memory access.
        """
        original_size = size
        if original_size % 8 != 0:
            # Size not aligned on 8bits -> read more than size and extract after
            size = ((original_size // 8) + 1) * 8
        res = self[addr]
        if self.is_little_endian():
            # Little endian: later bytes are more significant, so each new
            # byte is concatenated on the high side.
            for i in range(1, size // 8):
                res = z3.Concat(self[addr+i], res)
        else:
            for i in range(1, size //8):
                res = z3.Concat(res, self[addr+i])
        if size == original_size:
            return res
        else:
            # Size not aligned, extract right sized result
            return z3.Extract(original_size-1, 0, res)

    def is_little_endian(self):
        """True if this memory is little endian."""
        return self.endianness == "<"

    def is_big_endian(self):
        """True if this memory is big endian."""
        return not self.is_little_endian()
class TranslatorZ3(Translator):
    """Translate a Miasm expression to an equivalent z3 python binding
    expression. Memory is abstracted via z3.Array (see Z3Mem).
    The result of from_expr will be a z3 Expr.

    If you want to interact with the memory abstraction after the translation,
    you can instantiate your own Z3Mem, that will be equivalent to the one
    used by TranslatorZ3.
    """

    # Implemented language
    __LANG__ = "z3"
    # Operations translated by evaluating the corresponding Python operator
    # on the z3 operands (see from_ExprOp).
    trivial_ops = ["+", "-", "/", "%", "&", "^", "|", "*", "<<"]

    def __init__(self, endianness="<", loc_db=None, **kwargs):
        """Instance a Z3 translator
        @endianness: (optional) memory endianness
        @loc_db: (optional) location database used to resolve ExprLoc offsets
        """
        # Import z3 only on demand
        global z3
        import z3
        super(TranslatorZ3, self).__init__(**kwargs)
        self._mem = Z3Mem(endianness)
        self.loc_db = loc_db

    def from_ExprInt(self, expr):
        """Translate an integer constant to a z3 bitvector value."""
        return z3.BitVecVal(int(expr), expr.size)

    def from_ExprId(self, expr):
        """Translate an identifier to a free z3 bitvector variable."""
        return z3.BitVec(str(expr), expr.size)

    def from_ExprLoc(self, expr):
        """Translate a location: a concrete offset when the loc_db knows it,
        otherwise a free variable named after the location."""
        if self.loc_db is None:
            # No loc_db, fallback to default name
            return z3.BitVec(str(expr), expr.size)
        loc_key = expr.loc_key
        offset = self.loc_db.get_location_offset(loc_key)
        if offset is not None:
            return z3.BitVecVal(offset, expr.size)
        # fallback to default name
        return z3.BitVec(str(loc_key), expr.size)

    def from_ExprMem(self, expr):
        """Translate a memory read through the Z3Mem abstraction."""
        addr = self.from_expr(expr.ptr)
        return self._mem.get(addr, expr.size)

    def from_ExprSlice(self, expr):
        """Translate a bit-slice to z3.Extract (stop is exclusive in Miasm,
        inclusive in z3, hence stop-1)."""
        res = self.from_expr(expr.arg)
        res = z3.Extract(expr.stop-1, expr.start, res)
        return res

    def from_ExprCompose(self, expr):
        """Translate a composition: args are given least-significant first,
        so each new part is concatenated on the high side."""
        res = None
        for arg in expr.args:
            e = z3.Extract(arg.size-1, 0, self.from_expr(arg))
            # Use an identity test: comparing a z3 expression with "!=" would
            # go through ExprRef.__ne__ rather than a plain boolean check.
            if res is not None:
                res = z3.Concat(e, res)
            else:
                res = e
        return res

    def from_ExprCond(self, expr):
        """Translate a ternary: Miasm conditions are true when non-zero."""
        cond = self.from_expr(expr.cond)
        src1 = self.from_expr(expr.src1)
        src2 = self.from_expr(expr.src2)
        return z3.If(cond != 0, src1, src2)

    def _abs(self, z3_value):
        """Absolute value of a (signed) z3 bitvector."""
        return z3.If(z3_value >= 0, z3_value, -z3_value)

    def _sdivC(self, num, den):
        """Divide (signed) @num by @den (z3 values) as C would
        See modint.__div__ for implementation choice
        """
        result_sign = z3.If(num * den >= 0,
                            z3.BitVecVal(1, num.size()),
                            z3.BitVecVal(-1, num.size()),
                            )
        return z3.UDiv(self._abs(num), self._abs(den)) * result_sign

    def from_ExprOp(self, expr):
        """Translate an operation, dispatching on the operation name.
        Binary operations are folded left-to-right over the arguments;
        comparison operations produce a 1-bit result (1/0)."""
        args = list(map(self.from_expr, expr.args))
        res = args[0]
        if len(args) > 1:
            for arg in args[1:]:
                if expr.op in self.trivial_ops:
                    # expr.op is restricted to the whitelist above, so this
                    # eval only ever builds "res <op> arg".
                    res = eval("res %s arg" % expr.op)
                elif expr.op == ">>":
                    # Logical shift right (z3's ">>" is arithmetic)
                    res = z3.LShR(res, arg)
                elif expr.op == "a>>":
                    # Arithmetic shift right
                    res = res >> arg
                elif expr.op == "<<<":
                    res = z3.RotateLeft(res, arg)
                elif expr.op == ">>>":
                    res = z3.RotateRight(res, arg)
                elif expr.op == "sdiv":
                    res = self._sdivC(res, arg)
                elif expr.op == "udiv":
                    res = z3.UDiv(res, arg)
                elif expr.op == "smod":
                    # C-style signed modulo: a - b * (a sdiv b)
                    res = res - (arg * (self._sdivC(res, arg)))
                elif expr.op == "umod":
                    res = z3.URem(res, arg)
                elif expr.op == "==":
                    res = z3.If(
                        args[0] == args[1],
                        z3.BitVecVal(1, 1),
                        z3.BitVecVal(0, 1)
                    )
                elif expr.op == "<u":
                    res = z3.If(
                        z3.ULT(args[0], args[1]),
                        z3.BitVecVal(1, 1),
                        z3.BitVecVal(0, 1)
                    )
                elif expr.op == "<s":
                    res = z3.If(
                        args[0] < args[1],
                        z3.BitVecVal(1, 1),
                        z3.BitVecVal(0, 1)
                    )
                elif expr.op == "<=u":
                    res = z3.If(
                        z3.ULE(args[0], args[1]),
                        z3.BitVecVal(1, 1),
                        z3.BitVecVal(0, 1)
                    )
                elif expr.op == "<=s":
                    res = z3.If(
                        args[0] <= args[1],
                        z3.BitVecVal(1, 1),
                        z3.BitVecVal(0, 1)
                    )
                else:
                    raise NotImplementedError("Unsupported OP yet: %s" % expr.op)
        elif expr.op == 'parity':
            # XOR of the 8 low bits
            arg = z3.Extract(7, 0, res)
            res = z3.BitVecVal(1, 1)
            for i in range(8):
                res = res ^ z3.Extract(i, i, arg)
        elif expr.op == '-':
            # Unary minus
            res = -res
        elif expr.op == "cnttrailzeros":
            size = expr.size
            src = res
            res = z3.If(src == 0, size, src)
            for i in range(size - 1, -1, -1):
                res = z3.If((src & (1 << i)) != 0, i, res)
        elif expr.op == "cntleadzeros":
            size = expr.size
            src = res
            res = z3.If(src == 0, size, src)
            for i in range(size, 0, -1):
                index = - i % size
                out = size - (index + 1)
                res = z3.If((src & (1 << index)) != 0, out, res)
        elif expr.op.startswith("zeroExt"):
            arg, = expr.args
            res = z3.ZeroExt(expr.size - arg.size, self.from_expr(arg))
        elif expr.op.startswith("signExt"):
            arg, = expr.args
            res = z3.SignExt(expr.size - arg.size, self.from_expr(arg))
        else:
            raise NotImplementedError("Unsupported OP yet: %s" % expr.op)
        return res

    def from_ExprAssign(self, expr):
        """Translate an assignment to an equality constraint."""
        src = self.from_expr(expr.src)
        dst = self.from_expr(expr.dst)
        return (src == dst)
# Register the class with the Translator dispatcher (see Translator.register).
Translator.register(TranslatorZ3)
| gpl-2.0 |
srivassumit/servo | tests/wpt/css-tests/tools/py/testing/path/common.py | 162 | 15649 | import py
import sys
class CommonFSTests(object):
    """Shared filesystem-path test suite; concrete backends provide the
    ``path1`` fixture pointing at a tree created by ``setuptestfs``.

    Fixes applied in review:
    - the first of two methods named ``test_endswith`` was silently
      shadowed by the later definition and never ran; it is renamed to
      ``test_check_endswith`` so both are collected;
    - three ``assert expr, msg`` statements that could never fail
      (the "message" was mistakenly the expected value) are turned into
      real equality assertions.
    """

    def test_constructor_equality(self, path1):
        p = path1.__class__(path1)
        assert p == path1

    def test_eq_nonstring(self, path1):
        p1 = path1.join('sampledir')
        p2 = path1.join('sampledir')
        assert p1 == p2

    def test_new_identical(self, path1):
        assert path1 == path1.new()

    def test_join(self, path1):
        p = path1.join('sampledir')
        strp = str(p)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))

    def test_join_normalized(self, path1):
        newpath = path1.join(path1.sep+'sampledir')
        strp = str(newpath)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))
        newpath = path1.join((path1.sep*2) + 'sampledir')
        strp = str(newpath)
        assert strp.endswith('sampledir')
        assert strp.startswith(str(path1))

    def test_join_noargs(self, path1):
        newpath = path1.join()
        assert path1 == newpath

    def test_add_something(self, path1):
        p = path1.join('sample')
        p = p + 'dir'
        assert p.check()
        assert p.exists()
        assert p.isdir()
        assert not p.isfile()

    def test_parts(self, path1):
        newpath = path1.join('sampledir', 'otherfile')
        par = newpath.parts()[-3:]
        assert par == [path1, path1.join('sampledir'), newpath]
        revpar = newpath.parts(reverse=True)[:3]
        assert revpar == [newpath, path1.join('sampledir'), path1]

    def test_common(self, path1):
        other = path1.join('sampledir')
        x = other.common(path1)
        assert x == path1

    #def test_parents_nonexisting_file(self, path1):
    #    newpath = path1 / 'dirnoexist' / 'nonexisting file'
    #    par = list(newpath.parents())
    #    assert par[:2] == [path1 / 'dirnoexist', path1]

    def test_basename_checks(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.check(basename='sampledir')
        assert newpath.check(notbasename='xyz')
        assert newpath.basename == 'sampledir'

    def test_basename(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.check(basename='sampledir')
        # was "assert newpath.basename, 'sampledir'" - an always-true
        # assert-with-message; now a real comparison.
        assert newpath.basename == 'sampledir'

    def test_dirname(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirname == str(path1)

    def test_dirpath(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirpath() == path1

    def test_dirpath_with_args(self, path1):
        newpath = path1.join('sampledir')
        assert newpath.dirpath('x') == path1.join('x')

    def test_newbasename(self, path1):
        newpath = path1.join('samplefile')
        newbase = newpath.new(basename="samplefile2")
        assert newbase.basename == "samplefile2"
        assert newbase.dirpath() == newpath.dirpath()

    def test_not_exists(self, path1):
        assert not path1.join('does_not_exist').check()
        assert path1.join('does_not_exist').check(exists=0)

    def test_exists(self, path1):
        assert path1.join("samplefile").check()
        assert path1.join("samplefile").check(exists=1)
        assert path1.join("samplefile").exists()
        assert path1.join("samplefile").isfile()
        assert not path1.join("samplefile").isdir()

    def test_dir(self, path1):
        #print repr(path1.join("sampledir"))
        assert path1.join("sampledir").check(dir=1)
        assert path1.join('samplefile').check(notdir=1)
        assert not path1.join("samplefile").check(dir=1)
        assert path1.join("samplefile").exists()
        assert not path1.join("samplefile").isdir()
        assert path1.join("samplefile").isfile()

    def test_fnmatch_file(self, path1):
        assert path1.join("samplefile").check(fnmatch='s*e')
        assert path1.join("samplefile").fnmatch('s*e')
        assert not path1.join("samplefile").fnmatch('s*x')
        assert not path1.join("samplefile").check(fnmatch='s*x')

    #def test_fnmatch_dir(self, path1):
    #    pattern = path1.sep.join(['s*file'])
    #    sfile = path1.join("samplefile")
    #    assert sfile.check(fnmatch=pattern)

    def test_relto(self, path1):
        l = path1.join("sampledir", "otherfile")
        assert l.relto(path1) == l.sep.join(["sampledir", "otherfile"])
        assert l.check(relto=path1)
        assert path1.check(notrelto=l)
        assert not path1.check(relto=l)

    def test_bestrelpath(self, path1):
        curdir = path1
        sep = curdir.sep
        s = curdir.bestrelpath(curdir)
        assert s == "."
        s = curdir.bestrelpath(curdir.join("hello", "world"))
        assert s == "hello" + sep + "world"
        s = curdir.bestrelpath(curdir.dirpath().join("sister"))
        assert s == ".." + sep + "sister"
        assert curdir.bestrelpath(curdir.dirpath()) == ".."
        assert curdir.bestrelpath("hello") == "hello"

    def test_relto_not_relative(self, path1):
        l1 = path1.join("bcde")
        l2 = path1.join("b")
        assert not l1.relto(l2)
        assert not l2.relto(l1)

    @py.test.mark.xfail("sys.platform.startswith('java')")
    def test_listdir(self, path1):
        l = path1.listdir()
        assert path1.join('sampledir') in l
        assert path1.join('samplefile') in l
        py.test.raises(py.error.ENOTDIR,
                       "path1.join('samplefile').listdir()")

    def test_listdir_fnmatchstring(self, path1):
        l = path1.listdir('s*dir')
        assert len(l)
        # was "assert l[0], path1.join('sampledir')" - always-true form.
        assert l[0] == path1.join('sampledir')

    def test_listdir_filter(self, path1):
        l = path1.listdir(lambda x: x.check(dir=1))
        assert path1.join('sampledir') in l
        assert not path1.join('samplefile') in l

    def test_listdir_sorted(self, path1):
        l = path1.listdir(lambda x: x.check(basestarts="sample"), sort=True)
        assert path1.join('sampledir') == l[0]
        assert path1.join('samplefile') == l[1]
        assert path1.join('samplepickle') == l[2]

    def test_visit_nofilter(self, path1):
        l = []
        for i in path1.visit():
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_norecurse(self, path1):
        l = []
        for i in path1.visit(None, lambda x: x.basename != "sampledir"):
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert not path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_filterfunc_is_string(self, path1):
        l = []
        for i in path1.visit('*dir'):
            l.append(i.relto(path1))
        # was "assert len(l), 2" - always-true form; '*dir' matches
        # exactly sampledir and otherdir.
        assert len(l) == 2
        assert "sampledir" in l
        assert "otherdir" in l

    @py.test.mark.xfail("sys.platform.startswith('java')")
    def test_visit_ignore(self, path1):
        p = path1.join('nonexisting')
        assert list(p.visit(ignore=py.error.ENOENT)) == []

    def test_visit_endswith(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(endswith="file")):
            l.append(i.relto(path1))
        assert path1.sep.join(["sampledir", "otherfile"]) in l
        assert "samplefile" in l

    def test_check_endswith(self, path1):
        # renamed from test_endswith: it was shadowed by the later method
        # of the same name and never ran.
        assert path1.check(notendswith='.py')
        x = path1.join('samplefile')
        assert x.check(endswith='file')

    def test_cmp(self, path1):
        path1 = path1.join('samplefile')
        path2 = path1.join('samplefile2')
        assert (path1 < path2) == ('samplefile' < 'samplefile2')
        assert not (path1 < path1)

    def test_simple_read(self, path1):
        x = path1.join('samplefile').read('r')
        assert x == 'samplefile\n'

    def test_join_div_operator(self, path1):
        newpath = path1 / '/sampledir' / '/test//'
        newpath2 = path1.join('sampledir', 'test')
        assert newpath == newpath2

    def test_ext(self, path1):
        newpath = path1.join('sampledir.ext')
        assert newpath.ext == '.ext'
        newpath = path1.join('sampledir')
        assert not newpath.ext

    def test_purebasename(self, path1):
        newpath = path1.join('samplefile.py')
        assert newpath.purebasename == 'samplefile'

    def test_multiple_parts(self, path1):
        newpath = path1.join('samplefile.py')
        dirname, purebasename, basename, ext = newpath._getbyspec(
            'dirname,purebasename,basename,ext')
        assert str(path1).endswith(dirname) # be careful with win32 'drive'
        assert purebasename == 'samplefile'
        assert basename == 'samplefile.py'
        assert ext == '.py'

    def test_dotted_name_ext(self, path1):
        newpath = path1.join('a.b.c')
        ext = newpath.ext
        assert ext == '.c'
        assert newpath.ext == '.c'

    def test_newext(self, path1):
        newpath = path1.join('samplefile.py')
        newext = newpath.new(ext='.txt')
        assert newext.basename == "samplefile.txt"
        assert newext.purebasename == "samplefile"

    def test_readlines(self, path1):
        fn = path1.join('samplefile')
        contents = fn.readlines()
        assert contents == ['samplefile\n']

    def test_readlines_nocr(self, path1):
        fn = path1.join('samplefile')
        contents = fn.readlines(cr=0)
        assert contents == ['samplefile', '']

    def test_file(self, path1):
        assert path1.join('samplefile').check(file=1)

    def test_not_file(self, path1):
        assert not path1.join("sampledir").check(file=1)
        assert path1.join("sampledir").check(file=0)

    def test_non_existent(self, path1):
        assert path1.join("sampledir.nothere").check(dir=0)
        assert path1.join("sampledir.nothere").check(file=0)
        assert path1.join("sampledir.nothere").check(notfile=1)
        assert path1.join("sampledir.nothere").check(notdir=1)
        assert path1.join("sampledir.nothere").check(notexists=1)
        assert not path1.join("sampledir.nothere").check(notfile=0)

    #    pattern = path1.sep.join(['s*file'])
    #    sfile = path1.join("samplefile")
    #    assert sfile.check(fnmatch=pattern)

    def test_size(self, path1):
        url = path1.join("samplefile")
        assert url.size() > len("samplefile")

    def test_mtime(self, path1):
        url = path1.join("samplefile")
        assert url.mtime() > 0

    def test_relto_wrong_type(self, path1):
        py.test.raises(TypeError, "path1.relto(42)")

    def test_load(self, path1):
        p = path1.join('samplepickle')
        obj = p.load()
        assert type(obj) is dict
        assert obj.get('answer', None) == 42

    def test_visit_filesonly(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(file=1)):
            l.append(i.relto(path1))
        assert not "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l

    def test_visit_nodotfiles(self, path1):
        l = []
        for i in path1.visit(lambda x: x.check(dotfile=0)):
            l.append(i.relto(path1))
        assert "sampledir" in l
        assert path1.sep.join(["sampledir", "otherfile"]) in l
        assert not ".dotfile" in l

    def test_visit_breadthfirst(self, path1):
        l = []
        for i in path1.visit(bf=True):
            l.append(i.relto(path1))
        # Once a nested entry appears, every following entry must also be
        # nested - i.e. all top-level entries come first.
        for i, p in enumerate(l):
            if path1.sep in p:
                for j in range(i, len(l)):
                    assert path1.sep in l[j]
                break
        else:
            py.test.fail("huh")

    def test_visit_sort(self, path1):
        l = []
        for i in path1.visit(bf=True, sort=True):
            l.append(i.relto(path1))
        # Each breadth-first level must itself be sorted.
        for i, p in enumerate(l):
            if path1.sep in p:
                break
        assert l[:i] == sorted(l[:i])
        assert l[i:] == sorted(l[i:])

    def test_endswith(self, path1):
        def chk(p):
            return p.check(endswith="pickle")
        assert not chk(path1)
        assert not chk(path1.join('samplefile'))
        assert chk(path1.join('somepickle'))

    def test_copy_file(self, path1):
        otherdir = path1.join('otherdir')
        initpy = otherdir.join('__init__.py')
        copied = otherdir.join('copied')
        initpy.copy(copied)
        try:
            assert copied.check()
            s1 = initpy.read()
            s2 = copied.read()
            assert s1 == s2
        finally:
            if copied.check():
                copied.remove()

    def test_copy_dir(self, path1):
        otherdir = path1.join('otherdir')
        copied = path1.join('newdir')
        try:
            otherdir.copy(copied)
            assert copied.check(dir=1)
            assert copied.join('__init__.py').check(file=1)
            s1 = otherdir.join('__init__.py').read()
            s2 = copied.join('__init__.py').read()
            assert s1 == s2
        finally:
            if copied.check(dir=1):
                copied.remove(rec=1)

    def test_remove_file(self, path1):
        d = path1.ensure('todeleted')
        assert d.check()
        d.remove()
        assert not d.check()

    def test_remove_dir_recursive_by_default(self, path1):
        d = path1.ensure('to', 'be', 'deleted')
        assert d.check()
        p = path1.join('to')
        p.remove()
        assert not p.check()

    def test_ensure_dir(self, path1):
        b = path1.ensure_dir("001", "002")
        assert b.basename == "002"
        assert b.isdir()

    def test_mkdir_and_remove(self, path1):
        tmpdir = path1
        py.test.raises(py.error.EEXIST, tmpdir.mkdir, 'sampledir')
        new = tmpdir.join('mktest1')
        new.mkdir()
        assert new.check(dir=1)
        new.remove()
        new = tmpdir.mkdir('mktest')
        assert new.check(dir=1)
        new.remove()
        assert tmpdir.join('mktest') == new

    def test_move_file(self, path1):
        p = path1.join('samplefile')
        newp = p.dirpath('moved_samplefile')
        p.move(newp)
        try:
            assert newp.check(file=1)
            assert not p.check()
        finally:
            # restore the fixture tree (versioned backends via revert(),
            # plain backends by moving the file back)
            dp = newp.dirpath()
            if hasattr(dp, 'revert'):
                dp.revert()
            else:
                newp.move(p)
                assert p.check()

    def test_move_dir(self, path1):
        source = path1.join('sampledir')
        dest = path1.join('moveddir')
        source.move(dest)
        assert dest.check(dir=1)
        assert dest.join('otherfile').check(file=1)
        assert not source.join('sampledir').check()
def setuptestfs(path):
    """Populate @path (a py.path-like object) with the fixture tree used
    by CommonFSTests. Idempotent: returns early if already set up.
    The exact file contents written here are asserted on by the tests.
    """
    if path.join('samplefile').check():
        return
    #print "setting up test fs for", repr(path)
    samplefile = path.ensure('samplefile')
    samplefile.write('samplefile\n')
    execfile = path.ensure('execfile')
    execfile.write('x=42')
    execfilepy = path.ensure('execfile.py')
    execfilepy.write('x=42')
    d = {1:2, 'hello': 'world', 'answer': 42}
    path.ensure('samplepickle').dump(d)
    sampledir = path.ensure('sampledir', dir=1)
    sampledir.ensure('otherfile')
    otherdir = path.ensure('otherdir', dir=1)
    otherdir.ensure('__init__.py')
    module_a = otherdir.ensure('a.py')
    # Python 2.6+ supports the explicit relative-import syntax.
    if sys.version_info >= (2,6):
        module_a.write('from .b import stuff as result\n')
    else:
        module_a.write('from b import stuff as result\n')
    module_b = otherdir.ensure('b.py')
    module_b.write('stuff="got it"\n')
    module_c = otherdir.ensure('c.py')
    module_c.write('''import py;
import otherdir.a
value = otherdir.a.result
''')
    module_d = otherdir.ensure('d.py')
    module_d.write('''import py;
from otherdir import a
value2 = a.result
''')
| mpl-2.0 |
jokey2k/ShockGsite | djangobb_forum/models.py | 1 | 16225 | from datetime import datetime
import os
import os.path
from django.db import models
from django.contrib.auth.models import User, Group
from django.utils.html import escape
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.hashcompat import sha_constructor
from djangobb_forum.fields import AutoOneToOneField, ExtendedImageField, JSONField
from djangobb_forum.util import smiles, convert_text_to_html
from djangobb_forum import settings as forum_settings
# Teach South's model inspector about the custom field types so that
# schema migrations can serialize them (only when South is installed).
if 'south' in settings.INSTALLED_APPS:
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], ['^djangobb_forum\.fields\.AutoOneToOneField',
                                 '^djangobb_forum\.fields\.JSONField',
                                 '^djangobb_forum\.fields\.ExtendedImageField'])

# UTC-offset choices (offset as float, label as string) for profile settings.
TZ_CHOICES = [(float(x[0]), x[1]) for x in (
    (-12, '-12'), (-11, '-11'), (-10, '-10'), (-9.5, '-09.5'), (-9, '-09'),
    (-8.5, '-08.5'), (-8, '-08 PST'), (-7, '-07 MST'), (-6, '-06 CST'),
    (-5, '-05 EST'), (-4, '-04 AST'), (-3.5, '-03.5'), (-3, '-03 ADT'),
    (-2, '-02'), (-1, '-01'), (0, '00 GMT'), (1, '+01 CET'), (2, '+02'),
    (3, '+03'), (3.5, '+03.5'), (4, '+04'), (4.5, '+04.5'), (5, '+05'),
    (5.5, '+05.5'), (6, '+06'), (6.5, '+06.5'), (7, '+07'), (8, '+08'),
    (9, '+09'), (9.5, '+09.5'), (10, '+10'), (10.5, '+10.5'), (11, '+11'),
    (11.5, '+11.5'), (12, '+12'), (13, '+13'), (14, '+14'),
)]

SIGN_CHOICES = (
    (1, 'PLUS'),
    (-1, 'MINUS'),
)

PRIVACY_CHOICES = (
    (0, _(u'Display your e-mail address.')),
    (1, _(u'Hide your e-mail address but allow form e-mail.')),
    (2, _(u'Hide your e-mail address and disallow form e-mail.')),
)

# Markdown is an optional markup backend: offer it only when importable.
MARKUP_CHOICES = [('bbcode', 'bbcode')]
try:
    import markdown
    MARKUP_CHOICES.append(("markdown", "markdown"))
except ImportError:
    pass

# Theme discovery from MEDIA_ROOT is currently disabled (see the
# commented-out listdir scan); only the default theme is offered.
path = os.path.join(settings.MEDIA_ROOT, 'forum', 'themes')
THEME_CHOICES = [('default','default')] #[(theme, theme) for theme in os.listdir(path)
                                        # if os.path.isdir(os.path.join(path, theme))]
class Category(models.Model):
    """Top-level grouping of forums, optionally restricted to user groups."""

    name = models.CharField(_('Name'), max_length=80)
    groups = models.ManyToManyField(Group, blank=True, null=True, verbose_name=_('Groups'), help_text=_('Only users from these groups can see this category'))
    position = models.IntegerField(_('Position'), blank=True, default=0)

    class Meta:
        ordering = ['position']
        verbose_name = _('Category')
        verbose_name_plural = _('Categories')

    def __unicode__(self):
        return self.name

    def forum_count(self):
        """Number of forums under this category."""
        return self.forums.all().count()

    @property
    def topics(self):
        """All topics in any forum of this category."""
        return Topic.objects.filter(forum__category__id=self.id).select_related()

    @property
    def posts(self):
        """All posts anywhere under this category."""
        return Post.objects.filter(topic__forum__category__id=self.id).select_related()

    def has_access(self, user):
        """Return True when *user* may see this category.

        A category with no groups assigned is public; otherwise the user
        must be authenticated and belong to at least one of the groups.
        """
        if not self.groups.exists():
            return True
        if not user.is_authenticated():
            return False
        return self.groups.filter(user__pk=user.id).exists()
class Forum(models.Model):
    """A forum inside a Category; carries denormalized post/topic counters."""
    category = models.ForeignKey(Category, related_name='forums', verbose_name=_('Category'))
    name = models.CharField(_('Name'), max_length=80)
    position = models.IntegerField(_('Position'), blank=True, default=0)
    description = models.TextField(_('Description'), blank=True, default='')
    moderators = models.ManyToManyField(User, blank=True, null=True, verbose_name=_('Moderators'))
    updated = models.DateTimeField(_('Updated'), auto_now=True)
    # Denormalized counters/pointer, repaired by Topic.delete()/Post.delete();
    # not recomputed automatically on every change.
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    topic_count = models.IntegerField(_('Topic count'), blank=True, default=0)
    last_post = models.ForeignKey('Post', related_name='last_forum_post', blank=True, null=True)

    class Meta:
        ordering = ['position']
        verbose_name = _('Forum')
        verbose_name_plural = _('Forums')

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:forum', [self.id])

    @property
    def posts(self):
        # All posts across every topic of this forum.
        return Post.objects.filter(topic__forum__id=self.id).select_related()
class Topic(models.Model):
    """A discussion thread inside a Forum."""
    forum = models.ForeignKey(Forum, related_name='topics', verbose_name=_('Forum'))
    name = models.CharField(_('Subject'), max_length=255)
    created = models.DateTimeField(_('Created'), auto_now_add=True)
    updated = models.DateTimeField(_('Updated'), null=True)
    user = models.ForeignKey(User, verbose_name=_('User'))
    views = models.IntegerField(_('Views count'), blank=True, default=0)
    sticky = models.BooleanField(_('Sticky'), blank=True, default=False)
    closed = models.BooleanField(_('Closed'), blank=True, default=False)
    subscribers = models.ManyToManyField(User, related_name='subscriptions', verbose_name=_('Subscribers'), blank=True)
    # Denormalized counter/pointer kept in sync by delete() and posting views.
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    last_post = models.ForeignKey('Post', related_name='last_topic_post', blank=True, null=True)

    class Meta:
        ordering = ['-updated']
        get_latest_by = 'updated'
        verbose_name = _('Topic')
        verbose_name_plural = _('Topics')

    def __unicode__(self):
        return self.name

    def delete(self, *args, **kwargs):
        """Delete the topic, then repair the parent forum's cached
        last_post / topic_count / post_count."""
        try:
            last_post = self.posts.latest()
        except Post.DoesNotExist:
            pass
        else:
            # Detach Forum.last_post references that point at a post which
            # is about to be removed together with this topic.
            last_post.last_forum_post.clear()
        forum = self.forum
        super(Topic, self).delete(*args, **kwargs)
        try:
            forum.last_post = Topic.objects.filter(forum__id=forum.id).latest().last_post
        except Topic.DoesNotExist:
            forum.last_post = None
        forum.topic_count = Topic.objects.filter(forum__id=forum.id).count()
        forum.post_count = Post.objects.filter(topic__forum__id=forum.id).count()
        forum.save()

    @property
    def head(self):
        """The opening (first) post of the topic, or None when empty."""
        try:
            return self.posts.select_related().order_by('created')[0]
        except IndexError:
            return None

    @property
    def reply_count(self):
        # Replies exclude the opening post.
        return self.post_count - 1

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:topic', [self.id])

    def update_read(self, user):
        """Record in user.posttracking that this topic was read up to its
        current last post."""
        tracking = user.posttracking
        # Already read everything up to the newest post - nothing to do.
        if tracking.last_read and (tracking.last_read > self.last_post.created):
            return
        if isinstance(tracking.topics, dict):
            # Cap the tracking dict: beyond 5120 entries, drop it and rely
            # on the last_read timestamp instead.
            if len(tracking.topics) > 5120:
                tracking.topics = None
                tracking.last_read = datetime.now()
                tracking.save()
            # NOTE(review): if the dict was just reset to None above, the
            # .get() below raises AttributeError - confirm and guard.
            # Record the newest post id when it is newer than what we stored.
            if self.last_post.id > tracking.topics.get(str(self.id), 0):
                tracking.topics[str(self.id)] = self.last_post.id
                tracking.save()
        else:
            # First topic this user reads: initialize the tracking dict.
            tracking.topics = {self.id: self.last_post.id}
            tracking.save()
class Post(models.Model):
    """A single message in a Topic; body_html caches the rendered body."""
    topic = models.ForeignKey(Topic, related_name='posts', verbose_name=_('Topic'))
    user = models.ForeignKey(User, related_name='posts', verbose_name=_('User'))
    created = models.DateTimeField(_('Created'), auto_now_add=True)
    updated = models.DateTimeField(_('Updated'), blank=True, null=True)
    updated_by = models.ForeignKey(User, verbose_name=_('Updated by'), blank=True, null=True)
    markup = models.CharField(_('Markup'), max_length=15, default=forum_settings.DEFAULT_MARKUP, choices=MARKUP_CHOICES)
    body = models.TextField(_('Message'))
    # Cached HTML rendering of `body`; regenerated on every save().
    body_html = models.TextField(_('HTML version'))
    user_ip = models.IPAddressField(_('User IP'), blank=True, null=True)

    class Meta:
        ordering = ['created']
        get_latest_by = 'created'
        verbose_name = _('Post')
        verbose_name_plural = _('Posts')

    def save(self, *args, **kwargs):
        """Re-render body into body_html (with smilies if enabled) on save."""
        self.body_html = convert_text_to_html(self.body, self.markup)
        if forum_settings.SMILES_SUPPORT and self.user.forum_profile.show_smilies:
            self.body_html = smiles(self.body_html)
        super(Post, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Delete the post, then repair cached counters and last-post
        pointers on the topic, forum and author's profile.

        Deleting the topic's head (first) post removes the whole topic.
        """
        self_id = self.id
        head_post_id = self.topic.posts.order_by('created')[0].id
        forum = self.topic.forum
        topic = self.topic
        profile = self.user.forum_profile
        # Detach Topic.last_post / Forum.last_post references to this post.
        self.last_topic_post.clear()
        self.last_forum_post.clear()
        super(Post, self).delete(*args, **kwargs)
        # The head post cannot be removed in isolation - drop the topic.
        if self_id == head_post_id:
            topic.delete()
        else:
            try:
                topic.last_post = Post.objects.filter(topic__id=topic.id).latest()
            except Post.DoesNotExist:
                topic.last_post = None
            topic.post_count = Post.objects.filter(topic__id=topic.id).count()
            topic.save()
        try:
            forum.last_post = Post.objects.filter(topic__forum__id=forum.id).latest()
        except Post.DoesNotExist:
            forum.last_post = None
        #TODO: for speedup - save/update only changed fields
        forum.post_count = Post.objects.filter(topic__forum__id=forum.id).count()
        forum.topic_count = Topic.objects.filter(forum__id=forum.id).count()
        forum.save()
        profile.post_count = Post.objects.filter(user__id=self.user_id).count()
        profile.save()

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:post', [self.id])

    def summary(self):
        """First 50 characters of the body, with '...' when truncated."""
        LIMIT = 50
        tail = len(self.body) > LIMIT and '...' or ''
        return self.body[:LIMIT] + tail

    __unicode__ = summary
class Reputation(models.Model):
    """A +1/-1 reputation vote one user gives another for a specific post."""
    from_user = models.ForeignKey(User, related_name='reputations_from', verbose_name=_('From'))
    to_user = models.ForeignKey(User, related_name='reputations_to', verbose_name=_('To'))
    post = models.ForeignKey(Post, related_name='post', verbose_name=_('Post'))
    time = models.DateTimeField(_('Time'), auto_now_add=True)
    sign = models.IntegerField(_('Sign'), choices=SIGN_CHOICES, default=0)
    reason = models.TextField(_('Reason'), max_length=1000)

    class Meta:
        verbose_name = _('Reputation')
        verbose_name_plural = _('Reputations')
        # A user may rate any given post at most once.
        unique_together = (('from_user', 'post'),)

    def __unicode__(self):
        return u'T[%d], FU[%d], TU[%d]: %s' % (self.post.id, self.from_user.id, self.to_user.id, unicode(self.time))
class Profile(models.Model):
    """Per-user forum settings and statistics (auto-created on first access)."""

    user = AutoOneToOneField(User, related_name='forum_profile', verbose_name=_('User'))
    status = models.CharField(_('Status'), max_length=30, blank=True)
    site = models.URLField(_('Site'), verify_exists=False, blank=True)
    jabber = models.CharField(_('Jabber'), max_length=80, blank=True)
    icq = models.CharField(_('ICQ'), max_length=12, blank=True)
    msn = models.CharField(_('MSN'), max_length=80, blank=True)
    aim = models.CharField(_('AIM'), max_length=80, blank=True)
    yahoo = models.CharField(_('Yahoo'), max_length=80, blank=True)
    xfire = models.CharField(_('Xfire'), max_length=80, blank=True)
    location = models.CharField(_('Location'), max_length=30, blank=True)
    signature = models.TextField(_('Signature'), blank=True, default='', max_length=forum_settings.SIGNATURE_MAX_LENGTH)
    time_zone = models.FloatField(_('Time zone'), choices=TZ_CHOICES, default=float(forum_settings.DEFAULT_TIME_ZONE))
    language = models.CharField(_('Language'), max_length=5, default='', choices=settings.LANGUAGES)
    avatar = ExtendedImageField(_('Avatar'), blank=True, default='', upload_to=forum_settings.AVATARS_UPLOAD_TO, width=forum_settings.AVATAR_WIDTH, height=forum_settings.AVATAR_HEIGHT)
    theme = models.CharField(_('Theme'), choices=THEME_CHOICES, max_length=80, default='default')
    show_avatar = models.BooleanField(_('Show avatar'), blank=True, default=True)
    show_signatures = models.BooleanField(_('Show signatures'), blank=True, default=True)
    show_smilies = models.BooleanField(_('Show smilies'), blank=True, default=True)
    privacy_permission = models.IntegerField(_('Privacy permission'), choices=PRIVACY_CHOICES, default=1)
    markup = models.CharField(_('Default markup'), max_length=15, default=forum_settings.DEFAULT_MARKUP, choices=MARKUP_CHOICES)
    post_count = models.IntegerField(_('Post count'), blank=True, default=0)
    birthday = models.DateField(_('Birthday'), blank=True, null=True)

    class Meta:
        verbose_name = _('Profile')
        verbose_name_plural = _('Profiles')

    def last_post(self):
        """Creation time of this user's most recent post, or None."""
        newest_first = Post.objects.filter(user__id=self.user_id).order_by('-created')
        try:
            return newest_first[0].created
        except IndexError:
            return None

    def reply_count_minus(self):
        """Number of negative reputation votes this user has received."""
        return Reputation.objects.filter(to_user__id=self.user_id, sign=-1).count()

    def reply_count_plus(self):
        """Number of positive reputation votes this user has received."""
        return Reputation.objects.filter(to_user__id=self.user_id, sign=1).count()
class PostTracking(models.Model):
    """
    Tracks read/unread posts per user.

    `topics` maps a topic id (stored as a string key) to the id of the
    last post the user has read in that topic; `last_read` is a global
    cutoff time used once the dict grows too large (see Topic.update_read).
    """
    user = AutoOneToOneField(User)
    topics = JSONField(null=True)
    last_read = models.DateTimeField(null=True)

    class Meta:
        verbose_name = _('Post tracking')
        verbose_name_plural = _('Post tracking')

    def __unicode__(self):
        return self.user.username
class Report(models.Model):
    """A user's report flagging a post for moderator attention."""
    reported_by = models.ForeignKey(User, related_name='reported_by', verbose_name=_('Reported by'))
    post = models.ForeignKey(Post, verbose_name=_('Post'))
    # True once a moderator has handled ("zapped") the report.
    zapped = models.BooleanField(_('Zapped'), blank=True, default=False)
    zapped_by = models.ForeignKey(User, related_name='zapped_by', blank=True, null=True, verbose_name=_('Zapped by'))
    # NOTE(review): blank=True but neither null nor a default - callers must
    # set this explicitly before saving; confirm against the reporting view.
    created = models.DateTimeField(_('Created'), blank=True)
    # Fix: max_length must be an int; the original passed the string '1000',
    # which breaks length validation (it compared against a str).
    reason = models.TextField(_('Reason'), blank=True, default='', max_length=1000)

    class Meta:
        verbose_name = _('Report')
        verbose_name_plural = _('Reports')

    def __unicode__(self):
        return u'%s %s' % (self.reported_by, self.zapped)
class Ban(models.Model):
    """A ban record: saving it deactivates the user, deleting it reactivates."""
    user = models.OneToOneField(User, verbose_name=_('Banned user'), related_name='ban_users')
    ban_start = models.DateTimeField(_('Ban start'), default=datetime.now)
    ban_end = models.DateTimeField(_('Ban end'), blank=True, null=True)
    reason = models.TextField(_('Reason'))

    class Meta:
        verbose_name = _('Ban')
        verbose_name_plural = _('Bans')

    def __unicode__(self):
        return self.user.username

    def save(self, *args, **kwargs):
        # Side effect: persisting a ban deactivates the account first.
        self.user.is_active = False
        self.user.save()
        super(Ban, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        # Side effect: lifting the ban reactivates the account.
        self.user.is_active = True
        self.user.save()
        super(Ban, self).delete(*args, **kwargs)
class Attachment(models.Model):
    """A file attached to a post; `hash` is the stable download token used
    in the attachment URL."""
    post = models.ForeignKey(Post, verbose_name=_('Post'), related_name='attachments')
    size = models.IntegerField(_('Size'))
    content_type = models.CharField(_('Content type'), max_length=255)
    # Stored path relative to forum_settings.ATTACHMENT_UPLOAD_TO.
    path = models.CharField(_('Path'), max_length=255)
    name = models.TextField(_('Name'))
    hash = models.CharField(_('Hash'), max_length=40, blank=True, default='', db_index=True)

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # The hash is derived from the primary key, which only exists after
        # the first save - hence the deliberate second save on creation.
        super(Attachment, self).save(*args, **kwargs)
        if not self.hash:
            self.hash = sha_constructor(str(self.id) + settings.SECRET_KEY).hexdigest()
            super(Attachment, self).save(*args, **kwargs)

    @models.permalink
    def get_absolute_url(self):
        return ('djangobb:forum_attachment', [self.hash])

    def get_absolute_path(self):
        """Absolute filesystem path of the stored file."""
        return os.path.join(settings.MEDIA_ROOT, forum_settings.ATTACHMENT_UPLOAD_TO,
                            self.path)
| bsd-3-clause |
thaumos/ansible | lib/ansible/modules/network/fortios/fortios_router_ospf.py | 21 | 42092 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type

# Ansible module metadata: maturity level and support channel.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_router_ospf
short_description: Configure OSPF in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify router feature and ospf category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
router_ospf:
description:
- Configure OSPF.
default: null
suboptions:
abr-type:
description:
- Area border router type.
choices:
- cisco
- ibm
- shortcut
- standard
area:
description:
- OSPF area configuration.
suboptions:
authentication:
description:
- Authentication type.
choices:
- none
- text
- md5
default-cost:
description:
- Summary default cost of stub or NSSA area.
filter-list:
description:
- OSPF area filter-list configuration.
suboptions:
direction:
description:
- Direction.
choices:
- in
- out
id:
description:
- Filter list entry ID.
required: true
list:
description:
- Access-list or prefix-list name. Source router.access-list.name router.prefix-list.name.
id:
description:
- Area entry IP address.
required: true
nssa-default-information-originate:
description:
- Redistribute, advertise, or do not originate Type-7 default route into NSSA area.
choices:
- enable
- always
- disable
nssa-default-information-originate-metric:
description:
- OSPF default metric.
nssa-default-information-originate-metric-type:
description:
- OSPF metric type for default routes.
choices:
- 1
- 2
nssa-redistribution:
description:
- Enable/disable redistribute into NSSA area.
choices:
- enable
- disable
nssa-translator-role:
description:
- NSSA translator role type.
choices:
- candidate
- never
- always
range:
description:
- OSPF area range configuration.
suboptions:
advertise:
description:
- Enable/disable advertise status.
choices:
- disable
- enable
id:
description:
- Range entry ID.
required: true
prefix:
description:
- Prefix.
substitute:
description:
- Substitute prefix.
substitute-status:
description:
- Enable/disable substitute status.
choices:
- enable
- disable
shortcut:
description:
- Enable/disable shortcut option.
choices:
- disable
- enable
- default
stub-type:
description:
- Stub summary setting.
choices:
- no-summary
- summary
type:
description:
- Area type setting.
choices:
- regular
- nssa
- stub
virtual-link:
description:
- OSPF virtual link configuration.
suboptions:
authentication:
description:
- Authentication type.
choices:
- none
- text
- md5
authentication-key:
description:
- Authentication key.
dead-interval:
description:
- Dead interval.
hello-interval:
description:
- Hello interval.
md5-key:
description:
- MD5 key.
name:
description:
- Virtual link entry name.
required: true
peer:
description:
- Peer IP.
retransmit-interval:
description:
- Retransmit interval.
transmit-delay:
description:
- Transmit delay.
auto-cost-ref-bandwidth:
description:
- Reference bandwidth in terms of megabits per second.
bfd:
description:
- Bidirectional Forwarding Detection (BFD).
choices:
- enable
- disable
database-overflow:
description:
- Enable/disable database overflow.
choices:
- enable
- disable
database-overflow-max-lsas:
description:
- Database overflow maximum LSAs.
database-overflow-time-to-recover:
description:
- Database overflow time to recover (sec).
default-information-metric:
description:
- Default information metric.
default-information-metric-type:
description:
- Default information metric type.
choices:
- 1
- 2
default-information-originate:
description:
- Enable/disable generation of default route.
choices:
- enable
- always
- disable
default-information-route-map:
description:
- Default information route map. Source router.route-map.name.
default-metric:
description:
- Default metric of redistribute routes.
distance:
description:
- Distance of the route.
distance-external:
description:
- Administrative external distance.
distance-inter-area:
description:
- Administrative inter-area distance.
distance-intra-area:
description:
- Administrative intra-area distance.
distribute-list:
description:
- Distribute list configuration.
suboptions:
access-list:
description:
- Access list name. Source router.access-list.name.
id:
description:
- Distribute list entry ID.
required: true
protocol:
description:
- Protocol type.
choices:
- connected
- static
- rip
distribute-list-in:
description:
- Filter incoming routes. Source router.access-list.name router.prefix-list.name.
distribute-route-map-in:
description:
- Filter incoming external routes by route-map. Source router.route-map.name.
log-neighbour-changes:
description:
                - Enable/disable logging of OSPF neighbour changes.
choices:
- enable
- disable
neighbor:
description:
- OSPF neighbor configuration are used when OSPF runs on non-broadcast media
suboptions:
cost:
description:
- Cost of the interface, value range from 0 to 65535, 0 means auto-cost.
id:
description:
- Neighbor entry ID.
required: true
ip:
description:
- Interface IP address of the neighbor.
poll-interval:
description:
- Poll interval time in seconds.
priority:
description:
- Priority.
network:
description:
- OSPF network configuration.
suboptions:
area:
description:
- Attach the network to area.
id:
description:
- Network entry ID.
required: true
prefix:
description:
- Prefix.
ospf-interface:
description:
- OSPF interface configuration.
suboptions:
authentication:
description:
- Authentication type.
choices:
- none
- text
- md5
authentication-key:
description:
- Authentication key.
bfd:
description:
- Bidirectional Forwarding Detection (BFD).
choices:
- global
- enable
- disable
cost:
description:
- Cost of the interface, value range from 0 to 65535, 0 means auto-cost.
database-filter-out:
description:
- Enable/disable control of flooding out LSAs.
choices:
- enable
- disable
dead-interval:
description:
- Dead interval.
hello-interval:
description:
- Hello interval.
hello-multiplier:
description:
- Number of hello packets within dead interval.
interface:
description:
- Configuration interface name. Source system.interface.name.
ip:
description:
- IP address.
md5-key:
description:
- MD5 key.
mtu:
description:
- MTU for database description packets.
mtu-ignore:
description:
- Enable/disable ignore MTU.
choices:
- enable
- disable
name:
description:
- Interface entry name.
required: true
network-type:
description:
- Network type.
choices:
- broadcast
- non-broadcast
- point-to-point
- point-to-multipoint
- point-to-multipoint-non-broadcast
prefix-length:
description:
- Prefix length.
priority:
description:
- Priority.
resync-timeout:
description:
- Graceful restart neighbor resynchronization timeout.
retransmit-interval:
description:
- Retransmit interval.
status:
description:
- Enable/disable status.
choices:
- disable
- enable
transmit-delay:
description:
- Transmit delay.
passive-interface:
description:
- Passive interface configuration.
suboptions:
name:
description:
- Passive interface name. Source system.interface.name.
required: true
redistribute:
description:
- Redistribute configuration.
suboptions:
metric:
description:
- Redistribute metric setting.
metric-type:
description:
- Metric type.
choices:
- 1
- 2
name:
description:
- Redistribute name.
required: true
routemap:
description:
- Route map name. Source router.route-map.name.
status:
description:
- status
choices:
- enable
- disable
tag:
description:
- Tag value.
restart-mode:
description:
- OSPF restart mode (graceful or LLS).
choices:
- none
- lls
- graceful-restart
restart-period:
description:
- Graceful restart period.
rfc1583-compatible:
description:
- Enable/disable RFC1583 compatibility.
choices:
- enable
- disable
router-id:
description:
- Router ID.
spf-timers:
description:
- SPF calculation frequency.
summary-address:
description:
- IP address summary configuration.
suboptions:
advertise:
description:
- Enable/disable advertise status.
choices:
- disable
- enable
id:
description:
- Summary address entry ID.
required: true
prefix:
description:
- Prefix.
tag:
description:
- Tag value.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure OSPF.
fortios_router_ospf:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
router_ospf:
abr-type: "cisco"
area:
-
authentication: "none"
default-cost: "6"
filter-list:
-
direction: "in"
id: "9"
list: "<your_own_value> (source router.access-list.name router.prefix-list.name)"
id: "11"
nssa-default-information-originate: "enable"
nssa-default-information-originate-metric: "13"
nssa-default-information-originate-metric-type: "1"
nssa-redistribution: "enable"
nssa-translator-role: "candidate"
range:
-
advertise: "disable"
id: "19"
prefix: "<your_own_value>"
substitute: "<your_own_value>"
substitute-status: "enable"
shortcut: "disable"
stub-type: "no-summary"
type: "regular"
virtual-link:
-
authentication: "none"
authentication-key: "<your_own_value>"
dead-interval: "29"
hello-interval: "30"
md5-key: "<your_own_value>"
name: "default_name_32"
peer: "<your_own_value>"
retransmit-interval: "34"
transmit-delay: "35"
auto-cost-ref-bandwidth: "36"
bfd: "enable"
database-overflow: "enable"
database-overflow-max-lsas: "39"
database-overflow-time-to-recover: "40"
default-information-metric: "41"
default-information-metric-type: "1"
default-information-originate: "enable"
default-information-route-map: "<your_own_value> (source router.route-map.name)"
default-metric: "45"
distance: "46"
distance-external: "47"
distance-inter-area: "48"
distance-intra-area: "49"
distribute-list:
-
access-list: "<your_own_value> (source router.access-list.name)"
id: "52"
protocol: "connected"
distribute-list-in: "<your_own_value> (source router.access-list.name router.prefix-list.name)"
distribute-route-map-in: "<your_own_value> (source router.route-map.name)"
log-neighbour-changes: "enable"
neighbor:
-
cost: "58"
id: "59"
ip: "<your_own_value>"
poll-interval: "61"
priority: "62"
network:
-
area: "<your_own_value>"
id: "65"
prefix: "<your_own_value>"
ospf-interface:
-
authentication: "none"
authentication-key: "<your_own_value>"
bfd: "global"
cost: "71"
database-filter-out: "enable"
dead-interval: "73"
hello-interval: "74"
hello-multiplier: "75"
interface: "<your_own_value> (source system.interface.name)"
ip: "<your_own_value>"
md5-key: "<your_own_value>"
mtu: "79"
mtu-ignore: "enable"
name: "default_name_81"
network-type: "broadcast"
prefix-length: "83"
priority: "84"
resync-timeout: "85"
retransmit-interval: "86"
status: "disable"
transmit-delay: "88"
passive-interface:
-
name: "default_name_90 (source system.interface.name)"
redistribute:
-
metric: "92"
metric-type: "1"
name: "default_name_94"
routemap: "<your_own_value> (source router.route-map.name)"
status: "enable"
tag: "97"
restart-mode: "none"
restart-period: "99"
rfc1583-compatible: "enable"
router-id: "<your_own_value>"
spf-timers: "<your_own_value>"
summary-address:
-
advertise: "disable"
id: "105"
prefix: "<your_own_value>"
tag: "107"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
# Module-level FortiOSAPI connection handle used by login()/fortios_router();
# presumably assigned in main() before use - confirm in the entry point.
fos = None
def login(data):
    """Open a session on the FortiGate via the module-level `fos` handle.

    HTTPS is used unless the caller explicitly passed a falsy 'https'.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    fos.debug('on')
    if data.get('https', True):
        fos.https('on')
    else:
        fos.https('off')
    fos.login(host, username, password)
def filter_router_ospf_data(json):
    """Return a dict containing only the keys the FortiOS 'router ospf'
    endpoint understands, dropping any key whose value is None."""
    option_list = ['abr-type', 'area', 'auto-cost-ref-bandwidth',
                   'bfd', 'database-overflow', 'database-overflow-max-lsas',
                   'database-overflow-time-to-recover', 'default-information-metric', 'default-information-metric-type',
                   'default-information-originate', 'default-information-route-map', 'default-metric',
                   'distance', 'distance-external', 'distance-inter-area',
                   'distance-intra-area', 'distribute-list', 'distribute-list-in',
                   'distribute-route-map-in', 'log-neighbour-changes', 'neighbor',
                   'network', 'ospf-interface', 'passive-interface',
                   'redistribute', 'restart-mode', 'restart-period',
                   'rfc1583-compatible', 'router-id', 'spf-timers',
                   'summary-address']

    return dict((option, json[option]) for option in option_list
                if option in json and json[option] is not None)
def flatten_multilists_attributes(data):
    """Join list-valued attributes into the space-separated strings the
    FortiOS API expects, mutating *data* in place and returning it.

    `multilist_attrs` holds key paths (tuples of keys) to such attributes;
    it is currently empty for this module, so *data* passes through
    unchanged.  The original implementation built code strings and ran
    them through eval()/exec() - replaced with a plain dict traversal.
    """
    multilist_attrs = []

    for attr in multilist_attrs:
        try:
            # Walk down to the container holding the attribute.
            parent = data
            for key in attr[:-1]:
                parent = parent[key]
            parent[attr[-1]] = ' '.join(elem for elem in parent[attr[-1]])
        except (KeyError, TypeError):
            # Best-effort semantics, as before: a missing or non-joinable
            # attribute is simply skipped.
            pass

    return data
def router_ospf(data, fos):
    """Flatten, filter and push the 'router ospf' configuration to the
    device in the requested vdom."""
    vdom = data['vdom']
    raw_config = data['router_ospf']

    prepared = filter_router_ospf_data(flatten_multilists_attributes(raw_config))

    return fos.set('router', 'ospf', data=prepared, vdom=vdom)
def fortios_router(data, fos):
    """Log in, apply the router_ospf configuration and log out.

    Returns a tuple (is_error, changed, response) for the module exit.
    Raises ValueError when no router_ospf configuration was supplied
    (the original code crashed with an unhelpful NameError on `resp`).
    """
    login(data)

    resp = None
    if data['router_ospf']:
        resp = router_ospf(data, fos)

    fos.logout()

    if resp is None:
        raise ValueError('router_ospf configuration is missing')

    succeeded = resp['status'] == "success"
    return not succeeded, succeeded, resp
def main():
    """Module entry point.

    Declares the Ansible argument spec mirroring the FortiOS router/ospf
    CLI schema (nested "options" dicts describe sub-tables such as areas,
    interfaces and redistribution entries), connects to the FortiGate via
    fortiosapi and applies the configuration.
    """
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        # no_log keeps the password out of Ansible's logs.
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "router_ospf": {
            "required": False, "type": "dict",
            "options": {
                "abr-type": {"required": False, "type": "str",
                             "choices": ["cisco", "ibm", "shortcut",
                                         "standard"]},
                "area": {"required": False, "type": "list",
                         "options": {
                             "authentication": {"required": False, "type": "str",
                                                "choices": ["none", "text", "md5"]},
                             "default-cost": {"required": False, "type": "int"},
                             "filter-list": {"required": False, "type": "list",
                                             "options": {
                                                 "direction": {"required": False, "type": "str",
                                                               "choices": ["in", "out"]},
                                                 "id": {"required": True, "type": "int"},
                                                 "list": {"required": False, "type": "str"}
                                             }},
                             "id": {"required": True, "type": "str"},
                             "nssa-default-information-originate": {"required": False, "type": "str",
                                                                    "choices": ["enable", "always", "disable"]},
                             "nssa-default-information-originate-metric": {"required": False, "type": "int"},
                             "nssa-default-information-originate-metric-type": {"required": False, "type": "str",
                                                                                "choices": ["1", "2"]},
                             "nssa-redistribution": {"required": False, "type": "str",
                                                     "choices": ["enable", "disable"]},
                             "nssa-translator-role": {"required": False, "type": "str",
                                                      "choices": ["candidate", "never", "always"]},
                             "range": {"required": False, "type": "list",
                                       "options": {
                                           "advertise": {"required": False, "type": "str",
                                                         "choices": ["disable", "enable"]},
                                           "id": {"required": True, "type": "int"},
                                           "prefix": {"required": False, "type": "str"},
                                           "substitute": {"required": False, "type": "str"},
                                           "substitute-status": {"required": False, "type": "str",
                                                                 "choices": ["enable", "disable"]}
                                       }},
                             "shortcut": {"required": False, "type": "str",
                                          "choices": ["disable", "enable", "default"]},
                             "stub-type": {"required": False, "type": "str",
                                           "choices": ["no-summary", "summary"]},
                             "type": {"required": False, "type": "str",
                                      "choices": ["regular", "nssa", "stub"]},
                             "virtual-link": {"required": False, "type": "list",
                                              "options": {
                                                  "authentication": {"required": False, "type": "str",
                                                                     "choices": ["none", "text", "md5"]},
                                                  "authentication-key": {"required": False, "type": "str"},
                                                  "dead-interval": {"required": False, "type": "int"},
                                                  "hello-interval": {"required": False, "type": "int"},
                                                  "md5-key": {"required": False, "type": "str"},
                                                  "name": {"required": True, "type": "str"},
                                                  "peer": {"required": False, "type": "str"},
                                                  "retransmit-interval": {"required": False, "type": "int"},
                                                  "transmit-delay": {"required": False, "type": "int"}
                                              }}
                         }},
                "auto-cost-ref-bandwidth": {"required": False, "type": "int"},
                "bfd": {"required": False, "type": "str",
                        "choices": ["enable", "disable"]},
                "database-overflow": {"required": False, "type": "str",
                                      "choices": ["enable", "disable"]},
                "database-overflow-max-lsas": {"required": False, "type": "int"},
                "database-overflow-time-to-recover": {"required": False, "type": "int"},
                "default-information-metric": {"required": False, "type": "int"},
                "default-information-metric-type": {"required": False, "type": "str",
                                                    "choices": ["1", "2"]},
                "default-information-originate": {"required": False, "type": "str",
                                                  "choices": ["enable", "always", "disable"]},
                "default-information-route-map": {"required": False, "type": "str"},
                "default-metric": {"required": False, "type": "int"},
                "distance": {"required": False, "type": "int"},
                "distance-external": {"required": False, "type": "int"},
                "distance-inter-area": {"required": False, "type": "int"},
                "distance-intra-area": {"required": False, "type": "int"},
                "distribute-list": {"required": False, "type": "list",
                                    "options": {
                                        "access-list": {"required": False, "type": "str"},
                                        "id": {"required": True, "type": "int"},
                                        "protocol": {"required": False, "type": "str",
                                                     "choices": ["connected", "static", "rip"]}
                                    }},
                "distribute-list-in": {"required": False, "type": "str"},
                "distribute-route-map-in": {"required": False, "type": "str"},
                "log-neighbour-changes": {"required": False, "type": "str",
                                          "choices": ["enable", "disable"]},
                "neighbor": {"required": False, "type": "list",
                             "options": {
                                 "cost": {"required": False, "type": "int"},
                                 "id": {"required": True, "type": "int"},
                                 "ip": {"required": False, "type": "str"},
                                 "poll-interval": {"required": False, "type": "int"},
                                 "priority": {"required": False, "type": "int"}
                             }},
                "network": {"required": False, "type": "list",
                            "options": {
                                "area": {"required": False, "type": "str"},
                                "id": {"required": True, "type": "int"},
                                "prefix": {"required": False, "type": "str"}
                            }},
                "ospf-interface": {"required": False, "type": "list",
                                   "options": {
                                       "authentication": {"required": False, "type": "str",
                                                          "choices": ["none", "text", "md5"]},
                                       "authentication-key": {"required": False, "type": "str"},
                                       "bfd": {"required": False, "type": "str",
                                               "choices": ["global", "enable", "disable"]},
                                       "cost": {"required": False, "type": "int"},
                                       "database-filter-out": {"required": False, "type": "str",
                                                               "choices": ["enable", "disable"]},
                                       "dead-interval": {"required": False, "type": "int"},
                                       "hello-interval": {"required": False, "type": "int"},
                                       "hello-multiplier": {"required": False, "type": "int"},
                                       "interface": {"required": False, "type": "str"},
                                       "ip": {"required": False, "type": "str"},
                                       "md5-key": {"required": False, "type": "str"},
                                       "mtu": {"required": False, "type": "int"},
                                       "mtu-ignore": {"required": False, "type": "str",
                                                      "choices": ["enable", "disable"]},
                                       "name": {"required": True, "type": "str"},
                                       "network-type": {"required": False, "type": "str",
                                                        "choices": ["broadcast", "non-broadcast", "point-to-point",
                                                                    "point-to-multipoint", "point-to-multipoint-non-broadcast"]},
                                       "prefix-length": {"required": False, "type": "int"},
                                       "priority": {"required": False, "type": "int"},
                                       "resync-timeout": {"required": False, "type": "int"},
                                       "retransmit-interval": {"required": False, "type": "int"},
                                       "status": {"required": False, "type": "str",
                                                  "choices": ["disable", "enable"]},
                                       "transmit-delay": {"required": False, "type": "int"}
                                   }},
                "passive-interface": {"required": False, "type": "list",
                                      "options": {
                                          "name": {"required": True, "type": "str"}
                                      }},
                "redistribute": {"required": False, "type": "list",
                                 "options": {
                                     "metric": {"required": False, "type": "int"},
                                     "metric-type": {"required": False, "type": "str",
                                                     "choices": ["1", "2"]},
                                     "name": {"required": True, "type": "str"},
                                     "routemap": {"required": False, "type": "str"},
                                     "status": {"required": False, "type": "str",
                                                "choices": ["enable", "disable"]},
                                     "tag": {"required": False, "type": "int"}
                                 }},
                "restart-mode": {"required": False, "type": "str",
                                 "choices": ["none", "lls", "graceful-restart"]},
                "restart-period": {"required": False, "type": "int"},
                "rfc1583-compatible": {"required": False, "type": "str",
                                       "choices": ["enable", "disable"]},
                "router-id": {"required": False, "type": "str"},
                "spf-timers": {"required": False, "type": "str"},
                "summary-address": {"required": False, "type": "list",
                                    "options": {
                                        "advertise": {"required": False, "type": "str",
                                                      "choices": ["disable", "enable"]},
                                        "id": {"required": True, "type": "int"},
                                        "prefix": {"required": False, "type": "str"},
                                        "tag": {"required": False, "type": "int"}
                                    }}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # fortiosapi is an optional third-party dependency; fail with a clear
    # message instead of an ImportError traceback when it is missing.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")

    # The module-level `fos` global is the connection object used by login().
    global fos
    fos = FortiOSAPI()

    is_error, has_changed, result = fortios_router(module.params, fos)

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
habibmasuro/django-wiki | testproject/testproject/wsgi.py | 7 | 1342 | """
WSGI config for testproject project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys

# Absolute path of this settings package and of its parent (the repo root).
PROJECT_PATH = os.path.abspath(os.path.split(__file__)[0])
PROJECT_PARENT = os.path.abspath(os.path.split(PROJECT_PATH)[0])

# Make both the project package and its parent importable so the app can be
# served regardless of the server's working directory.
sys.path.append(PROJECT_PATH)
sys.path.append(PROJECT_PARENT)

# Fall back to the test project's settings unless the environment overrides it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.  (Imported late on purpose: the settings module and
# sys.path must be configured first.)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| gpl-3.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/sklearn/externals/joblib/numpy_pickle.py | 18 | 15334 | """
Utilities for fast persistence of big data, with optional compression.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import traceback
import sys
import os
import zlib
import warnings
if sys.version_info[0] >= 3:
from io import BytesIO
from pickle import _Unpickler as Unpickler
def asbytes(s):
if isinstance(s, bytes):
return s
return s.encode('latin1')
else:
try:
from io import BytesIO
except ImportError:
# BytesIO has been added in Python 2.5
from cStringIO import StringIO as BytesIO
from pickle import Unpickler
asbytes = str
_MEGA = 2 ** 20
_MAX_LEN = len(hex(2 ** 64))
# To detect file types
_ZFILE_PREFIX = asbytes('ZF')
###############################################################################
# Compressed file with Zlib
def _read_magic(file_handle):
    """Return the leading bytes of *file_handle* used as the Zfile signature.

    The handle is rewound afterwards because the pickling machinery expects
    to start reading from the beginning of the file.
    """
    signature = file_handle.read(len(_ZFILE_PREFIX))
    file_handle.seek(0)
    return signature
def read_zfile(file_handle):
    """Read the z-file and return the content as a string

    Z-files are raw data compressed with zlib used internally by joblib
    for persistence. Backward compatibility is not guaranteed. Do not
    use for external purposes.
    """
    file_handle.seek(0)
    assert _read_magic(file_handle) == _ZFILE_PREFIX, \
        "File does not have the right magic"
    # Header layout: magic prefix followed by the payload length encoded as
    # a fixed-width (_MAX_LEN) hexadecimal string.
    length = file_handle.read(len(_ZFILE_PREFIX) + _MAX_LEN)
    length = length[len(_ZFILE_PREFIX):]
    length = int(length, 16)
    # We use the known length of the data to tell Zlib the size of the
    # buffer to allocate.
    data = zlib.decompress(file_handle.read(), 15, length)
    # A length mismatch means truncated or corrupted storage.
    assert len(data) == length, (
        "Incorrect data length while decompressing %s."
        "The file could be corrupted." % file_handle)
    return data
def write_zfile(file_handle, data, compress=1):
    """Write the data in the given file as a Z-file.

    Z-files are raw data compressed with zlib used internally by joblib
    for persistence. Backward compatibility is not guarantied. Do not
    use for external purposes.

    :param file_handle: writable binary file object
    :param data: raw bytes to compress and store
    :param compress: zlib compression level (1-9)
    """
    file_handle.write(_ZFILE_PREFIX)
    length = hex(len(data))
    # Python 2 only: hex() of a `long` ends with a trailing 'L' that must
    # not end up in the fixed-width length field.
    if sys.version_info[0] < 3 and type(length) is long:
        # We need to remove the trailing 'L' in the hex representation
        length = length[:-1]
    # Store the length of the data, left-justified to _MAX_LEN so readers
    # can slice it back out at a fixed offset.
    file_handle.write(length.ljust(_MAX_LEN))
    file_handle.write(zlib.compress(data, compress))
###############################################################################
# Utility objects for persistence.
class NDArrayWrapper(object):
    """ An object to be persisted instead of numpy arrays.

    The only thing this object does, is to carry the filename in which
    the array has been persisted, and the array subclass.
    """

    def __init__(self, filename, subclass):
        "Store the useful information for later"
        # filename is relative to the unpickler's directory (see read()).
        self.filename = filename
        self.subclass = subclass

    def read(self, unpickler):
        "Reconstruct the array"
        filename = os.path.join(unpickler._dirname, self.filename)
        # Load the array from the disk
        if unpickler.np.__version__ >= '1.3':
            array = unpickler.np.load(filename,
                                      mmap_mode=unpickler.mmap_mode)
        else:
            # Numpy does not have mmap_mode before 1.3
            array = unpickler.np.load(filename)
        # Reconstruct subclasses. This does not work with old
        # versions of numpy
        if (hasattr(array, '__array_prepare__')
            and not self.subclass in (unpickler.np.ndarray,
                                      unpickler.np.memmap)):
            # We need to reconstruct another subclass
            new_array = unpickler.np.core.multiarray._reconstruct(
                self.subclass, (0,), 'b')
            new_array.__array_prepare__(array)
            array = new_array
        return array
class ZNDArrayWrapper(NDArrayWrapper):
    """An object to be persisted instead of numpy arrays.

    This object store the Zfile filename in wich
    the data array has been persisted, and the meta information to
    retrieve it.

    The reason that we store the raw buffer data of the array and
    the meta information, rather than array representation routine
    (tostring) is that it enables us to use completely the strided
    model to avoid memory copies (a and a.T store as fast). In
    addition saving the heavy information separately can avoid
    creating large temporary buffers when unpickling data with
    large arrays.
    """

    def __init__(self, filename, init_args, state):
        "Store the useful information for later"
        # init_args/state come from ndarray.__reduce__() with the raw data
        # buffer stripped off (it lives in the z-file instead).
        self.filename = filename
        self.state = state
        self.init_args = init_args

    def read(self, unpickler):
        "Reconstruct the array from the meta-information and the z-file"
        # Here we a simply reproducing the unpickling mechanism for numpy
        # arrays
        filename = os.path.join(unpickler._dirname, self.filename)
        array = unpickler.np.core.multiarray._reconstruct(*self.init_args)
        data = read_zfile(open(filename, 'rb'))
        # Re-append the decompressed buffer as the last element of the
        # __setstate__ tuple, undoing what the pickler stripped.
        state = self.state + (data,)
        array.__setstate__(state)
        return array
###############################################################################
# Pickler classes
class NumpyPickler(pickle.Pickler):
    """A pickler to persist of big data efficiently.

    The main features of this object are:

    * persistence of numpy arrays in separate .npy files, for which
      I/O is fast.

    * optional compression using Zlib, with a special care on avoid
      temporaries.

    NOTE: this module is Python 2 code (see the `print` statement in
    save()); it will not compile under Python 3.
    """

    def __init__(self, filename, compress=0, cache_size=100):
        self._filename = filename
        # Running list of every file written, returned by dump().
        self._filenames = [filename, ]
        self.cache_size = cache_size
        self.compress = compress
        if not self.compress:
            self.file = open(filename, 'wb')
        else:
            # When compressing, pickle into memory first; close() writes the
            # compressed blob out in one go.
            self.file = BytesIO()
        # Count the number of npy files that we have created:
        self._npy_counter = 0
        pickle.Pickler.__init__(self, self.file,
                                protocol=pickle.HIGHEST_PROTOCOL)
        # delayed import of numpy, to avoid tight coupling
        try:
            import numpy as np
        except ImportError:
            np = None
        self.np = np

    def _write_array(self, array, filename):
        # Persist one array to its own file and return a lightweight
        # wrapper object to pickle in its place.
        if not self.compress:
            self.np.save(filename, array)
            container = NDArrayWrapper(os.path.basename(filename),
                                       type(array))
        else:
            filename += '.z'
            # Efficient compressed storage:
            # The meta data is stored in the container, and the core
            # numerics in a z-file
            _, init_args, state = array.__reduce__()
            # the last entry of 'state' is the data itself
            zfile = open(filename, 'wb')
            write_zfile(zfile, state[-1],
                        compress=self.compress)
            zfile.close()
            state = state[:-1]
            container = ZNDArrayWrapper(os.path.basename(filename),
                                        init_args, state)
        return container, filename

    def save(self, obj):
        """ Subclass the save method, to save ndarray subclasses in npy
        files, rather than pickling them. Of course, this is a
        total abuse of the Pickler class.
        """
        # Exact type check (not isinstance): only plain ndarray/matrix/
        # memmap get the .npy treatment; other subclasses pickle normally.
        if self.np is not None and type(obj) in (self.np.ndarray,
                                                 self.np.matrix, self.np.memmap):
            size = obj.size * obj.itemsize
            if self.compress and size < self.cache_size * _MEGA:
                # When compressing, as we are not writing directly to the
                # disk, it is more efficient to use standard pickling
                if type(obj) is self.np.memmap:
                    # Pickling doesn't work with memmaped arrays
                    obj = self.np.asarray(obj)
                return pickle.Pickler.save(self, obj)
            self._npy_counter += 1
            try:
                filename = '%s_%02i.npy' % (self._filename,
                                            self._npy_counter)
                # This converts the array in a container
                obj, filename = self._write_array(obj, filename)
                self._filenames.append(filename)
            except:
                # Best effort: fall through and pickle the raw array.
                self._npy_counter -= 1
                # XXX: We should have a logging mechanism
                print 'Failed to save %s to .npy file:\n%s' % (
                    type(obj),
                    traceback.format_exc())
        return pickle.Pickler.save(self, obj)

    def close(self):
        # Only needed when compressing: flush the in-memory pickle stream
        # to disk as a single z-file.
        if self.compress:
            zfile = open(self._filename, 'wb')
            write_zfile(zfile,
                        self.file.getvalue(), self.compress)
            zfile.close()
class NumpyUnpickler(Unpickler):
    """A subclass of the Unpickler to unpickle our numpy pickles.
    """
    # Copy the dispatch table so registering load_build below does not
    # affect the base Unpickler class.
    dispatch = Unpickler.dispatch.copy()

    def __init__(self, filename, file_handle, mmap_mode=None):
        self._filename = os.path.basename(filename)
        # Companion .npy/.z files are resolved relative to this directory.
        self._dirname = os.path.dirname(filename)
        self.mmap_mode = mmap_mode
        self.file_handle = self._open_pickle(file_handle)
        Unpickler.__init__(self, self.file_handle)
        try:
            import numpy as np
        except ImportError:
            np = None
        self.np = np

    def _open_pickle(self, file_handle):
        # Hook point: subclasses may wrap/replace the handle (see
        # ZipNumpyUnpickler).
        return file_handle

    def load_build(self):
        """ This method is called to set the state of a newly created
        object.

        We capture it to replace our place-holder objects,
        NDArrayWrapper, by the array we are interested in. We
        replace them directly in the stack of pickler.
        """
        Unpickler.load_build(self)
        if isinstance(self.stack[-1], NDArrayWrapper):
            if self.np is None:
                raise ImportError('Trying to unpickle an ndarray, '
                                  "but numpy didn't import correctly")
            nd_array_wrapper = self.stack.pop()
            array = nd_array_wrapper.read(self)
            self.stack.append(array)

    # Be careful to register our new method.
    dispatch[pickle.BUILD] = load_build
class ZipNumpyUnpickler(NumpyUnpickler):
    """Unpickler variant that reads on the fly from zlib-compressed
    (Zfile) storage."""

    def __init__(self, filename, file_handle):
        # Memory-mapping is impossible on compressed data, so mmap_mode
        # is forced to None regardless of what the caller wanted.
        NumpyUnpickler.__init__(self, filename, file_handle, mmap_mode=None)

    def _open_pickle(self, file_handle):
        # Decompress the whole z-file up front and hand the base class an
        # in-memory stream to unpickle from.
        return BytesIO(read_zfile(file_handle))
###############################################################################
# Utility functions
def dump(value, filename, compress=0, cache_size=100):
    """Fast persistence of an arbitrary Python object into a files, with
    dedicated storage for numpy arrays.

    Parameters
    -----------
    value: any Python object
        The object to store to disk
    filename: string
        The name of the file in which it is to be stored
    compress: integer for 0 to 9, optional
        Optional compression level for the data. 0 is no compression.
        Higher means more compression, but also slower read and
        write times. Using a value of 3 is often a good compromise.
        See the notes for more details.
    cache_size: positive number, optional
        Fixes the order of magnitude (in megabytes) of the cache used
        for in-memory compression. Note that this is just an order of
        magnitude estimate and that for big arrays, the code will go
        over this value at dump and at load time.

    Returns
    -------
    filenames: list of strings
        The list of file names in which the data is stored. If
        compress is false, each array is stored in a different file.

    See Also
    --------
    joblib.load : corresponding loader

    Notes
    -----
    Memmapping on load cannot be used for compressed files. Thus
    using compression can significantly slow down loading. In
    addition, compressed files take extra extra memory during
    dump and load.
    """
    # basestring is Python 2 only; this guard exists purely to give a
    # readable error when value/filename are passed in the wrong order.
    if not isinstance(filename, basestring):
        # People keep inverting arguments, and the resulting error is
        # incomprehensible
        raise ValueError(
            'Second argument should be a filename, %s (type %s) was given'
            % (filename, type(filename))
        )
    try:
        pickler = NumpyPickler(filename, compress=compress,
                               cache_size=cache_size)
        pickler.dump(value)
        pickler.close()
    finally:
        # Flush/close even on failure; the locals() check covers the case
        # where the NumpyPickler constructor itself raised.
        if 'pickler' in locals() and hasattr(pickler, 'file'):
            pickler.file.flush()
            pickler.file.close()
    return pickler._filenames
def load(filename, mmap_mode=None):
    """Reconstruct a Python object from a file persisted with joblib.load.

    Parameters
    -----------
    filename: string
        The name of the file from which to load the object
    mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
        If not None, the arrays are memory-mapped from the disk. This
        mode has not effect for compressed files. Note that in this
        case the reconstructed object might not longer match exactly
        the originally pickled object.

    Returns
    -------
    result: any Python object
        The object stored in the file.

    See Also
    --------
    joblib.dump : function to save an object

    Notes
    -----
    This function can load numpy array files saved separately during the
    dump. If the mmap_mode argument is given, it is passed to np.load and
    arrays are loaded as memmaps. As a consequence, the reconstructed
    object might not match the original pickled object. Note that if the
    file was saved with compression, the arrays cannot be memmaped.
    """
    file_handle = open(filename, 'rb')
    # We are careful to open the file hanlde early and keep it open to
    # avoid race-conditions on renames. That said, if data are stored in
    # companion files, moving the directory will create a race when
    # joblib tries to access the companion files.
    if _read_magic(file_handle) == _ZFILE_PREFIX:
        # Compressed storage: mmap is impossible, so the flag is ignored
        # with a warning rather than an error.
        if mmap_mode is not None:
            warnings.warn('file "%(filename)s" appears to be a zip, '
                          'ignoring mmap_mode "%(mmap_mode)s" flag passed'
                          % locals(), Warning, stacklevel=2)
        unpickler = ZipNumpyUnpickler(filename, file_handle=file_handle)
    else:
        unpickler = NumpyUnpickler(filename,
                                   file_handle=file_handle,
                                   mmap_mode=mmap_mode)

    try:
        obj = unpickler.load()
    finally:
        # Close the handle even if unpickling fails part-way through.
        if hasattr(unpickler, 'file_handle'):
            unpickler.file_handle.close()
    return obj
| agpl-3.0 |
jss-emr/openerp-7-src | openerp/addons/l10n_si/account_wizard.py | 51 | 1162 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) conexus.at
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from osv import osv
import addons
class AccountWizard_cd(osv.osv_memory):
    # Extend the chart-of-accounts setup wizard for the Slovenian (l10n_si)
    # localisation: account codes default to 6 digits.
    _inherit='wizard.multi.charts.accounts'

    _defaults = {
        'code_digits' : 6,
    }

# Old-style OpenERP model registration: instantiating the class registers it
# with the ORM.
AccountWizard_cd()
| agpl-3.0 |
KMTsvetanov/Zend2all | node_modules/node-gyp/gyp/pylib/gyp/input_test.py | 1841 | 3207 | #!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
    """Unit tests for DependencyGraphNode.FindCycles.

    NOTE: uses dict.iteritems(), so this module is Python 2 only.
    """

    def setUp(self):
        # Five disconnected nodes; each test wires up its own edges.
        self.nodes = {}
        for x in ('a', 'b', 'c', 'd', 'e'):
            self.nodes[x] = gyp.input.DependencyGraphNode(x)

    def _create_dependency(self, dependent, dependency):
        # Maintain both sides of the edge, as gyp.input expects.
        dependent.dependencies.append(dependency)
        dependency.dependents.append(dependent)

    def test_no_cycle_empty_graph(self):
        for label, node in self.nodes.iteritems():
            self.assertEquals([], node.FindCycles())

    def test_no_cycle_line(self):
        # a -> b -> c -> d : a simple chain has no cycles.
        self._create_dependency(self.nodes['a'], self.nodes['b'])
        self._create_dependency(self.nodes['b'], self.nodes['c'])
        self._create_dependency(self.nodes['c'], self.nodes['d'])

        for label, node in self.nodes.iteritems():
            self.assertEquals([], node.FindCycles())

    def test_no_cycle_dag(self):
        # Diamond-ish DAG: shared dependencies are not cycles.
        self._create_dependency(self.nodes['a'], self.nodes['b'])
        self._create_dependency(self.nodes['a'], self.nodes['c'])
        self._create_dependency(self.nodes['b'], self.nodes['c'])

        for label, node in self.nodes.iteritems():
            self.assertEquals([], node.FindCycles())

    def test_cycle_self_reference(self):
        self._create_dependency(self.nodes['a'], self.nodes['a'])

        self.assertEquals([[self.nodes['a'], self.nodes['a']]],
                          self.nodes['a'].FindCycles())

    def test_cycle_two_nodes(self):
        self._create_dependency(self.nodes['a'], self.nodes['b'])
        self._create_dependency(self.nodes['b'], self.nodes['a'])

        # Each node reports the cycle starting from itself.
        self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
                          self.nodes['a'].FindCycles())
        self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
                          self.nodes['b'].FindCycles())

    def test_two_cycles(self):
        # a <-> b and b <-> c: both cycles reachable from 'a'.
        self._create_dependency(self.nodes['a'], self.nodes['b'])
        self._create_dependency(self.nodes['b'], self.nodes['a'])

        self._create_dependency(self.nodes['b'], self.nodes['c'])
        self._create_dependency(self.nodes['c'], self.nodes['b'])

        cycles = self.nodes['a'].FindCycles()
        self.assertTrue(
            [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
        self.assertTrue(
            [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
        self.assertEquals(2, len(cycles))

    def test_big_cycle(self):
        # One five-node ring: a -> b -> c -> d -> e -> a.
        self._create_dependency(self.nodes['a'], self.nodes['b'])
        self._create_dependency(self.nodes['b'], self.nodes['c'])
        self._create_dependency(self.nodes['c'], self.nodes['d'])
        self._create_dependency(self.nodes['d'], self.nodes['e'])
        self._create_dependency(self.nodes['e'], self.nodes['a'])

        self.assertEquals([[self.nodes['a'],
                            self.nodes['b'],
                            self.nodes['c'],
                            self.nodes['d'],
                            self.nodes['e'],
                            self.nodes['a']]],
                          self.nodes['a'].FindCycles())
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
miptliot/edx-ora2 | openassessment/assessment/views.py | 3 | 1860 | import logging
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from openassessment.assessment.api.peer import get_assessments
from submissions.api import SubmissionRequestError, get_submissions
log = logging.getLogger(__name__)
@login_required()
def get_evaluations_for_student_item(request, course_id, student_id, item_id):  # pylint: disable=unused-argument
    """Render every peer assessment attached to one student item.

    Developer utility page.  The student item is identified by the unique
    (course, student, item) triple taken from the URL.

    Args:
        request (HttpRequest): The request (unused beyond authentication).
        course_id (str): The course id for this student item.
        student_id (str): The student id for this student item.
        item_id (str): The item id for this student item.

    Returns:
        HttpResponse: A simple development page listing all evaluations
        related to the specified student item, or an error message when
        the student item does not exist.
    """
    student_item = {
        'course_id': course_id,
        'student_id': student_id,
        'item_id': item_id,
    }
    # The template context starts as a copy of the student item fields.
    context = dict(student_item)
    try:
        evaluations = []
        for submission in get_submissions(student_item):
            # Tag each evaluation with its submission so the template can
            # group them.
            for evaluation in get_assessments(submission["uuid"]):
                evaluation["submission_uuid"] = submission["uuid"]
                evaluations.append(evaluation)
        context["evaluations"] = evaluations
    except SubmissionRequestError:
        context["error"] = "The specified student item was not found."
    return render_to_response('evaluations.html', context)
| agpl-3.0 |
rosenjens/monad | OpenStreetMap/routesGenerator/src/python/test_router.py | 2 | 1750 | """
Copyright 2015 Ericsson AB
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import unittest
import os.path
import sys
import router
FILE = "testmap.xml"
#def main():
#
# unittest.main()
class RouterTester(unittest.TestCase):
    """
    Class for testing the router.py
    """
    #@classmethod
    #def setUpClass(cls):
    #    if os.path.isfile(FILE):
    #        cls._map = router.Map(FILE)
    #        cls._map.parsData()
    #    else:
    #        assert False, ("File not found: ", FILE)

    # set up the test
    def setUp(self):
        # NOTE: assert is stripped under `python -O`; the skipUnless guards
        # on the tests below are the real protection against a missing map.
        assert os.path.isfile(FILE)
        self._map = router.Map(FILE)
        self._map.parsData()
        pass

    @unittest.skipUnless(os.path.isfile(FILE), "fileNotFound")
    def testPathFinder(self):
        # A route between two distinct nodes must contain at least one node.
        path, _ = self._map.findRoute(-439079, -439083)
        self.assertGreater(len(path), 0)

        # The path from one node to itself should be itself.
        path, cost = self._map.findRoute(-439079, -439079)
        self.assertEqual(path, [-439079])
        self.assertEqual(cost, 0)

    @unittest.skipUnless(os.path.isfile(FILE), "fileNotFound")
    def test(self):
        # Placeholder for future router tests.
        pass
if __name__ == '__main__':
print os.path.dirname(os.path.realpath(__file__))
#main()
unittest.main()
| apache-2.0 |
yelongyu/chihu | venv/lib/python2.7/site-packages/markupsafe/tests.py | 674 | 6107 | # -*- coding: utf-8 -*-
import gc
import sys
import unittest
from markupsafe import Markup, escape, escape_silent
from markupsafe._compat import text_type
class MarkupTestCase(unittest.TestCase):
def test_adding(self):
# adding two strings should escape the unsafe one
unsafe = '<script type="application/x-some-script">alert("foo");</script>'
safe = Markup('<em>username</em>')
assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)
def test_string_interpolation(self):
# string interpolations are safe to use too
assert Markup('<em>%s</em>') % '<bad user>' == \
'<em><bad user></em>'
assert Markup('<em>%(username)s</em>') % {
'username': '<bad user>'
} == '<em><bad user></em>'
assert Markup('%i') % 3.14 == '3'
assert Markup('%.2f') % 3.14 == '3.14'
def test_type_behavior(self):
# an escaped object is markup too
assert type(Markup('foo') + 'bar') is Markup
# and it implements __html__ by returning itself
x = Markup("foo")
assert x.__html__() is x
def test_html_interop(self):
# it also knows how to treat __html__ objects
class Foo(object):
def __html__(self):
return '<em>awesome</em>'
def __unicode__(self):
return 'awesome'
__str__ = __unicode__
assert Markup(Foo()) == '<em>awesome</em>'
assert Markup('<strong>%s</strong>') % Foo() == \
'<strong><em>awesome</em></strong>'
def test_tuple_interpol(self):
self.assertEqual(Markup('<em>%s:%s</em>') % (
'<foo>',
'<bar>',
), Markup(u'<em><foo>:<bar></em>'))
def test_dict_interpol(self):
self.assertEqual(Markup('<em>%(foo)s</em>') % {
'foo': '<foo>',
}, Markup(u'<em><foo></em>'))
self.assertEqual(Markup('<em>%(foo)s:%(bar)s</em>') % {
'foo': '<foo>',
'bar': '<bar>',
}, Markup(u'<em><foo>:<bar></em>'))
def test_escaping(self):
# escaping and unescaping
assert escape('"<>&\'') == '"<>&''
assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
assert Markup("<test>").unescape() == "<test>"
def test_formatting(self):
for actual, expected in (
(Markup('%i') % 3.14, '3'),
(Markup('%.2f') % 3.14159, '3.14'),
(Markup('%s %s %s') % ('<', 123, '>'), '< 123 >'),
(Markup('<em>{awesome}</em>').format(awesome='<awesome>'),
'<em><awesome></em>'),
(Markup('{0[1][bar]}').format([0, {'bar': '<bar/>'}]),
'<bar/>'),
(Markup('{0[1][bar]}').format([0, {'bar': Markup('<bar/>')}]),
'<bar/>')):
assert actual == expected, "%r should be %r!" % (actual, expected)
# This is new in 2.7
if sys.version_info >= (2, 7):
def test_formatting_empty(self):
formatted = Markup('{}').format(0)
assert formatted == Markup('0')
def test_custom_formatting(self):
class HasHTMLOnly(object):
def __html__(self):
return Markup('<foo>')
class HasHTMLAndFormat(object):
def __html__(self):
return Markup('<foo>')
def __html_format__(self, spec):
return Markup('<FORMAT>')
assert Markup('{0}').format(HasHTMLOnly()) == Markup('<foo>')
assert Markup('{0}').format(HasHTMLAndFormat()) == Markup('<FORMAT>')
def test_complex_custom_formatting(self):
    # __html_format__ receives the format spec after the colon ('link'
    # here) and may build nested Markup; the final result stays unescaped
    # because every interpolated piece is already Markup.
    class User(object):
        def __init__(self, id, username):
            self.id = id
            self.username = username

        def __html_format__(self, format_spec):
            if format_spec == 'link':
                return Markup('<a href="/user/{0}">{1}</a>').format(
                    self.id,
                    self.__html__(),
                )
            elif format_spec:
                # Any other non-empty spec is rejected.
                raise ValueError('Invalid format spec')
            return self.__html__()

        def __html__(self):
            return Markup('<span class=user>{0}</span>').format(self.username)

    user = User(1, 'foo')
    assert Markup('<p>User: {0:link}').format(user) == \
        Markup('<p>User: <a href="/user/1"><span class=user>foo</span></a>')
def test_all_set(self):
    import markupsafe as markup
    # Every name advertised in __all__ must resolve on the module;
    # getattr raises AttributeError for any stale entry.
    for exported_name in markup.__all__:
        getattr(markup, exported_name)
def test_escape_silent(self):
    # escape_silent maps None to the empty Markup, while plain escape
    # stringifies it. The expected value for '<foo>' is the escaped form
    # (the entities were stripped in this copy of the file).
    assert escape_silent(None) == Markup()
    assert escape(None) == Markup(None)
    assert escape_silent('<foo>') == Markup(u'&lt;foo&gt;')
def test_splitting(self):
    # All split variants must return Markup instances, not plain strings.
    expected = [Markup('a'), Markup('b')]
    self.assertEqual(Markup('a b').split(), expected)
    self.assertEqual(Markup('a b').rsplit(), expected)
    self.assertEqual(Markup('a\nb').splitlines(), expected)
def test_mul(self):
    # Repetition preserves the Markup type and content.
    assert Markup('a') * 3 == Markup('aaa')
class MarkupLeakTestCase(unittest.TestCase):
    # Leak check for the C speedups module: repeatedly escape strings and
    # verify that the process-wide object count stabilises across rounds.

    def test_markup_leaks(self):
        counts = set()
        for count in range(20):
            for item in range(1000):
                escape("foo")
                escape("<foo>")
                escape(u"foo")
                escape(u"<foo>")
            # If the count keeps changing between rounds, objects are
            # being leaked by the escape implementation.
            counts.add(len(gc.get_objects()))
        assert len(counts) == 1, 'ouch, c extension seems to leak objects'
def suite():
    """Build the test suite; the leak test only applies to the C module."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(MarkupTestCase))
    # this test only tests the c extension: the pure-Python escape function
    # has a func_code attribute, the native one does not.
    if not hasattr(escape, 'func_code'):
        tests.addTest(unittest.makeSuite(MarkupLeakTestCase))
    return tests
if __name__ == '__main__':
    # Run through the suite() factory so the conditional leak-test
    # inclusion logic above is applied.
    unittest.main(defaultTest='suite')

# vim:sts=4:sw=4:et:
| gpl-3.0 |
chugunovyar/factoryForBuild | env/lib/python2.7/site-packages/django/contrib/auth/management/__init__.py | 109 | 5023 | """
Creates permissions for all installed apps that need permissions.
"""
from __future__ import unicode_literals
import getpass
import unicodedata
from django.apps import apps as global_apps
from django.contrib.auth import get_permission_codename
from django.core import exceptions
from django.db import DEFAULT_DB_ALIAS, router
from django.utils import six
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
def _get_all_permissions(opts):
    """
    Returns (codename, name) for all permissions in the given opts.
    """
    # Builtin (add/change/delete) permissions first, followed by any custom
    # permissions declared on the model's Meta.
    return _get_builtin_permissions(opts) + list(opts.permissions)
def _get_builtin_permissions(opts):
    """
    Returns (codename, name) for all autogenerated permissions.
    By default, this is ('add', 'change', 'delete')
    """
    return [
        (get_permission_codename(action, opts),
         'Can %s %s' % (action, opts.verbose_name_raw))
        for action in opts.default_permissions
    ]
def create_permissions(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs):
    """
    Create any missing Permission rows for every model in app_config on the
    given database alias. Typically connected to the post_migrate signal.
    """
    # Apps without a models module have no models, hence no permissions.
    if not app_config.models_module:
        return
    app_label = app_config.label
    try:
        # Re-fetch everything through the provided app registry so this works
        # with the historical registry supplied during migrations.
        app_config = apps.get_app_config(app_label)
        ContentType = apps.get_model('contenttypes', 'ContentType')
        Permission = apps.get_model('auth', 'Permission')
    except LookupError:
        return
    # Respect database routers: skip databases that don't take auth tables.
    if not router.allow_migrate_model(using, Permission):
        return
    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    for klass in app_config.get_models():
        # Force looking up the content types in the current database
        # before creating foreign keys to them.
        ctype = ContentType.objects.db_manager(using).get_for_model(klass)
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta):
            searched_perms.append((ctype, perm))
    # Find all the Permissions that have a content_type for a model we're
    # looking for. We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set(Permission.objects.using(using).filter(
        content_type__in=ctypes,
    ).values_list(
        "content_type", "codename"
    ))
    # Only build Permission objects that don't already exist, then insert
    # them in a single query.
    perms = [
        Permission(codename=codename, name=name, content_type=ct)
        for ct, (codename, name) in searched_perms
        if (ct.pk, codename) not in all_perms
    ]
    Permission.objects.using(using).bulk_create(perms)
    if verbosity >= 2:
        for perm in perms:
            print("Adding permission '%s'" % perm)
def get_system_username():
    """
    Try to determine the current system user's username.

    :returns: The username as a unicode string, or an empty string if the
        username could not be determined.
    """
    try:
        result = getpass.getuser()
    except (ImportError, KeyError):
        # KeyError will be raised by os.getpwuid() (called by getuser())
        # if there is no corresponding entry in the /etc/passwd file
        # (a very restricted chroot environment, for example).
        return ''
    if six.PY2:
        # On Python 2 getuser() returns bytes; decode using the locale
        # encoding so the caller always receives text.
        try:
            result = result.decode(DEFAULT_LOCALE_ENCODING)
        except UnicodeDecodeError:
            # UnicodeDecodeError - preventive treatment for non-latin Windows.
            return ''
    return result
def get_default_username(check_db=True):
    """
    Try to determine the current system user's username to use as a default.

    :param check_db: If ``True``, requires that the username does not match an
        existing ``auth.User`` (otherwise returns an empty string).
    :returns: The username, or an empty string if no username can be
        determined.
    """
    # This file is used in apps.py, it should not trigger models import.
    from django.contrib.auth import models as auth_app
    # If the User model has been swapped out, we can't make any assumptions
    # about the default user name.
    if auth_app.User._meta.swapped:
        return ''
    default_username = get_system_username()
    try:
        # ASCII-fold the OS username (strip accents, spaces, upper case) so
        # it has a chance of passing the username validator below.
        default_username = (
            unicodedata.normalize('NFKD', default_username)
            .encode('ascii', 'ignore').decode('ascii')
            .replace(' ', '').lower()
        )
    except UnicodeDecodeError:
        return ''
    # Run the username validator
    try:
        auth_app.User._meta.get_field('username').run_validators(default_username)
    except exceptions.ValidationError:
        return ''
    # Don't return the default username if it is already taken.
    if check_db and default_username:
        try:
            auth_app.User._default_manager.get(username=default_username)
        except auth_app.User.DoesNotExist:
            pass
        else:
            return ''
    return default_username
| gpl-3.0 |
scottrogowski/code2flow | code2flow/model.py | 1 | 19171 | import abc
import os
# Fill/edge colors used when emitting graphviz output (see Node.to_dot /
# Edge.to_dot below).
TRUNK_COLOR = '#966F33'  # nodes nothing else calls
LEAF_COLOR = '#6db33f'   # nodes that call nothing else
EDGE_COLOR = "#cf142b"
NODE_COLOR = "#cccccc"   # default node fill
class Namespace(dict):
    """
    Abstract constants class
    Constants can be accessed via .attribute or [key] and can be iterated over.
    """

    def __init__(self, *args, **kwargs):
        # Positional arguments become identity mappings (value == key);
        # keyword arguments are stored as given.
        entries = dict(zip(args, args))
        entries.update(kwargs)
        super().__init__(entries)

    def __getattr__(self, item):
        # Delegate attribute access to dict lookup so ns.FOO == ns['FOO'].
        return self[item]
# Sentinel owners for calls whose target could not be resolved, and the set
# of valid Group kinds.
OWNER_CONST = Namespace("UNKNOWN_VAR", "UNKNOWN_MODULE")
GROUP_TYPE = Namespace("FILE", "CLASS", "NAMESPACE")
def is_installed(executable_cmd):
    """
    Determine whether a command can be run or not

    :param str executable_cmd: name of the executable to look for on the PATH
    :rtype: bool
    """
    # (Docstring fix: the old docstring documented a nonexistent
    # `list[str] individual_files` parameter and claimed `:rtype: str`,
    # but the function takes a command name and returns a bool.)
    for path in os.environ["PATH"].split(os.pathsep):
        # Some PATH entries come quoted (notably on Windows); strip quotes.
        path = path.strip('"')
        exe_file = os.path.join(path, executable_cmd)
        # Only count regular files that are actually executable.
        if os.path.isfile(exe_file) and os.access(exe_file, os.X_OK):
            return True
    return False
def djoin(*tup):
    """
    Convenience method to join strings with dots
    :rtype: str
    """
    # A single list argument is joined element-wise; otherwise the
    # positional arguments themselves are joined.
    if len(tup) == 1 and isinstance(tup[0], list):
        parts = tup[0]
    else:
        parts = tup
    return '.'.join(parts)
def flatten(list_of_lists):
    """
    Return a list from a list of lists
    :param list[list[Value]] list_of_lists:
    :rtype: list[Value]
    """
    flat = []
    for sublist in list_of_lists:
        flat.extend(sublist)
    return flat
def _resolve_str_variable(variable, file_groups):
    """
    String variables are when variable.points_to is a string
    This happens ONLY when we have imports that we delayed processing for
    This function looks through all files to see if any particular node matches
    the variable.points_to string
    :param Variable variable:
    :param list[Group] file_groups:
    :rtype: Node|Group|str
    """
    target = variable.points_to
    for file_group in file_groups:
        # Nodes are checked before groups, in file order, matching the
        # first import token that equals the unresolved string.
        for node in file_group.all_nodes():
            if target in node.import_tokens:
                return node
        for group in file_group.all_groups():
            if target in group.import_tokens:
                return group
    return OWNER_CONST.UNKNOWN_MODULE
class BaseLanguage(abc.ABC):
    """
    Languages are individual implementations for different dynamic languages.
    This is the superclass of Python, Javascript, PHP, and Ruby.
    Every implementation must implement all of these methods.
    For more detail, see the individual implementations.
    Note that the 'Tree' type is generic and will be a different
    type for different languages. In Python, it is an ast.AST.
    """

    @staticmethod
    @abc.abstractmethod
    def assert_dependencies():
        """
        Raise if external tools needed to parse this language are missing.
        :rtype: None
        """

    @staticmethod
    @abc.abstractmethod
    def get_tree(filename, lang_params):
        """
        Parse the file into the language's syntax tree.
        :param filename str:
        :rtype: Tree
        """

    @staticmethod
    @abc.abstractmethod
    def separate_namespaces(tree):
        """
        Split a tree into (subgroup trees, node trees, leftover body trees).
        :param tree Tree:
        :rtype: (list[tree], list[tree], list[tree])
        """

    @staticmethod
    @abc.abstractmethod
    def make_nodes(tree, parent):
        """
        Build Node objects for the function definitions in the tree.
        :param tree Tree:
        :param parent Group:
        :rtype: list[Node]
        """

    @staticmethod
    @abc.abstractmethod
    def make_root_node(lines, parent):
        """
        Build the implicit node for statements outside any function.
        :param lines list[Tree]:
        :param parent Group:
        :rtype: Node
        """

    @staticmethod
    @abc.abstractmethod
    def make_class_group(tree, parent):
        """
        Build a Group (and its member nodes) from a class definition tree.
        :param tree Tree:
        :param parent Group:
        :rtype: Group
        """
class Variable():
    """
    Variables represent named tokens that are accessible to their scope.
    They may either point to a string or, once resolved, a Group/Node.
    Not all variables can be resolved
    """

    def __init__(self, token, points_to, line_number=None):
        """
        :param str token:
        :param str|Call|Node|Group points_to: (str/Call is eventually resolved to Nodes|Groups)
        :param int|None line_number:
        """
        assert token
        assert points_to
        self.token = token
        self.points_to = points_to
        self.line_number = line_number

    def __repr__(self):
        # Bug fix: the closing '>' was missing, unlike the Call/Node/Group
        # reprs which all close their bracket.
        return f"<Variable token={self.token} points_to={repr(self.points_to)}>"

    def to_string(self):
        """
        For logging
        :rtype: str
        """
        if self.points_to and isinstance(self.points_to, (Group, Node)):
            return f'{self.token}->{self.points_to.token}'
        return f'{self.token}->{self.points_to}'
class Call():
    """
    Calls represent function call expressions.
    They can be an attribute call like
        object.do_something()
    Or a "naked" call like
        do_something()
    """
    def __init__(self, token, line_number=None, owner_token=None, definite_constructor=False):
        """
        :param str token: the function name being called
        :param int|None line_number: line of the call expression
        :param str|None owner_token: the attribute owner ('obj' in obj.f()),
            or None for naked calls
        :param bool definite_constructor: True when the parser is certain the
            call constructs a class instance
        """
        self.token = token
        self.owner_token = owner_token
        self.line_number = line_number
        self.definite_constructor = definite_constructor

    def __repr__(self):
        return f"<Call owner_token={self.owner_token} token={self.token}>"

    def to_string(self):
        """
        Returns a representation of this call to be printed by the engine
        in logging.
        :rtype: str
        """
        if self.owner_token:
            return f"{self.owner_token}.{self.token}()"
        return f"{self.token}()"

    def is_attr(self):
        """
        Attribute calls are like `a.do_something()` rather than `do_something()`
        :rtype: bool
        """
        return self.owner_token is not None

    def matches_variable(self, variable):
        """
        Check whether this variable is what the call is acting on.
        For example, if we had 'obj' from
            obj = Obj()
        as a variable and a call of
            obj.do_something()
        Those would match and we would return the "do_something" node from obj.
        :param variable Variable:
        :rtype: Node
        """
        if self.is_attr():
            if self.owner_token == variable.token:
                # The variable resolved to a class-like Group: look for a
                # member node with the same token, then inherited ones.
                for node in getattr(variable.points_to, 'nodes', []):
                    if self.token == node.token:
                        return node
                for inherit_nodes in getattr(variable.points_to, 'inherits', []):
                    for node in inherit_nodes:
                        if self.token == node.token:
                            return node
                # Unresolved owner: propagate the sentinel string so the
                # engine can report an unknown-owner edge.
                if variable.points_to in OWNER_CONST:
                    return variable.points_to

            # This section is specifically for resolving namespace variables
            if isinstance(variable.points_to, Group) \
               and variable.points_to.group_type == GROUP_TYPE.NAMESPACE:
                # Only handles one level of nesting: 'ns.Class.method()'.
                parts = self.owner_token.split('.')
                if len(parts) != 2:
                    return None
                if parts[0] != variable.token:
                    return None
                for node in variable.points_to.all_nodes():
                    if parts[1] == node.namespace_ownership() \
                       and self.token == node.token:
                        return node

            return None
        if self.token == variable.token:
            if isinstance(variable.points_to, Node):
                return variable.points_to
            # Calling a class name resolves to its constructor, if it has one.
            if isinstance(variable.points_to, Group) \
               and variable.points_to.group_type == GROUP_TYPE.CLASS \
               and variable.points_to.get_constructor():
                return variable.points_to.get_constructor()
        return None
class Node():
    """
    A Node is a function/method definition: a vertex of the final call graph.
    """
    def __init__(self, token, calls, variables, parent, import_tokens=None,
                 line_number=None, is_constructor=False):
        """
        :param str token: function name
        :param list[Call] calls: calls made inside this function's body
        :param list[Variable] variables: variables defined in this function
        :param Group|Node parent: enclosing scope
        :param list[str]|None import_tokens: tokens this node is importable as
        :param int|None line_number: line of the definition
        :param bool is_constructor: whether this is a class constructor
        """
        self.token = token
        self.line_number = line_number
        self.calls = calls
        self.variables = variables
        self.import_tokens = import_tokens or []
        self.parent = parent
        self.is_constructor = is_constructor

        # Random uid used as the graphviz node identifier.
        self.uid = "node_" + os.urandom(4).hex()

        # Assume it is a leaf and a trunk. These are modified later
        self.is_leaf = True  # it calls nothing else
        self.is_trunk = True  # nothing calls it

    def __repr__(self):
        return f"<Node token={self.token} parent={self.parent}>"

    def name(self):
        """
        Names exist largely for unit tests
        :rtype: str
        """
        return f"{self.first_group().filename()}::{self.token_with_ownership()}"

    def first_group(self):
        """
        The first group that contains this node.
        :rtype: Group
        """
        # Walk up through nested functions until a Group is reached.
        parent = self.parent
        while not isinstance(parent, Group):
            parent = parent.parent
        return parent

    def file_group(self):
        """
        Get the file group that this node is in.
        :rtype: Group
        """
        parent = self.parent
        while parent.parent:
            parent = parent.parent
        return parent

    def is_attr(self):
        """
        Whether this node is attached to something besides the file
        :rtype: bool
        """
        return (self.parent
                and isinstance(self.parent, Group)
                and self.parent.group_type in (GROUP_TYPE.CLASS, GROUP_TYPE.NAMESPACE))

    def token_with_ownership(self):
        """
        Token which includes what group this is a part of
        :rtype: str
        """
        if self.is_attr():
            return djoin(self.parent.token, self.token)
        return self.token

    def namespace_ownership(self):
        """
        Get the ownership excluding namespace
        :rtype: str
        """
        # Collect only CLASS ancestors; stops at the first non-class parent.
        parent = self.parent
        ret = []
        while parent and parent.group_type == GROUP_TYPE.CLASS:
            ret = [parent.token] + ret
            parent = parent.parent
        return djoin(ret)

    def label(self):
        """
        Labels are what you see on the graph
        :rtype: str
        """
        if self.line_number is not None:
            return f"{self.line_number}: {self.token}()"
        return f"{self.token}()"

    def remove_from_parent(self):
        """
        Remove this node from it's parent. This effectively deletes the node.
        :rtype: None
        """
        self.first_group().nodes = [n for n in self.first_group().nodes if n != self]

    def get_variables(self, line_number=None):
        """
        Get variables in-scope on the line number.
        This includes all local variables as-well-as outer-scope variables
        :rtype: list[Variable]
        """
        if line_number is None:
            ret = list(self.variables)
        else:
            # TODO variables should be sorted by scope before line_number
            # NOTE(review): this comparison raises TypeError if any local
            # variable has line_number None -- confirm parsers always set it.
            ret = list([v for v in self.variables if v.line_number <= line_number])
        if any(v.line_number for v in ret):
            # Most recently defined variables first.
            ret.sort(key=lambda v: v.line_number, reverse=True)

        parent = self.parent
        while parent:
            ret += parent.get_variables()
            parent = parent.parent
        return ret

    def resolve_variables(self, file_groups):
        """
        For all variables, attempt to resolve the Node/Group on points_to.
        There is a good chance this will be unsuccessful.
        :param list[Group] file_groups:
        :rtype: None
        """
        for variable in self.variables:
            if isinstance(variable.points_to, str):
                variable.points_to = _resolve_str_variable(variable, file_groups)
            elif isinstance(variable.points_to, Call):
                # else, this is a call variable
                call = variable.points_to
                # Only process Class(); Not a.Class()
                if call.is_attr() and not call.definite_constructor:
                    continue
                # Else, assume the call is a constructor.
                # iterate through to find the right group
                # NOTE(review): no break after a match, so the last group
                # with a matching token wins when tokens collide.
                for file_group in file_groups:
                    for group in file_group.all_groups():
                        if group.token == call.token:
                            variable.points_to = group
            else:
                assert isinstance(variable.points_to, (Node, Group))

    def to_dot(self):
        """
        Output for graphviz (.dot) files
        :rtype: str
        """
        attributes = {
            'label': self.label(),
            'name': self.name(),
            'shape': "rect",
            'style': 'rounded,filled',
            'fillcolor': NODE_COLOR,
        }
        # Trunk coloring takes precedence over leaf coloring.
        if self.is_trunk:
            attributes['fillcolor'] = TRUNK_COLOR
        elif self.is_leaf:
            attributes['fillcolor'] = LEAF_COLOR

        ret = self.uid + ' ['
        for k, v in attributes.items():
            ret += f'{k}="{v}" '
        ret += ']'
        return ret

    def to_dict(self):
        """
        Output for json files (json graph specification)
        :rtype: dict
        """
        return {
            'uid': self.uid,
            'label': self.label(),
            'name': self.name(),
        }
def _wrap_as_variables(sequence):
    """
    Given a list of either Nodes or Groups, wrap them in variables.
    This is used in the get_variables method to allow all defined
    functions and classes to be defined as variables
    :param list[Group|Node] sequence:
    :rtype: list[Variable]
    """
    def _to_variable(element):
        return Variable(element.token, element, element.line_number)

    return [_to_variable(element) for element in sequence]
class Edge():
    """A directed call edge from node0 (caller) to node1 (callee)."""

    def __init__(self, node0, node1):
        """
        :param Node node0: the calling node
        :param Node node1: the called node
        """
        self.node0 = node0
        self.node1 = node1

        # When we draw the edge, we know the calling function is definitely not a leaf...
        # and the called function is definitely not a trunk
        node0.is_leaf = False
        node1.is_trunk = False

    def __repr__(self):
        # Bug fix: close the bracket for consistency with the Node/Group/Call
        # reprs, which all terminate with '>'.
        return f"<Edge {self.node0} -> {self.node1}>"

    def to_dot(self):
        '''
        Returns string format for embedding in a dotfile. Example output:
        node_uid_a -> node_uid_b [color='#aaa' penwidth='2']
        :rtype: str
        '''
        ret = self.node0.uid + ' -> ' + self.node1.uid
        ret += f' [color="{EDGE_COLOR}" penwidth="2"]'
        return ret

    def to_dict(self):
        """
        :rtype: dict
        """
        return {
            'source': self.node0.uid,
            'target': self.node1.uid,
            'directed': True,
        }
class Group():
    """
    Groups represent namespaces (classes and modules/files)
    """
    def __init__(self, token, group_type, display_type, import_tokens=None,
                 line_number=None, parent=None, inherits=None):
        """
        :param str token: group name (class name or filename)
        :param str group_type: one of GROUP_TYPE
        :param str display_type: human-readable type used in graph labels
        :param list[str]|None import_tokens: tokens this group is importable as
        :param int|None line_number: line of the definition
        :param Group|None parent: enclosing group, None for file groups
        :param list|None inherits: nodes inherited from base classes
        """
        self.token = token
        self.line_number = line_number
        self.nodes = []
        # The implicit node holding statements outside any function.
        self.root_node = None
        self.subgroups = []
        self.parent = parent
        self.group_type = group_type
        self.display_type = display_type
        self.import_tokens = import_tokens or []
        self.inherits = inherits or []
        assert group_type in GROUP_TYPE

        self.uid = "cluster_" + os.urandom(4).hex()  # group doesn't work by syntax rules

    def __repr__(self):
        return f"<Group token={self.token} type={self.display_type}>"

    def label(self):
        """
        Labels are what you see on the graph
        :rtype: str
        """
        return f"{self.display_type}: {self.token}"

    def filename(self):
        """
        The ultimate filename of this group.
        :rtype: str
        """
        if self.group_type == GROUP_TYPE.FILE:
            return self.token
        # Recurse up to the containing file group.
        return self.parent.filename()

    def add_subgroup(self, sg):
        """
        Subgroups are found after initialization. This is how they are added.
        :param sg Group:
        """
        self.subgroups.append(sg)

    def add_node(self, node, is_root=False):
        """
        Nodes are found after initialization. This is how they are added.
        :param node Node:
        :param is_root bool:
        """
        self.nodes.append(node)
        if is_root:
            self.root_node = node

    def all_nodes(self):
        """
        List of nodes that are part of this group + all subgroups
        :rtype: list[Node]
        """
        ret = list(self.nodes)
        for subgroup in self.subgroups:
            ret += subgroup.all_nodes()
        return ret

    def get_constructor(self):
        """
        Return the first constructor for this group - if any
        TODO, this excludes the possibility of multiple constructors like
        __init__ vs __new__
        :rtype: Node|None
        """
        assert self.group_type == GROUP_TYPE.CLASS
        constructors = [n for n in self.nodes if n.is_constructor]
        if constructors:
            return constructors[0]

    def all_groups(self):
        """
        List of groups that are part of this group + all subgroups
        :rtype: list[Group]
        """
        ret = [self]
        for subgroup in self.subgroups:
            ret += subgroup.all_groups()
        return ret

    def get_variables(self, line_number=None):
        """
        Get in-scope variables from this group.
        This assumes every variable will be in-scope in nested functions
        line_number is included for compatibility with Node.get_variables but is not used
        :param int line_number:
        :rtype: list[Variable]
        """
        if self.root_node:
            # Root-node variables plus every subgroup and non-root node,
            # each wrapped so they can be resolved like ordinary variables.
            variables = (self.root_node.variables
                         + _wrap_as_variables(self.subgroups)
                         + _wrap_as_variables(n for n in self.nodes if n != self.root_node))
            if any(v.line_number for v in variables):
                return sorted(variables, key=lambda v: v.line_number, reverse=True)
            return variables
        else:
            return []

    def remove_from_parent(self):
        """
        Remove this group from it's parent. This is effectively a deletion
        :rtype: None
        """
        if self.parent:
            self.parent.subgroups = [g for g in self.parent.subgroups if g != self]

    def to_dot(self):
        """
        Returns string format for embedding in a dotfile. Example output:
        subgraph group_uid_a {
            node_uid_b node_uid_c;
            label='class_name';
            ...
            subgraph group_uid_z {
                ...
            }
            ...
        }
        :rtype: str
        """
        ret = 'subgraph ' + self.uid + ' {\n'
        if self.nodes:
            ret += '    '
            ret += ' '.join(node.uid for node in self.nodes)
            ret += ';\n'
        attributes = {
            'label': self.label(),
            'name': self.token,
            'style': 'filled',
        }
        for k, v in attributes.items():
            ret += f'    {k}="{v}";\n'
        ret += '    graph[style=dotted];\n'
        # Nested subgroups are indented one extra level.
        for subgroup in self.subgroups:
            ret += '    ' + ('\n'.join('    ' + ln for ln in
                                       subgroup.to_dot().split('\n'))).strip() + '\n'
        ret += '};\n'
        return ret
| lgpl-3.0 |
unnikrishnankgs/va | venv/lib/python3.5/site-packages/django/views/decorators/vary.py | 586 | 1200 | from functools import wraps
from django.utils.cache import patch_vary_headers
from django.utils.decorators import available_attrs
def vary_on_headers(*headers):
    """
    A view decorator that adds the specified headers to the Vary header of the
    response. Usage:

        @vary_on_headers('Cookie', 'Accept-language')
        def index(request):
            ...

    Note that the header names are not case-sensitive.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def wrapper(*args, **kwargs):
            response = view_func(*args, **kwargs)
            patch_vary_headers(response, headers)
            return response
        return wrapper
    return decorator
def vary_on_cookie(func):
    """
    A view decorator that adds "Cookie" to the Vary header of a response. This
    indicates that a page's contents depends on cookies. Usage:

        @vary_on_cookie
        def index(request):
            ...
    """
    # Delegate to vary_on_headers so both decorators share one code path;
    # the wrapping and patch_vary_headers(response, ('Cookie',)) call are
    # identical to the previous inline implementation.
    return vary_on_headers('Cookie')(func)
| bsd-2-clause |
vitamincpp/shadowsocks | shadowsocks/lru_cache.py | 12 | 4286 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import collections
import logging
import time
# this LRUCache is optimized for concurrency, not QPS
# n: concurrency, keys stored in the cache
# m: visits not timed out, proportional to QPS * timeout
# get & set is O(1), not O(n). thus we can support very large n
# TODO: if timeout or QPS is too large, then this cache is not very efficient,
# as sweep() causes long pause
# NOTE(review): collections.MutableMapping is a pre-3.3 alias that was
# removed in Python 3.10; on modern interpreters this must come from
# collections.abc. Left as-is since this codebase targets Python 2.
class LRUCache(collections.MutableMapping):
    """This class is not thread safe"""

    def __init__(self, timeout=60, close_callback=None, *args, **kwargs):
        # timeout: seconds of inactivity before a key is evicted by sweep().
        # close_callback: called once per evicted *value* (deduplicated via
        # _closed_values within one sweep).
        self.timeout = timeout
        self.close_callback = close_callback
        self._store = {}
        # visit time -> keys visited at that time (may contain stale keys)
        self._time_to_keys = collections.defaultdict(list)
        # key -> time of its most recent visit
        self._keys_to_last_time = {}
        # FIFO of visit times, consumed from the left by sweep()
        self._last_visits = collections.deque()
        self._closed_values = set()
        self.update(dict(*args, **kwargs))  # use the free update to set keys

    def __getitem__(self, key):
        # O(1)
        # Reading counts as a visit and refreshes the key's lifetime.
        t = time.time()
        self._keys_to_last_time[key] = t
        self._time_to_keys[t].append(key)
        self._last_visits.append(t)
        return self._store[key]

    def __setitem__(self, key, value):
        # O(1)
        t = time.time()
        self._keys_to_last_time[key] = t
        self._store[key] = value
        self._time_to_keys[t].append(key)
        self._last_visits.append(t)

    def __delitem__(self, key):
        # O(1)
        # Stale entries remain in _time_to_keys/_last_visits; sweep() skips
        # keys no longer present in _store.
        del self._store[key]
        del self._keys_to_last_time[key]

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

    def sweep(self):
        # O(m)
        # Evict every key whose most recent visit is older than timeout,
        # invoking close_callback exactly once per evicted value.
        now = time.time()
        c = 0
        while len(self._last_visits) > 0:
            least = self._last_visits[0]
            if now - least <= self.timeout:
                break
            if self.close_callback is not None:
                for key in self._time_to_keys[least]:
                    if key in self._store:
                        if now - self._keys_to_last_time[key] > self.timeout:
                            value = self._store[key]
                            if value not in self._closed_values:
                                self.close_callback(value)
                                self._closed_values.add(value)
            for key in self._time_to_keys[least]:
                if key in self._store:
                    if now - self._keys_to_last_time[key] > self.timeout:
                        del self._store[key]
                        del self._keys_to_last_time[key]
                        c += 1
            self._last_visits.popleft()
            del self._time_to_keys[least]
        if c:
            self._closed_values.clear()
            logging.debug('%d keys swept' % c)
def test():
    # Smoke test for LRUCache eviction and close_callback semantics.
    # Timing-sensitive: sleeps are calibrated against timeout values.
    c = LRUCache(timeout=0.3)

    c['a'] = 1
    assert c['a'] == 1
    # After more than the timeout with no visits, 'a' is evicted.
    time.sleep(0.5)
    c.sweep()
    assert 'a' not in c

    c['a'] = 2
    c['b'] = 3
    time.sleep(0.2)
    c.sweep()
    assert c['a'] == 2
    assert c['b'] == 3

    time.sleep(0.2)
    c.sweep()
    # Reading 'b' refreshes it; 'a' was last visited 0.4s ago and expires.
    c['b']
    time.sleep(0.2)
    c.sweep()
    assert 'a' not in c
    assert c['b'] == 3

    time.sleep(0.5)
    c.sweep()
    assert 'a' not in c
    assert 'b' not in c

    global close_cb_called
    close_cb_called = False

    def close_cb(t):
        # Must fire exactly once per evicted value.
        global close_cb_called
        assert not close_cb_called
        close_cb_called = True

    c = LRUCache(timeout=0.1, close_callback=close_cb)
    c['s'] = 1
    c['s']
    time.sleep(0.1)
    c['s']
    time.sleep(0.3)
    c.sweep()

if __name__ == '__main__':
    test()
| apache-2.0 |
potash/scikit-learn | sklearn/datasets/mlcomp.py | 289 | 3855 | # Copyright (c) 2010 Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
"""Glue code to load http://mlcomp.org data as a scikit.learn dataset"""
import os
import numbers
from sklearn.datasets.base import load_files
def _load_document_classification(dataset_path, metadata, set_=None, **kwargs):
    # When a split name ('train'/'test') is given, load from that
    # subdirectory; otherwise load the whole dataset folder.
    path = dataset_path if set_ is None else os.path.join(dataset_path, set_)
    return load_files(path, metadata.get('description'), **kwargs)
# Registry mapping an MLComp dataset "format" metadata string to the loader
# function that knows how to read it.
LOADERS = {
    'DocumentClassification': _load_document_classification,
    # TODO: implement the remaining domain formats
}
def load_mlcomp(name_or_id, set_="raw", mlcomp_root=None, **kwargs):
    """Load a datasets as downloaded from http://mlcomp.org

    Parameters
    ----------
    name_or_id : the integer id or the string name metadata of the MLComp
        dataset to load

    set_ : select the portion to load: 'train', 'test' or 'raw'

    mlcomp_root : the filesystem path to the root folder where MLComp datasets
        are stored, if mlcomp_root is None, the MLCOMP_DATASETS_HOME
        environment variable is looked up instead.

    **kwargs : domain specific kwargs to be passed to the dataset loader.

    Read more in the :ref:`User Guide <datasets>`.

    Returns
    -------
    data : Bunch
        Dictionary-like object, the interesting attributes are:
        'filenames', the files holding the raw to learn, 'target', the
        classification labels (integer index), 'target_names',
        the meaning of the labels, and 'DESCR', the full description of the
        dataset.

    Note on the lookup process: depending on the type of name_or_id,
    will choose between integer id lookup or metadata name lookup by
    looking at the unzipped archives and metadata file.

    TODO: implement zip dataset loading too
    """
    if mlcomp_root is None:
        try:
            mlcomp_root = os.environ['MLCOMP_DATASETS_HOME']
        except KeyError:
            raise ValueError("MLCOMP_DATASETS_HOME env variable is undefined")

    # Normalize the root path (user expansion, absolute, canonical form).
    mlcomp_root = os.path.normpath(
        os.path.abspath(os.path.expanduser(mlcomp_root)))

    if not os.path.exists(mlcomp_root):
        raise ValueError("Could not find folder: " + mlcomp_root)

    # dataset lookup
    if isinstance(name_or_id, numbers.Integral):
        # id lookup
        dataset_path = os.path.join(mlcomp_root, str(name_or_id))
    else:
        # assume name based lookup
        dataset_path = None
        expected_name_line = "name: " + name_or_id
        for dataset in os.listdir(mlcomp_root):
            metadata_file = os.path.join(mlcomp_root, dataset, 'metadata')
            if not os.path.exists(metadata_file):
                continue
            with open(metadata_file) as f:
                for line in f:
                    if line.strip() == expected_name_line:
                        dataset_path = os.path.join(mlcomp_root, dataset)
                        break
        if dataset_path is None:
            raise ValueError("Could not find dataset with metadata line: " +
                             expected_name_line)

    # loading the dataset metadata
    metadata = dict()
    metadata_file = os.path.join(dataset_path, 'metadata')
    if not os.path.exists(metadata_file):
        raise ValueError(dataset_path + ' is not a valid MLComp dataset')
    with open(metadata_file) as f:
        for line in f:
            if ":" in line:
                key, value = line.split(":", 1)
                metadata[key.strip()] = value.strip()

    # Renamed from 'format' to avoid shadowing the builtin; also fixes the
    # 'unknow' typo that used to leak into the error message below.
    format_name = metadata.get('format', 'unknown')
    loader = LOADERS.get(format_name)
    if loader is None:
        raise ValueError("No loader implemented for format: " + format_name)
    return loader(dataset_path, metadata, set_=set_, **kwargs)
| bsd-3-clause |
andymg/androguard | tests/test_axml.py | 38 | 2563 | #!/usr/bin/env python
import logging
import datetime
import sys
PATH_INSTALL = "./"
sys.path.append(PATH_INSTALL)
from optparse import OptionParser
from androguard.core.analysis import auto
from androguard.core.androconf import set_debug
# CLI option specs consumed by the OptionParser in the __main__ block below.
option_0 = {'name': ('-d', '--directory'), 'help': 'directory input', 'nargs': 1}
option_1 = {'name': ('-v', '--verbose'), 'help': 'add debug', 'action': 'count'}
options = [option_0, option_1]

# Module-level logger writing bare messages (no level prefix) at INFO.
logger = logging.getLogger("main")
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)
def test(got, expected):
    # Print a PASS ('OK') or FAIL ('X') marker next to the compared values
    # and return the comparison result. Python 2 print statement; the
    # trailing comma suppresses the newline.
    if got == expected:
        prefix = ' OK '
    else:
        prefix = ' X '
    print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected)),
    return (got == expected)
class AndroLog:
    """Per-file logger wrapper passed to the analysis engine callbacks."""

    def __init__(self, id_file, filename):
        self.id_file = id_file
        self.filename = filename

    def _timestamp(self):
        # Shared "YYYY-mm-dd HH:MM:SS " prefix for every log line.
        return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S ")

    def dump(self, msg):
        logger.info(self._timestamp() +
                    "%s[%d]: %s" % (self.filename, self.id_file, msg))

    def error(self, msg):
        logger.info(self._timestamp() +
                    "ERROR %s[%d]: %s" % (self.filename, self.id_file, msg))
        import traceback
        traceback.print_exc()
class MyAXMLAnalysis(auto.DirectoryAndroAnalysis):
    """Directory analysis that dumps the XML of every APK/AXML file found."""

    def __init__(self, directory):
        super(MyAXMLAnalysis, self).__init__(directory)

    def filter_file(self, log, fileraw):
        # Narrow the base class's filter to APK and AXML files only.
        ret, file_type = super(MyAXMLAnalysis, self).filter_file(log, fileraw)
        if file_type != "APK" and file_type != "AXML":
            return (False, None)
        return (ret, file_type)

    def analysis_axml(self, log, axml):
        # Dump the decoded XML; returning False stops further analysis
        # stages for this file.
        log.dump("%s" % str(axml.get_xml_obj()))
        return False

    def analysis_apk(self, log, apk):
        log.dump("%s" % str(apk.get_android_manifest_xml()))
        return False

    def crash(self, log, why):
        log.error(why)
def main(options, arguments):
    """Run the AXML dump analysis over the directory given on the CLI."""
    if options.verbose:
        set_debug()

    if options.directory:
        settings = {
            # Analysis strategy, per-file logger class, and worker count
            # consumed by androguard's auto-analysis engine.
            "my": MyAXMLAnalysis(options.directory),
            "log": AndroLog,
            "max_fetcher": 3,
        }
        aa = auto.AndroAuto(settings)
        aa.go()
if __name__ == "__main__":
    parser = OptionParser()
    for option in options:
        # 'name' holds the flag strings; remove it so the remaining dict
        # entries can be passed as add_option keyword arguments (this
        # mutates the module-level option dicts).
        param = option['name']
        del option['name']
        parser.add_option(*param, **option)

    options, arguments = parser.parse_args()
    # Leave only the positional arguments in sys.argv for downstream code.
    sys.argv[:] = arguments
    main(options, arguments)
| apache-2.0 |
sdeleeuw/contagement | contagement/settings_shared.py | 1 | 5243 | """
Django settings for contagement project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): intentionally blank in this shared module -- the
# deployment-specific settings file must supply the real value.
SECRET_KEY = ''
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'suit',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',
    'rest_framework',
    'storages',
    'content.apps.ContentConfig',
    'posts.apps.PostsConfig',
    'pages.apps.PagesConfig',
    'pictures.apps.PicturesConfig',
    'picture_editor.apps.PictureEditorConfig',
    'videos.apps.VideosConfig',
    'notes.apps.NotesConfig',
    'search.apps.SearchConfig',
    'comments.apps.CommentsConfig',
    'frontend.apps.FrontendConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'contagement.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'contagement.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'contagement',
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# Django Suit
# Admin UI branding and left-hand menu layout for the django-suit skin.
SUIT_CONFIG = {
    'ADMIN_NAME': 'Contagement',
    'MENU': (
        {'app': 'posts', 'models': ('post', )},
        {'app': 'pages', 'models': ('page', )},
        {'app': 'pictures', 'models': ('picture', 'watermark')},
        {'app': 'videos', 'models': ('video', )},
        {'app': 'notes', 'models': ('note', )},
        {'app': 'comments', 'models': ('comment', )},
        {'app': 'content', 'label': _('Taxonomy'), 'models': (
            'category', 'metadatakey', 'metadatavalue', 'tag')},
        'auth',
        'sites',
    )
}
# Django REST Framework
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 100,
}
# Django Storages
# NOTE(review): credentials are intentionally blank here -- they must be
# provided by the deployment-specific settings module, never committed.
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
AWS_STORAGE_BUCKET_NAME = ''
# overwrite files instead of generating a new file name
AWS_S3_FILE_OVERWRITE = True
# select Frankfurt (eu-central-1) as S3 region
AWS_S3_REGION_NAME = 'eu-central-1'
# use Signature Version 4 required for recently added data centers
AWS_S3_SIGNATURE_VERSION = 's3v4'
# disable query parameter authentication, bucket should be public
AWS_QUERYSTRING_AUTH = False
# Project settings
PAGINATE_BY = 20
PICTURES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
# Named rendition presets consumed by the pictures app when deriving images.
PICTURE_VARIANTS = {
    'small': {'method': 'border', 'width': 200, 'height': 200, 'format': 'png'},
    'medium': {'method': 'thumbnail', 'width': 800, 'height': 800, 'format': 'jpg'},
    'large': {'method': 'thumbnail', 'width': 1600, 'height': 1600, 'format': 'jpg'},
}
METADATA_EXPORT_VARIANTS = ('large', )
JPEG_QUALITY = 75
# Model names for which the comments app is enabled.
COMMENT_MODELS = ['post', 'page', 'picture', 'note']
| gpl-3.0 |
stdweird/aquilon | lib/python2.6/aquilon/worker/commands/cat_cluster.py | 2 | 2304 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq cat --cluster`."""
from aquilon.aqdb.model import Cluster, MetaCluster
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.dbwrappers.resources import get_resource
from aquilon.worker.templates.base import Plenary
from aquilon.worker.templates.cluster import (PlenaryClusterObject,
PlenaryClusterData)
from aquilon.worker.templates.metacluster import (PlenaryMetaClusterObject,
PlenaryMetaClusterData)
class CommandCatCluster(BrokerCommand):
    """Implements `aq cat --cluster`: renders a cluster's plenary template,
    either its generated content or the copy already on disk."""

    required_parameters = ["cluster"]

    def render(self, session, logger, cluster, data, generate, **arguments):
        dbcluster = Cluster.get_unique(session, cluster, compel=True)
        dbresource = get_resource(session, dbcluster, **arguments)
        if dbresource:
            # A resource was addressed directly; render its own plenary.
            plenary_info = Plenary.get_plenary(dbresource, logger=logger)
        else:
            # Pick the data- or object-plenary class matching the cluster kind.
            if isinstance(dbcluster, MetaCluster):
                plenary_cls = PlenaryMetaClusterData if data else PlenaryMetaClusterObject
            else:
                plenary_cls = PlenaryClusterData if data else PlenaryClusterObject
            plenary_info = plenary_cls(dbcluster, logger=logger)
        if generate:
            return plenary_info._generate_content()
        return plenary_info.read()
| apache-2.0 |
lahosken/pants | tests/python/pants_test/build_graph/test_target.py | 2 | 8043 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os.path
from hashlib import sha1
from pants.base.exceptions import TargetDefinitionException
from pants.base.fingerprint_strategy import DefaultFingerprintStrategy
from pants.build_graph.address import Address
from pants.build_graph.target import Target
from pants_test.base_test import BaseTest
from pants_test.subsystem.subsystem_util import init_subsystem
class TestImplicitSourcesTarget(Target):
  # Target with a single implicit-sources glob, used by the tests below.
  default_sources_globs = '*.foo'
class TestImplicitSourcesTargetMulti(Target):
  # Target with multiple implicit-sources globs plus exclude globs.
  default_sources_globs = ('*.foo', '*.bar')
  default_sources_exclude_globs = ('*.baz', '*.qux')
class TargetTest(BaseTest):
  """Unit tests for the base Target: synthetic/derived chains, id mangling,
  implicit sources, kwargs validation and transitive invalidation hashing."""

  def test_derived_from_chain(self):
    # add concrete target
    concrete = self.make_target('y:concrete', Target)
    # add synthetic targets
    syn_one = self.make_target('y:syn_one', Target, derived_from=concrete)
    syn_two = self.make_target('y:syn_two', Target, derived_from=syn_one)
    # validate: the chain walks derived_from links back to the concrete root.
    self.assertEquals(list(syn_two.derived_from_chain), [syn_one, concrete])
    self.assertEquals(list(syn_one.derived_from_chain), [concrete])
    self.assertEquals(list(concrete.derived_from_chain), [])

  def test_is_synthetic(self):
    # add concrete target
    concrete = self.make_target('y:concrete', Target)
    # add synthetic targets: either derived from another target or explicitly
    # flagged with synthetic=True.
    syn_one = self.make_target('y:syn_one', Target, derived_from=concrete)
    syn_two = self.make_target('y:syn_two', Target, derived_from=syn_one)
    syn_three = self.make_target('y:syn_three', Target, synthetic=True)
    self.assertFalse(concrete.is_synthetic)
    self.assertTrue(syn_one.is_synthetic)
    self.assertTrue(syn_two.is_synthetic)
    self.assertTrue(syn_three.is_synthetic)

  def test_empty_traversable_properties(self):
    # A bare Target exposes no traversable specs.
    target = self.make_target(':foo', Target)
    self.assertSequenceEqual([], list(target.traversable_specs))
    self.assertSequenceEqual([], list(target.traversable_dependency_specs))

  def test_illegal_kwargs(self):
    # Unknown kwargs raise unless explicitly ignored via options.
    init_subsystem(Target.Arguments)
    with self.assertRaises(Target.Arguments.UnknownArgumentError) as cm:
      self.make_target('foo:bar', Target, foobar='barfoo')
    self.assertTrue('foobar = barfoo' in str(cm.exception))
    self.assertTrue('foo:bar' in str(cm.exception))

  def test_unknown_kwargs(self):
    # When listed under 'ignored', unknown kwargs are dropped silently.
    options = {Target.Arguments.options_scope: {'ignored': {'Target': ['foobar']}}}
    init_subsystem(Target.Arguments, options)
    target = self.make_target('foo:bar', Target, foobar='barfoo')
    self.assertFalse(hasattr(target, 'foobar'))

  def test_target_id_long(self):
    # Very deep paths are hashed in the middle so the id stays at 200 chars.
    long_path = 'dummy'
    for i in range(1,30):
      long_path = os.path.join(long_path, 'dummy{}'.format(i))
    long_target = self.make_target('{}:foo'.format(long_path), Target)
    long_id = long_target.id
    self.assertEqual(len(long_id), 200)
    self.assertEqual(long_id,
                     'dummy.dummy1.dummy2.dummy3.dummy4.dummy5.dummy6.dummy7.dummy8.dummy9.dummy10.du.'
                     'c582ce0f60008b3dc8196ae9e6ff5e8c40096974.y20.dummy21.dummy22.dummy23.dummy24.dummy25.'
                     'dummy26.dummy27.dummy28.dummy29.foo')

  def test_target_id_short(self):
    # Short paths are used verbatim, dots replacing path separators.
    short_path = 'dummy'
    for i in range(1,10):
      short_path = os.path.join(short_path, 'dummy{}'.format(i))
    short_target = self.make_target('{}:foo'.format(short_path), Target)
    short_id = short_target.id
    self.assertEqual(short_id,
                     'dummy.dummy1.dummy2.dummy3.dummy4.dummy5.dummy6.dummy7.dummy8.dummy9.foo')

  def test_implicit_sources(self):
    options = {Target.Arguments.options_scope: {'implicit_sources': True}}
    init_subsystem(Target.Arguments, options)
    target = self.make_target(':a', TestImplicitSourcesTarget)
    # Note explicit key_arg.
    sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar',
                                          key_arg='sources')
    self.assertEqual(sources.filespec, {'globs': ['src/foo/bar/*.foo']})
    target = self.make_target(':b', TestImplicitSourcesTargetMulti)
    # Note no explicit key_arg, which should behave just like key_arg='sources'.
    sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar')
    self.assertEqual(sources.filespec, {
      'globs': ['src/foo/bar/*.foo', 'src/foo/bar/*.bar'],
      'exclude': [{'globs': ['src/foo/bar/*.baz', 'src/foo/bar/*.qux']}],
    })
    # Ensure that we don't use implicit sources when creating resources fields.
    resources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar',
                                            key_arg='resources')
    self.assertEqual(resources.filespec, {'globs': []})

  def test_implicit_sources_disabled(self):
    # With implicit_sources off, an unspecified sources field stays empty.
    options = {Target.Arguments.options_scope: {'implicit_sources': False}}
    init_subsystem(Target.Arguments, options)
    target = self.make_target(':a', TestImplicitSourcesTarget)
    sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar')
    self.assertEqual(sources.filespec, {'globs': []})

  def test_create_sources_field_with_string_fails(self):
    # Passing a bare string (instead of a glob/address/list) is an error; the
    # message should mention the offending key when key_arg is given.
    target = self.make_target(':a-target', Target)
    # No key_arg.
    with self.assertRaises(TargetDefinitionException) as cm:
      target.create_sources_field(sources='a-string', sources_rel_path='')
    self.assertIn("Expected a glob, an address or a list, but was <type \'unicode\'>",
                  str(cm.exception))
    # With key_arg.
    with self.assertRaises(TargetDefinitionException) as cm:
      target.create_sources_field(sources='a-string', sources_rel_path='', key_arg='my_cool_field')
    self.assertIn("Expected 'my_cool_field' to be a glob, an address or a list, but was <type \'unicode\'>",
                  str(cm.exception))
    #could also test address case, but looks like nothing really uses it.

  def test_max_recursion(self):
    # A dependency cycle must surface as RecursiveDepthError, not blow the stack.
    target_a = self.make_target('a', Target)
    target_b = self.make_target('b', Target, dependencies=[target_a])
    self.make_target('c', Target, dependencies=[target_b])
    target_a.inject_dependency(Address.parse('c'))
    with self.assertRaises(Target.RecursiveDepthError):
      target_a.transitive_invalidation_hash()

  def test_transitive_invalidation_hash(self):
    # The transitive hash is '<own hash>.<sha1 of the previous level>[:12]',
    # folded up the dependency chain a -> b -> c.
    target_a = self.make_target('a', Target)
    target_b = self.make_target('b', Target, dependencies=[target_a])
    target_c = self.make_target('c', Target, dependencies=[target_b])
    hasher = sha1()
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_a.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_a.transitive_invalidation_hash())
    hasher = sha1()
    hasher.update(hash_value)
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_b.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_b.transitive_invalidation_hash())
    hasher = sha1()
    hasher.update(hash_value)
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_c.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_c.transitive_invalidation_hash())
    # Check direct invalidation.
    class TestFingerprintStrategy(DefaultFingerprintStrategy):
      def direct(self, target):
        return True
    fingerprint_strategy = TestFingerprintStrategy()
    hasher = sha1()
    hasher.update(target_b.invalidation_hash(fingerprint_strategy=fingerprint_strategy))
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_c.invalidation_hash(fingerprint_strategy=fingerprint_strategy)
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_c.transitive_invalidation_hash(fingerprint_strategy=fingerprint_strategy))
| apache-2.0 |
mikethebeer/cr8 | cr8/run_spec.py | 1 | 10629 | import argh
import os
import itertools
from functools import partial
from cr8 import aio, clients
from .insert_json import to_insert
from .bench_spec import load_spec
from .engine import Runner, Result, run_and_measure, eval_fail_if
from .misc import (
as_bulk_queries,
as_statements,
get_lines,
parse_version,
try_len
)
from .cli import dicts_from_lines
from .log import Logger
BENCHMARK_TABLE = '''
create table if not exists benchmarks (
version_info object (strict) as (
number string,
hash string,
date timestamp
),
statement string,
meta object as (
name string
),
started timestamp,
ended timestamp,
concurrency int,
bulk_size int,
runtime_stats object (strict) as (
avg double,
min double,
max double,
mean double,
error_margin double,
median double,
percentile object as (
"50" double,
"75" double,
"90" double,
"99" double,
"99_9" double
),
n integer,
variance double,
stdev double,
samples array(double)
)
) clustered into 8 shards with (number_of_replicas = '1-3', column_policy='strict')
'''
def _result_to_crate(log, client):
table_created = []
def save_result(result):
if not table_created:
aio.run(client.execute, BENCHMARK_TABLE)
table_created.append(None)
stmt, args = to_insert('benchmarks', result.as_dict())
aio.run(client.execute, stmt, args)
log.result(result)
return save_result
class Executor:
    """Executes the phases of a benchmark spec against a Crate cluster.

    Owns two clients: one for the cluster under benchmark and one (optional)
    for the cluster that stores the measured results. Use as a context
    manager so both clients are closed.
    """

    def __init__(self,
                 spec_dir,
                 benchmark_hosts,
                 result_hosts,
                 log,
                 fail_if,
                 sample_mode):
        self.benchmark_hosts = benchmark_hosts
        self.sample_mode = sample_mode
        self.spec_dir = spec_dir
        self.client = clients.client(benchmark_hosts)
        self.result_client = clients.client(result_hosts)
        self.server_version_info = aio.run(self.client.get_server_version)
        self.server_version = parse_version(self.server_version_info['number'])
        self.log = log
        # Pre-bind the server version so every Result carries it.
        self.create_result = partial(
            Result,
            version_info=self.server_version_info
        )
        if fail_if:
            self.fail_if = partial(eval_fail_if, fail_if)
        else:
            # No fail-if expression: never fail.
            self.fail_if = lambda x: None
        if result_hosts:
            self.process_result = _result_to_crate(self.log, self.result_client)
        else:
            self.process_result = log.result

    def _to_inserts(self, data_spec):
        # Turn a [[setup.data_files]] entry into (statement, args) insert pairs.
        target = data_spec['target']
        source = os.path.join(self.spec_dir, data_spec['source'])
        dicts = dicts_from_lines(get_lines(source))
        return (to_insert(target, d) for d in dicts)

    def exec_instructions(self, instructions):
        """Run a setup/teardown section: statement files, inline statements
        and bulk data loads (followed by a table refresh)."""
        filenames = instructions.statement_files
        filenames = (os.path.join(self.spec_dir, i) for i in filenames)
        lines = (line for fn in filenames for line in get_lines(fn))
        statements = itertools.chain(as_statements(lines), instructions.statements)
        for stmt in statements:
            aio.run(self.client.execute, stmt)
        for data_file in instructions.data_files:
            inserts = as_bulk_queries(self._to_inserts(data_file),
                                      data_file.get('bulk_size', 5000))
            concurrency = data_file.get('concurrency', 25)
            aio.run_many(self.client.execute_many, inserts, concurrency=concurrency)
            # Refresh so the loaded rows are visible to subsequent queries.
            aio.run(self.client.execute, 'refresh table {target}'.format(target=data_file['target']))

    def run_load_data(self, data_spec, meta=None):
        """Measure a bulk data load and report the timings as a Result."""
        inserts = self._to_inserts(data_spec)
        # Grab the first statement only for reporting purposes.
        statement = next(iter(inserts))[0]
        bulk_size = data_spec.get('bulk_size', 5000)
        inserts = as_bulk_queries(self._to_inserts(data_spec), bulk_size)
        concurrency = data_spec.get('concurrency', 25)
        num_records = data_spec.get('num_records')
        if num_records:
            # Convert a record count into the number of bulk requests.
            num_records = max(1, int(num_records / bulk_size))
        timed_stats = run_and_measure(
            self.client.execute_many, inserts, concurrency, num_records)
        self.process_result(self.create_result(
            statement=statement,
            meta=meta,
            timed_stats=timed_stats,
            concurrency=concurrency,
            bulk_size=bulk_size,
        ))

    def _skip_message(self, min_version, stmt):
        # Human-readable notice for queries skipped due to min_version.
        msg = ('## Skipping (Version {server_version} instead of {min_version}):\n'
               '   Statement: {statement:.70}')
        msg = msg.format(
            statement=stmt,
            min_version='.'.join((str(x) for x in min_version)),
            server_version='.'.join((str(x) for x in self.server_version)))
        return msg

    def run_queries(self, queries, meta=None):
        """Benchmark each [[queries]] entry, honoring its min_version gate,
        iteration/duration mode, concurrency and (bulk) args."""
        for query in queries:
            stmt = query['statement']
            iterations = query.get('iterations', 1)
            duration = query.get('duration', None)
            concurrency = query.get('concurrency', 1)
            args = query.get('args')
            bulk_args = query.get('bulk_args')
            min_version = parse_version(query.get('min_version'))
            if min_version and min_version > self.server_version:
                # Server too old for this query; report and move on.
                self.log.info(self._skip_message(min_version, stmt))
                continue
            mode_desc = 'Duration' if duration else 'Iterations'
            self.log.info(
                (f'\n## Running Query:\n'
                 f'   Statement:\n'
                 f'     {stmt}\n'
                 f'   Concurrency: {concurrency}\n'
                 f'   {mode_desc}: {duration or iterations}')
            )
            with Runner(self.benchmark_hosts, concurrency, self.sample_mode) as runner:
                timed_stats = runner.run(
                    stmt,
                    iterations=iterations,
                    duration=duration,
                    args=args,
                    bulk_args=bulk_args
                )
            result = self.create_result(
                statement=stmt,
                meta=meta,
                timed_stats=timed_stats,
                concurrency=concurrency,
                bulk_size=try_len(bulk_args)
            )
            self.process_result(result)
            # May raise/exit if the user's --fail-if expression matches.
            self.fail_if(result)

    def __enter__(self):
        return self

    def __exit__(self, *ex):
        self.client.close()
        self.result_client.close()
def do_run_spec(spec,
                benchmark_hosts,
                *,
                log,
                sample_mode,
                result_hosts=None,
                action=None,
                fail_if=None):
    """Load *spec* and run its setup, load_data/queries and teardown phases.

    *action* restricts which phases run; with no selection all phases run.
    Teardown is attempted even if the benchmark phases raise.
    """
    def enabled(name):
        # No explicit action selection means every phase is enabled.
        return not action or name in action

    with Executor(
            spec_dir=os.path.dirname(spec),
            benchmark_hosts=benchmark_hosts,
            result_hosts=result_hosts,
            log=log,
            fail_if=fail_if,
            sample_mode=sample_mode
    ) as executor:
        spec = load_spec(spec)
        try:
            if enabled('setup'):
                log.info('# Running setUp')
                executor.exec_instructions(spec.setup)
            log.info('# Running benchmark')
            if spec.load_data and enabled('load_data'):
                for data_spec in spec.load_data:
                    executor.run_load_data(data_spec, spec.meta)
            if spec.queries and enabled('queries'):
                executor.run_queries(spec.queries, spec.meta)
        finally:
            if enabled('teardown'):
                log.info('# Running tearDown')
                executor.exec_instructions(spec.teardown)
@argh.arg('benchmark_hosts', type=str)
@argh.arg('-of', '--output-fmt', choices=['json', 'text'], default='text')
@argh.arg('--action',
          choices=['setup', 'teardown', 'queries', 'load_data'],
          action='append')
@argh.arg('--logfile-info', help='Redirect info messages to a file')
@argh.arg('--logfile-result', help='Redirect benchmark results to a file')
@argh.arg('--sample-mode', choices=('all', 'reservoir'),
          help='Method used for sampling', default='reservoir')
@argh.wrap_errors([KeyboardInterrupt, BrokenPipeError] + clients.client_errors)
def run_spec(spec,
             benchmark_hosts,
             result_hosts=None,
             output_fmt=None,
             logfile_info=None,
             logfile_result=None,
             action=None,
             fail_if=None,
             sample_mode='reservoir'):
    """Run a spec file, executing the statements on the benchmark_hosts.

    Short example of a spec file:

        [setup]
        statement_files = ["sql/create_table.sql"]

        [[setup.data_files]]
        target = "t"
        source = "data/t.json"

        [[queries]]
        statement = "select count(*) from t"
        iterations = 2000
        concurrency = 10

        [teardown]
        statements = ["drop table t"]

    See https://github.com/mfussenegger/cr8/tree/master/specs
    for more examples.

    Args:
        spec: path to a spec file
        benchmark_hosts: hostname[:port] pairs of Crate nodes
        result_hosts: optional hostname[:port] Crate node pairs into which the
            runtime statistics should be inserted.
        output_fmt: output format
        action: Optional action to execute.
            Default is to execute all actions - setup, queries and teardown.
            If present only the specified action will be executed.
            The argument can be provided multiple times to execute more than
            one action.
        fail-if: An expression that causes cr8 to exit with a failure if it
            evaluates to true.
            The expression can contain formatting expressions for:
                - runtime_stats
                - statement
                - meta
                - concurrency
                - bulk_size
            For example:
                --fail-if "{runtime_stats.mean} > 1.34"
        sample_mode: how timing samples are kept ('all' or 'reservoir').
    """
    # The Logger context manager owns the output streams; the actual work
    # happens in do_run_spec.
    with Logger(output_fmt=output_fmt,
                logfile_info=logfile_info,
                logfile_result=logfile_result) as log:
        do_run_spec(
            spec=spec,
            benchmark_hosts=benchmark_hosts,
            log=log,
            result_hosts=result_hosts,
            action=action,
            fail_if=fail_if,
            sample_mode=sample_mode
        )
def main():
    # Console entry point: argh derives the CLI from run_spec's signature.
    argh.dispatch_command(run_spec)


if __name__ == "__main__":
    main()
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.