repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
sagiss/sardana
src/sardana/taurus/qt/qtcore/tango/sardana/__init__.py
1
1204
#!/usr/bin/env python ############################################################################## ## ## This file is part of Sardana ## ## http://www.sardana-controls.org/ ## ## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain ## ## Sardana is free software: you can redistribute it and/or modify ## it under the terms of the GNU Lesser General Public License as published by ## the Free Software Foundation, either version 3 of the License, or ## (at your option) any later version. ## ## Sardana is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU Lesser General Public License for more details. ## ## You should have received a copy of the GNU Lesser General Public License ## along with Sardana. If not, see <http://www.gnu.org/licenses/>. ## ############################################################################## """ Sardana extension for taurus Qt """ __docformat__ = 'restructuredtext' def registerExtensions(): from . import pool from . import macroserver pool.registerExtensions() macroserver.registerExtensions()
lgpl-3.0
Florianboux/zds-site
zds/gallery/migrations/0001_initial.py
4
29612
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'UserGallery' db.create_table( u'gallery_usergallery', ((u'id', self.gf('django.db.models.fields.AutoField')( primary_key=True)), ('user', self.gf('django.db.models.fields.related.ForeignKey')( to=orm['auth.User'])), ('gallery', self.gf('django.db.models.fields.related.ForeignKey')( to=orm['gallery.Gallery'])), ('mode', self.gf('django.db.models.fields.CharField')( default='R', max_length=1)), )) db.send_create_signal(u'gallery', ['UserGallery']) # Adding model 'Image' db.create_table( u'gallery_image', ((u'id', self.gf('django.db.models.fields.AutoField')( primary_key=True)), ('gallery', self.gf('django.db.models.fields.related.ForeignKey')( to=orm['gallery.Gallery'])), ('title', self.gf('django.db.models.fields.CharField')( max_length=80, null=True, blank=True)), ('slug', self.gf('django.db.models.fields.SlugField')( max_length=80)), ('physical', self.gf('django.db.models.fields.files.ImageField')( max_length=100)), ('thumb', self.gf('django.db.models.fields.files.ImageField')( max_length=100, null=True, blank=True)), ('medium', self.gf('django.db.models.fields.files.ImageField')( max_length=100, null=True, blank=True)), ('legend', self.gf('django.db.models.fields.CharField')( max_length=80, null=True, blank=True)), ('pubdate', self.gf('django.db.models.fields.DateTimeField')( auto_now_add=True, blank=True)), ('update', self.gf('django.db.models.fields.DateTimeField')( null=True, blank=True)), )) db.send_create_signal(u'gallery', ['Image']) # Adding model 'Gallery' db.create_table( u'gallery_gallery', ((u'id', self.gf('django.db.models.fields.AutoField')( primary_key=True)), ('title', self.gf('django.db.models.fields.CharField')( max_length=80)), ('subtitle', self.gf('django.db.models.fields.CharField')( max_length=200)), 
('slug', self.gf('django.db.models.fields.SlugField')( max_length=80)), ('pubdate', self.gf('django.db.models.fields.DateTimeField')( auto_now_add=True, blank=True)), ('update', self.gf('django.db.models.fields.DateTimeField')( null=True, blank=True)), )) db.send_create_signal(u'gallery', ['Gallery']) def backwards(self, orm): # Deleting model 'UserGallery' db.delete_table(u'gallery_usergallery') # Deleting model 'Image' db.delete_table(u'gallery_image') # Deleting model 'Gallery' db.delete_table(u'gallery_gallery') models = { u'auth.group': { 'Meta': { 'object_name': 'Group'}, u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'name': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '80'}), 'permissions': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})}, u'auth.permission': { 'Meta': { 'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ( 'django.db.models.fields.CharField', [], { 'max_length': '100'}), 'content_type': ( 'django.db.models.fields.related.ForeignKey', [], { 'to': u"orm['contenttypes.ContentType']"}), u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'name': ( 'django.db.models.fields.CharField', [], { 'max_length': '50'})}, u'auth.user': { 'Meta': { 'object_name': 'User'}, 'date_joined': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now'}), 'email': ( 'django.db.models.fields.EmailField', [], { 'max_length': '75', 'blank': 'True'}), 'first_name': ( 'django.db.models.fields.CharField', [], { 'max_length': '30', 'blank': 'True'}), 'groups': ( 'django.db.models.fields.related.ManyToManyField', [], { 'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ( 
'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'is_active': ( 'django.db.models.fields.BooleanField', [], { 'default': 'True'}), 'is_staff': ( 'django.db.models.fields.BooleanField', [], { 'default': 'False'}), 'is_superuser': ( 'django.db.models.fields.BooleanField', [], { 'default': 'False'}), 'last_login': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now'}), 'last_name': ( 'django.db.models.fields.CharField', [], { 'max_length': '30', 'blank': 'True'}), 'password': ( 'django.db.models.fields.CharField', [], { 'max_length': '128'}), 'user_permissions': ( 'django.db.models.fields.related.ManyToManyField', [], { 'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '30'})}, u'contenttypes.contenttype': { 'Meta': { 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ( 'django.db.models.fields.CharField', [], { 'max_length': '100'}), u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'model': ( 'django.db.models.fields.CharField', [], { 'max_length': '100'}), 'name': ( 'django.db.models.fields.CharField', [], { 'max_length': '100'})}, u'gallery.gallery': { 'Meta': { 'object_name': 'Gallery'}, u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'pubdate': ( 'django.db.models.fields.DateTimeField', [], { 'auto_now_add': 'True', 'blank': 'True'}), 'slug': ( 'django.db.models.fields.SlugField', [], { 'max_length': '80'}), 'subtitle': ( 'django.db.models.fields.CharField', [], { 'max_length': '200'}), 'title': ( 'django.db.models.fields.CharField', [], { 'max_length': '80'}), 'update': ( 'django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True'})}, u'gallery.image': { 'Meta': { 'object_name': 'Image'}, 
'gallery': ( 'django.db.models.fields.related.ForeignKey', [], { 'to': u"orm['gallery.Gallery']"}), u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'legend': ( 'django.db.models.fields.CharField', [], { 'max_length': '80', 'null': 'True', 'blank': 'True'}), 'medium': ( 'django.db.models.fields.files.ImageField', [], { 'max_length': '100', 'null': 'True', 'blank': 'True'}), 'physical': ( 'django.db.models.fields.files.ImageField', [], { 'max_length': '100'}), 'pubdate': ( 'django.db.models.fields.DateTimeField', [], { 'auto_now_add': 'True', 'blank': 'True'}), 'slug': ( 'django.db.models.fields.SlugField', [], { 'max_length': '80'}), 'thumb': ( 'django.db.models.fields.files.ImageField', [], { 'max_length': '100', 'null': 'True', 'blank': 'True'}), 'title': ( 'django.db.models.fields.CharField', [], { 'max_length': '80', 'null': 'True', 'blank': 'True'}), 'update': ( 'django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True'})}, u'gallery.usergallery': { 'Meta': { 'object_name': 'UserGallery'}, 'gallery': ( 'django.db.models.fields.related.ForeignKey', [], { 'to': u"orm['gallery.Gallery']"}), u'id': ( 'django.db.models.fields.AutoField', [], { 'primary_key': 'True'}), 'mode': ( 'django.db.models.fields.CharField', [], { 'default': "'R'", 'max_length': '1'}), 'user': ( 'django.db.models.fields.related.ForeignKey', [], { 'to': u"orm['auth.User']"})}} complete_apps = ['gallery']
gpl-3.0
gcode-mirror/git-core
contrib/svn-fe/svnrdump_sim.py
328
2044
#!/usr/bin/python """ Simulates svnrdump by replaying an existing dump from a file, taking care of the specified revision range. To simulate incremental imports the environment variable SVNRMAX can be set to the highest revision that should be available. """ import sys import os if sys.hexversion < 0x02040000: # The limiter is the ValueError() calls. This may be too conservative sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n") sys.exit(1) def getrevlimit(): var = 'SVNRMAX' if var in os.environ: return os.environ[var] return None def writedump(url, lower, upper): if url.startswith('sim://'): filename = url[6:] if filename[-1] == '/': filename = filename[:-1] # remove terminating slash else: raise ValueError('sim:// url required') f = open(filename, 'r') state = 'header' wroterev = False while(True): l = f.readline() if l == '': break if state == 'header' and l.startswith('Revision-number: '): state = 'prefix' if state == 'prefix' and l == 'Revision-number: %s\n' % lower: state = 'selection' if not upper == 'HEAD' and state == 'selection' and \ l == 'Revision-number: %s\n' % upper: break if state == 'header' or state == 'selection': if state == 'selection': wroterev = True sys.stdout.write(l) return wroterev if __name__ == "__main__": if not (len(sys.argv) in (3, 4, 5)): print("usage: %s dump URL -rLOWER:UPPER") sys.exit(1) if not sys.argv[1] == 'dump': raise NotImplementedError('only "dump" is suppported.') url = sys.argv[2] r = ('0', 'HEAD') if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r': r = sys.argv[3][2:].lstrip().split(':') if not getrevlimit() is None: r[1] = getrevlimit() if writedump(url, r[0], r[1]): ret = 0 else: ret = 1 sys.exit(ret)
gpl-2.0
suchow/psiTurk
psiturk/psiturk_org_services.py
1
12450
# -*- coding: utf-8 -*- """ This module """ import os import urllib2 import json import requests from psiturk.version import version_number import git import subprocess import signal import struct from sys import platform as _platform from psiturk.psiturk_config import PsiturkConfig import psutil class PsiturkOrgServices(object): """ PsiturkOrgServices this class provides an interface to the API provided by the psiturk_org website. The two main features of this API are registering secure ads and providing tunnel access see: https://github.com/NYUCCL/api-psiturk-org """ def __init__(self, key, secret): # 'https://api.psiturk.org' # by default for now self.api_server = 'https://api.psiturk.org' self.ad_server = 'https://ad.psiturk.org' self.sandbox_ad_server = 'https://sandbox.ad.psiturk.org' self.update_credentials(key, secret) if not self.check_credentials(): print 'WARNING *****************************' print 'Sorry, psiTurk Credentials invalid.\nYou will only be able '\ + 'to test experiments locally until you enter\nvalid '\ + 'credentials in the psiTurk Access section of ' \ + '~/.psiturkconfig.\n Get your credentials at '\ + 'https://www.psiturk.org/login.\n' def check_credentials(self): ''' Check credentials ''' req = requests.get(self.api_server + '/api/ad', auth=(self.access_key, self.secret_key)) # Not sure 500 server error should be included here if req.status_code in [401, 403, 500]: return False else: return True def update_credentials(self, key, secret): ''' Update credentials ''' self.access_key = key self.secret_key = secret def connect(self, server): """ connect: "connects to server" since the is a fairly basic API, just allows overriding of which Ad server you are talking to """ self.api_server = server def get_system_status(self): """ get_system_status: """ try: api_server_status_link = self.api_server + '/status_msg?version=' +\ version_number response = urllib2.urlopen(api_server_status_link, timeout=1) status_msg = json.load(response)['status'] 
except urllib2.HTTPError: status_msg = "Sorry, can't connect to psiturk.org, please check\ your internet connection.\nYou will not be able to create new\ hits, but testing locally should work.\n" return status_msg @classmethod def get_my_ip(cls): """ Asks and external server what your ip appears to be (useful is running from behind a NAT/wifi router). Of course, incoming port to the router must be forwarded correctly. """ if 'OPENSHIFT_SECRET_TOKEN' in os.environ: my_ip = os.environ['OPENSHIFT_APP_DNS'] else: my_ip = json.load(urllib2.urlopen( 'http://httpbin.org/ip' ))['origin'] return my_ip def create_record(self, name, content, username, password): ''' Create record ''' #headers = {'key': username, 'secret': password} req = requests.post(self.api_server + '/api/' + name, data=json.dumps(content), auth=(username, password)) return req def update_record(self, name, recordid, content, username, password): ''' Update record ''' # headers = {'key': username, 'secret': password} req = requests.put(self.api_server + '/api/' + name + '/' + str(recordid), data=json.dumps(content), auth=(username, password)) return req def delete_record(self, name, recordid, username, password): ''' Delete record ''' #headers = {'key': username, 'secret': password} req = requests.delete(self.api_server + '/api/' + name + '/' + str(recordid), auth=(username, password)) return req def query_records(self, name, username, password, query=''): ''' Query records ''' #headers = {'key': username, 'secret': password} req = requests.get(self.api_server + '/api/' + name + "/" + query, auth=(username, password)) return req def get_ad_url(self, ad_id, sandbox): """ get_ad_url: gets ad server thing """ if sandbox: return self.sandbox_ad_server + '/view/' + str(ad_id) else: return self.ad_server + '/view/' + str(ad_id) def set_ad_hitid(self, ad_id, hit_id, sandbox): """ get_ad_hitid: updates the ad with the corresponding hitid """ if sandbox: req = self.update_record('sandboxad', ad_id, 
{'amt_hit_id':hit_id}, self.access_key, self.secret_key) else: req = self.update_record('ad', ad_id, {'amt_hit_id':hit_id}, self.access_key, self.secret_key) if req.status_code == 201: return True else: return False def create_ad(self, ad_content): """ create_ad: """ if not 'is_sandbox' in ad_content: return False else: if ad_content['is_sandbox']: req = self.create_record( 'sandboxad', ad_content, self.access_key, self.secret_key ) else: req = self.create_record( 'ad', ad_content, self.access_key, self.secret_key ) if req.status_code == 201: return req.json()['ad_id'] else: return False def download_experiment(self, experiment_id): """ download_experiment: """ req = self.query_records('experiment', self.access_key, self.secret_key, query='download/'+experiment_id) print req.text return class ExperimentExchangeServices(object): """ ExperimentExchangeServices this class provides a non-authenticated interface to the API provided by the psiturk_org website. the feature is interfacing with the experiment exchange see: https://github.com/NYUCCL/api-psiturk-org """ def __init__(self): # 'https://api.psiturk.org' # by default for now self.api_server = 'https://api.psiturk.org' def query_records_no_auth(self, name, query=''): ''' Query records without authorization ''' #headers = {'key': username, 'secret': password} req = requests.get(self.api_server + '/api/' + name + "/" + query) return req def download_experiment(self, experiment_id): """ download_experiment: """ req = self.query_records_no_auth('experiment', query='download/'+experiment_id) if req.status_code == 404: print "Sorry, no experiment matching id # " + experiment_id print "Please double check the code you obtained on the\ http://psiturk.org/ee" else: # Check if folder with same name already exists. expinfo = req.json() gitr = requests.get(expinfo['git_url']).json() if os.path.exists('./'+gitr['name']): print "*"*20 print "Sorry, you already have a file or folder named\ "+gitr['name']+". 
Please rename or delete it before trying\ to download this experiment. You can do this by typing `rm\ -rf " + gitr['name'] + "`" print "*"*20 return if "clone_url" in gitr: git.Git().clone(gitr["clone_url"]) print "="*20 print "Downloading..." print "Name: " + expinfo['name'] print "Downloads: " + str(expinfo['downloads']) print "Keywords: " + expinfo['keywords'] print "psiTurk Version: " +\ str(expinfo['psiturk_version_string']) print "URL: http://psiturk.org/ee/"+experiment_id print "\n" print "Experiment downloaded into the `" + gitr['name'] + "`\ folder of the current directory" print "Type 'cd " + gitr['name'] + "` then run the `psiturk`\ command." print "="*20 else: print "Sorry, experiment not located on github. You might\ contact the author of this experiment. Experiment NOT\ downloaded." return class TunnelServices(object): ''' Allow psiTurk to puncture firewalls using reverse tunnelling.''' def __init__(self): config = PsiturkConfig() config.load_config() self.access_key = os.getenv('psiturk_access_key_id', config.get("psiTurk Access", "psiturk_access_key_id")) self.secret_key = os.getenv('psiturk_secret_access_id', config.get("psiTurk Access", "psiturk_secret_access_id")) self.local_port = os.getenv('PORT', config.get("Server Parameters", "port")) self.is_open = False self.tunnel_port = 8000 # Set by tunnel server self.tunnel_host = 'tunnel.psiturk.org' self.tunnel_server = os.path.join(os.path.dirname(__file__), "tunnel/ngrok") self.tunnel_config = os.path.join(os.path.dirname(__file__), "tunnel/ngrok-config") @classmethod def is_compatible(cls): ''' Check OS ''' is_64bit = struct.calcsize('P')*8 == 64 if (_platform == "darwin" and is_64bit): return True else: print("Linux tunnels are currenlty unsupported. 
Please notify "\ "authors@psiturk.org\nif you'd like to see this feature.") return False def get_tunnel_ad_url(self): ''' Get tunnel hostname from psiturk.org ''' req = requests.get('https://api.psiturk.org/api/tunnel', auth=(self.access_key, self.secret_key)) if req.status_code in [401, 403, 500]: print(req.content) return False else: return req.json()['tunnel_hostname'] def change_tunnel_ad_url(self): ''' Change tunnel ad url. ''' if self.is_open: self.close() req = requests.delete('https://api.psiturk.org/api/tunnel/', auth=(self.access_key, self.secret_key)) # the request content here actually will include the tunnel_hostname # if needed or wanted. if req.status_code in [401, 403, 500]: print(req.content) return False def open(self): ''' Open tunnel ''' if self.is_compatible(): tunnel_ad_url = self.get_tunnel_ad_url() if not tunnel_ad_url: return("Tunnel server appears to be down.") cmd = '%s -subdomain=%s -config=%s -log=stdout %s 2>&1 > server.log' \ %(self.tunnel_server, tunnel_ad_url, self.tunnel_config, self.local_port) self.tunnel = subprocess.Popen(cmd, shell=True) self.url = '%s.%s' %(tunnel_ad_url, self.tunnel_host) self.full_url = 'http://%s.%s:%s' %(tunnel_ad_url, self.tunnel_host, self.tunnel_port) self.is_open = True print "Tunnel URL: %s" % self.full_url print "Hint: In OSX, you can open a terminal link using cmd + click" def close(self): ''' Close tunnel ''' parent_pid = psutil.Process(self.tunnel.pid) child_pid = parent_pid.get_children(recursive=True) for pid in child_pid: pid.send_signal(signal.SIGTERM) self.is_open = False
mit
CasataliaLabs/biscuit_drishtiman
Pmw-2.0.0/build/lib.linux-x86_64-2.7/Pmw/Pmw_1_3_3/tests/ScrolledCanvas_test.py
2
3011
import Test import Pmw Test.initialise() c = Pmw.ScrolledCanvas def _createOvals(): w = Test.currentWidget() w.create_oval(50, 50, 150, 100, fill = 'red') w.create_oval(100, 50, 150, 150, fill = 'blue') w.create_oval(50, 100, 200, 350, fill = 'yellow') def _createWindow(): w = Test.currentWidget() lb = Pmw.ScrolledListBox(w.interior(), items = range(20), listbox_height = 6) w.create_window(300, 100, window = lb) def _testYView(doBottom): w = Test.currentWidget() top, bottom = w.yview() if type(top) != type(0.0) or type(bottom) != type(0.0): return 'bad type ' + str(top) + ' ' + str(bottom) if doBottom: if bottom != 1.0: return 'bottom is ' + str(bottom) else: if top != 0.0: return 'top is ' + str(top) kw_1 = {'labelpos': 'n', 'label_text': 'ScrolledCanvas', 'borderframe' : 1} tests_1 = ( (c.pack, (), {'padx' : 10, 'pady' : 10, 'fill' : 'both', 'expand' : 1}), (Test.num_options, (), 8), (_createOvals, ()), (c.resizescrollregion, ()), (_createWindow, ()), (c.resizescrollregion, ()), ('hull_background', 'aliceblue'), ('Scrollbar_borderwidth', 3), ('hull_cursor', 'gumby'), ('label_text', 'Label'), ('Scrollbar_repeatdelay', 200), ('Scrollbar_repeatinterval', 105), ('vscrollmode', 'none'), ('vscrollmode', 'static'), ('vscrollmode', 'dynamic'), ('hscrollmode', 'none'), ('hscrollmode', 'static'), ('hscrollmode', 'dynamic'), ('Scrollbar_width', 20), ('vscrollmode', 'bogus', 'ValueError: bad vscrollmode ' + 'option "bogus": should be static, dynamic, or none'), ('hscrollmode', 'bogus', 'ValueError: bad hscrollmode ' + 'option "bogus": should be static, dynamic, or none'), (c.yview, ('moveto', 0.0)), (_testYView, 0), (c.yview, ('moveto', 0.02)), (c.yview, ('moveto', 0.04)), (c.yview, ('moveto', 0.06)), (c.yview, ('moveto', 0.08)), (c.yview, ('moveto', 0.10)), (c.yview, ('moveto', 0.12)), (c.yview, ('moveto', 0.14)), (c.yview, ('moveto', 0.16)), (c.yview, ('moveto', 0.18)), (c.yview, ('moveto', 0.20)), (c.yview, ('moveto', 0.22)), (c.yview, ('moveto', 0.24)), (c.yview, 
('moveto', 0.26)), (c.yview, ('moveto', 0.28)), (c.yview, ('moveto', 0.98)), (_testYView, 1), (c.yview, ('scroll', -1, 'page')), (c.yview, ('scroll', -1, 'page')), (_testYView, 0), (c.yview, ('scroll', 1, 'page')), (c.yview, ('scroll', 1, 'page')), (_testYView, 1), ) kw_2 = { 'hscrollmode' : 'dynamic', 'label_text' : 'Label', 'labelpos' : 'n', 'scrollmargin': 20, 'canvasmargin': 20, 'usehullsize': 1, 'hull_width' : 500, 'hull_height' : 200, } tests_2 = ( (c.pack, (), {'padx' : 10, 'pady' : 10, 'fill' : 'both', 'expand' : 1}), ) alltests = ( (tests_1, kw_1), (tests_2, kw_2), ) testData = ((Pmw.ScrolledCanvas, alltests),) if __name__ == '__main__': Test.runTests(testData)
gpl-3.0
arnaudgelas/VTK
Examples/Rendering/Python/FilterCADPart.py
42
2338
#!/usr/bin/env python # This simple example shows how to do simple filtering in a pipeline. # See CADPart.py and Cylinder.py for related information. import vtk from vtk.util.colors import light_grey from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() # This creates a polygonal cylinder model with eight circumferential # facets. part = vtk.vtkSTLReader() part.SetFileName(VTK_DATA_ROOT + "/Data/42400-IDGH.stl") # A filter is a module that takes at least one input and produces at # least one output. The SetInput and GetOutput methods are used to do # the connection. What is returned by GetOutput is a particulat # dataset type. If the type is compatible with the SetInput method, # then the filters can be connected together. # # Here we add a filter that computes surface normals from the geometry. shrink = vtk.vtkShrinkPolyData() shrink.SetInputConnection(part.GetOutputPort()) shrink.SetShrinkFactor(0.85) # The mapper is responsible for pushing the geometry into the graphics # library. It may also do color mapping, if scalars or other # attributes are defined. partMapper = vtk.vtkPolyDataMapper() partMapper.SetInputConnection(shrink.GetOutputPort()) # The LOD actor is a special type of actor. It will change appearance # in order to render faster. At the highest resolution, it renders # ewverything just like an actor. The middle level is a point cloud, # and the lowest level is a simple bounding box. partActor = vtk.vtkLODActor() partActor.SetMapper(partMapper) partActor.GetProperty().SetColor(light_grey) partActor.RotateX(30.0) partActor.RotateY(-45.0) # Create the graphics structure. The renderer renders into the # render window. The render window interactor captures mouse events # and will perform appropriate camera or actor manipulation # depending on the nature of the events. 
ren = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # Add the actors to the renderer, set the background and size ren.AddActor(partActor) ren.SetBackground(0.1, 0.2, 0.4) renWin.SetSize(200, 200) # We'll zoom in a little by accessing the camera and invoking a "Zoom" # method on it. ren.ResetCamera() ren.GetActiveCamera().Zoom(1.5) iren.Initialize() renWin.Render() # Start the event loop. iren.Start()
bsd-3-clause
edx/edx-platform
lms/djangoapps/course_home_api/outline/v1/serializers.py
2
4584
""" Outline Tab Serializers. """ from django.utils.translation import ngettext from rest_framework import serializers from lms.djangoapps.course_home_api.dates.v1.serializers import DateSummarySerializer from lms.djangoapps.course_home_api.progress.v1.serializers import CertificateDataSerializer from lms.djangoapps.course_home_api.mixins import DatesBannerSerializerMixin, VerifiedModeSerializerMixin class CourseBlockSerializer(serializers.Serializer): """ Serializer for Course Block Objects """ blocks = serializers.SerializerMethodField() def get_blocks(self, block): block_key = block['id'] block_type = block['type'] children = block.get('children', []) if block_type != 'sequential' else [] # Don't descend past sequential description = block.get('format') display_name = block['display_name'] enable_links = self.context.get('enable_links') graded = block.get('graded') icon = None num_graded_problems = block.get('num_graded_problems', 0) scored = block.get('scored') if num_graded_problems and block_type == 'sequential': questions = ngettext('({number} Question)', '({number} Questions)', num_graded_problems) display_name += ' ' + questions.format(number=num_graded_problems) if graded and scored: icon = 'fa-pencil-square-o' if 'special_exam_info' in block: description = block['special_exam_info'].get('short_description') icon = block['special_exam_info'].get('suggested_icon', 'fa-pencil-square-o') serialized = { block_key: { 'children': [child['id'] for child in children], 'complete': block.get('complete', False), 'description': description, 'display_name': display_name, 'due': block.get('due'), 'effort_activities': block.get('effort_activities'), 'effort_time': block.get('effort_time'), 'icon': icon, 'id': block_key, 'lms_web_url': block['lms_web_url'] if enable_links else None, 'legacy_web_url': block['legacy_web_url'] if enable_links else None, 'resume_block': block.get('resume_block', False), 'type': block_type, 'has_scheduled_content': 
block.get('has_scheduled_content'), }, } for child in children: serialized.update(self.get_blocks(child)) return serialized class CourseGoalsSerializer(serializers.Serializer): """ Serializer for Course Goal data """ goal_options = serializers.ListField() selected_goal = serializers.DictField() class CourseToolSerializer(serializers.Serializer): """ Serializer for Course Tool Objects """ analytics_id = serializers.CharField() title = serializers.CharField() url = serializers.SerializerMethodField() def get_url(self, tool): course_overview = self.context.get('course_overview') url = tool.url(course_overview.id) request = self.context.get('request') return request.build_absolute_uri(url) class DatesWidgetSerializer(serializers.Serializer): """ Serializer for Dates Widget data """ course_date_blocks = DateSummarySerializer(many=True) dates_tab_link = serializers.CharField() user_timezone = serializers.CharField() class EnrollAlertSerializer(serializers.Serializer): """ Serializer for enroll alert information """ can_enroll = serializers.BooleanField() extra_text = serializers.CharField() class ResumeCourseSerializer(serializers.Serializer): """ Serializer for resume course data """ has_visited_course = serializers.BooleanField() url = serializers.URLField() class OutlineTabSerializer(DatesBannerSerializerMixin, VerifiedModeSerializerMixin, serializers.Serializer): """ Serializer for the Outline Tab """ access_expiration = serializers.DictField() cert_data = CertificateDataSerializer() course_blocks = CourseBlockSerializer() course_goals = CourseGoalsSerializer() course_tools = CourseToolSerializer(many=True) dates_widget = DatesWidgetSerializer() enroll_alert = EnrollAlertSerializer() handouts_html = serializers.CharField() has_ended = serializers.BooleanField() offer = serializers.DictField() resume_course = ResumeCourseSerializer() welcome_message_html = serializers.CharField()
agpl-3.0
crazcalm/AngelHack_python34
myenv/Lib/site-packages/jinja2/testsuite/regression.py
414
8382
# -*- coding: utf-8 -*- """ jinja2.testsuite.regression ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests corner cases and bugs. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ import unittest from jinja2.testsuite import JinjaTestCase from jinja2 import Template, Environment, DictLoader, TemplateSyntaxError, \ TemplateNotFound, PrefixLoader from jinja2._compat import text_type env = Environment() class CornerTestCase(JinjaTestCase): def test_assigned_scoping(self): t = env.from_string(''' {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {{- item -}} ''') assert t.render(item=42) == '[1][2][3][4]42' t = env.from_string(''' {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {%- set item = 42 %} {{- item -}} ''') assert t.render() == '[1][2][3][4]42' t = env.from_string(''' {%- set item = 42 %} {%- for item in (1, 2, 3, 4) -%} [{{ item }}] {%- endfor %} {{- item -}} ''') assert t.render() == '[1][2][3][4]42' def test_closure_scoping(self): t = env.from_string(''' {%- set wrapper = "<FOO>" %} {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {{- wrapper -}} ''') assert t.render() == '[1][2][3][4]<FOO>' t = env.from_string(''' {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {%- set wrapper = "<FOO>" %} {{- wrapper -}} ''') assert t.render() == '[1][2][3][4]<FOO>' t = env.from_string(''' {%- for item in (1, 2, 3, 4) %} {%- macro wrapper() %}[{{ item }}]{% endmacro %} {{- wrapper() }} {%- endfor %} {{- wrapper -}} ''') assert t.render(wrapper=23) == '[1][2][3][4]23' class BugTestCase(JinjaTestCase): def test_keyword_folding(self): env = Environment() env.filters['testing'] = lambda value, some: value + some assert env.from_string("{{ 'test'|testing(some='stuff') }}") \ .render() == 'teststuff' def test_extends_output_bugs(self): env = Environment(loader=DictLoader({ 'parent.html': '(({% block title %}{% 
endblock %}))' })) t = env.from_string('{% if expr %}{% extends "parent.html" %}{% endif %}' '[[{% block title %}title{% endblock %}]]' '{% for item in [1, 2, 3] %}({{ item }}){% endfor %}') assert t.render(expr=False) == '[[title]](1)(2)(3)' assert t.render(expr=True) == '((title))' def test_urlize_filter_escaping(self): tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}') assert tmpl.render() == '<a href="http://www.example.org/&lt;foo">http://www.example.org/&lt;foo</a>' def test_loop_call_loop(self): tmpl = env.from_string(''' {% macro test() %} {{ caller() }} {% endmacro %} {% for num1 in range(5) %} {% call test() %} {% for num2 in range(10) %} {{ loop.index }} {% endfor %} {% endcall %} {% endfor %} ''') assert tmpl.render().split() == [text_type(x) for x in range(1, 11)] * 5 def test_weird_inline_comment(self): env = Environment(line_statement_prefix='%') self.assert_raises(TemplateSyntaxError, env.from_string, '% for item in seq {# missing #}\n...% endfor') def test_old_macro_loop_scoping_bug(self): tmpl = env.from_string('{% for i in (1, 2) %}{{ i }}{% endfor %}' '{% macro i() %}3{% endmacro %}{{ i() }}') assert tmpl.render() == '123' def test_partial_conditional_assignments(self): tmpl = env.from_string('{% if b %}{% set a = 42 %}{% endif %}{{ a }}') assert tmpl.render(a=23) == '23' assert tmpl.render(b=True) == '42' def test_stacked_locals_scoping_bug(self): env = Environment(line_statement_prefix='#') t = env.from_string('''\ # for j in [1, 2]: # set x = 1 # for i in [1, 2]: # print x # if i % 2 == 0: # set x = x + 1 # endif # endfor # endfor # if a # print 'A' # elif b # print 'B' # elif c == d # print 'C' # else # print 'D' # endif ''') assert t.render(a=0, b=False, c=42, d=42.0) == '1111C' def test_stacked_locals_scoping_bug_twoframe(self): t = Template(''' {% set x = 1 %} {% for item in foo %} {% if item == 1 %} {% set x = 2 %} {% endif %} {% endfor %} {{ x }} ''') rv = t.render(foo=[1]).strip() assert rv == u'1' def 
test_call_with_args(self): t = Template("""{% macro dump_users(users) -%} <ul> {%- for user in users -%} <li><p>{{ user.username|e }}</p>{{ caller(user) }}</li> {%- endfor -%} </ul> {%- endmacro -%} {% call(user) dump_users(list_of_user) -%} <dl> <dl>Realname</dl> <dd>{{ user.realname|e }}</dd> <dl>Description</dl> <dd>{{ user.description }}</dd> </dl> {% endcall %}""") assert [x.strip() for x in t.render(list_of_user=[{ 'username':'apo', 'realname':'something else', 'description':'test' }]).splitlines()] == [ u'<ul><li><p>apo</p><dl>', u'<dl>Realname</dl>', u'<dd>something else</dd>', u'<dl>Description</dl>', u'<dd>test</dd>', u'</dl>', u'</li></ul>' ] def test_empty_if_condition_fails(self): self.assert_raises(TemplateSyntaxError, Template, '{% if %}....{% endif %}') self.assert_raises(TemplateSyntaxError, Template, '{% if foo %}...{% elif %}...{% endif %}') self.assert_raises(TemplateSyntaxError, Template, '{% for x in %}..{% endfor %}') def test_recursive_loop_bug(self): tpl1 = Template(""" {% for p in foo recursive%} {{p.bar}} {% for f in p.fields recursive%} {{f.baz}} {{p.bar}} {% if f.rec %} {{ loop(f.sub) }} {% endif %} {% endfor %} {% endfor %} """) tpl2 = Template(""" {% for p in foo%} {{p.bar}} {% for f in p.fields recursive%} {{f.baz}} {{p.bar}} {% if f.rec %} {{ loop(f.sub) }} {% endif %} {% endfor %} {% endfor %} """) def test_else_loop_bug(self): t = Template(''' {% for x in y %} {{ loop.index0 }} {% else %} {% for i in range(3) %}{{ i }}{% endfor %} {% endfor %} ''') self.assertEqual(t.render(y=[]).strip(), '012') def test_correct_prefix_loader_name(self): env = Environment(loader=PrefixLoader({ 'foo': DictLoader({}) })) try: env.get_template('foo/bar.html') except TemplateNotFound as e: assert e.name == 'foo/bar.html' else: assert False, 'expected error here' def test_contextfunction_callable_classes(self): from jinja2.utils import contextfunction class CallableClass(object): @contextfunction def __call__(self, ctx): return ctx.resolve('hello') tpl 
= Template("""{{ callableclass() }}""") output = tpl.render(callableclass = CallableClass(), hello = 'TEST') expected = 'TEST' self.assert_equal(output, expected) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(CornerTestCase)) suite.addTest(unittest.makeSuite(BugTestCase)) return suite
mit
arannasousa/pagseguro_xml
build/lib/pagseguro_xml/tests/test_classes_notificacao/test_transacao_v3.py
2
6493
# coding=utf-8 # --------------------------------------------------------------- # Desenvolvedor: Arannã Sousa Santos # Mês: 12 # Ano: 2015 # Projeto: pagseguro_xml # e-mail: asousas@live.com # --------------------------------------------------------------- import unittest import logging import sys from ...core.base_classes import TagDataHoraUTC from decimal import Decimal class ClasseNotificacaoTransacaoTest(unittest.TestCase): def setUp(self): logging.basicConfig(stream=sys.stderr) logging.getLogger(u'%s.%s' % (__package__, self.__class__.__name__)).setLevel(logging.DEBUG) self.logger = logging self.xml = u'''<?xml version="1.0" encoding="ISO-8859-1" standalone="yes"?> <transaction> <date>2011-02-10T16:13:41.000-03:00</date> <code>9E884542-81B3-4419-9A75-BCC6FB495EF1</code> <reference>REF1234</reference> <type>1</type> <status>3</status> <paymentMethod> <type>1</type> <code>101</code> </paymentMethod> <grossAmount>49900.00</grossAmount> <discountAmount>0.00</discountAmount> <creditorFees> <intermediationRateAmount>0.40</intermediationRateAmount> <intermediationFeeAmount>1644.80</intermediationFeeAmount> </creditorFees> <netAmount>49900.00</netAmount> <extraAmount>0.00</extraAmount> <installmentCount>1</installmentCount> <itemCount>2</itemCount> <items> <item> <id>0001</id> <description>Notebook Prata</description> <quantity>1</quantity> <amount>24300.00</amount> </item> <item> <id>0002</id> <description>Notebook Rosa</description> <quantity>1</quantity> <amount>25600.00</amount> </item> </items> <sender> <name>José Comprador</name> <email>comprador@uol.com.br</email> <phone> <areaCode>11</areaCode> <number>56273440</number> </phone> </sender> <shipping> <address> <street>Av. Brig. 
Faria Lima</street> <number>1384</number> <complement>5o andar</complement> <district>Jardim Paulistano</district> <postalCode>01452002</postalCode> <city>Sao Paulo</city> <state>SP</state> <country>BRA</country> </address> <type>1</type> <cost>21.50</cost> </shipping> </transaction>''' def test_parse_xml(self): from ...notificacao.v3.classes import ClasseNotificacaoTransacao log = self.logger.getLogger(u'%s.%s' % (__package__, self.__class__.__name__)) log.debug(u'Criando instancia de "ClasseNotificacaoTransacao_v3"') result = ClasseNotificacaoTransacao() log.debug(u'Gerando PARSE do xml') result.xml = self.xml log.debug(u'Quantidade de alertas no "parse": %s' % len(result.alertas)) log.debug(u'Testando valores da "transaction"') data = TagDataHoraUTC() data.valor = u'2011-02-10T16:13:41.000-03:00' self.assertEqual(result.date.valor, data.valor) self.assertEqual(result.code.valor, u'9E884542-81B3-4419-9A75-BCC6FB495EF1') self.assertEqual(result.reference.valor, u'REF1234') self.assertEqual(result.type.valor, 1) self.assertEqual(result.status.valor, 3) self.assertEqual(result.paymentMethod.type.valor, 1) self.assertEqual(result.paymentMethod.code.valor, 101) self.assertEqual(result.grossAmount.valor, Decimal(u'49900.00')) self.assertEqual(result.discountAmount.valor, Decimal(u'0.00')) self.assertEqual(result.creditorFees.intermediationRateAmount.valor, Decimal(u'0.40')) self.assertEqual(result.creditorFees.intermediationFeeAmount.valor, Decimal(u'1644.80')) self.assertEqual(result.netAmount.valor, Decimal(u'49900.00')) self.assertEqual(result.extraAmount.valor, Decimal(u'0.00')) self.assertEqual(result.installmentCount.valor, 1) self.assertEqual(result.itemCount.valor, 2) self.assertEqual(len(result.items), 2) log.debug(u'Valores da "transacao" OK') items = [ { u'id': u'0001', u'description': u'Notebook Prata', u'quantity': Decimal(u'1.00'), u'amount': Decimal(u'24300.00'), }, { u'id': u'0002', u'description': u'Notebook Rosa', u'quantity': Decimal(u'1.00'), 
u'amount': Decimal(u'25600.00'), }, ] log.debug(u'Testando valores dos "itens"') for i, item in enumerate(result.items): log.debug(u'Item No: %s' % (i + 1)) self.assertEqual(item.ID.valor, items[i][u'id']) self.assertEqual(item.description.valor, items[i][u'description']) self.assertEqual(item.quantity.valor, items[i][u'quantity']) self.assertEqual(item.amount.valor, items[i][u'amount']) log.debug(u'Testando dados do "comprador"') self.assertEqual(result.sender.name.valor, u'José Comprador') self.assertEqual(result.sender.email.valor, u'comprador@uol.com.br') self.assertEqual(result.sender.phone.areaCode.valor, 11) self.assertEqual(result.sender.phone.number.valor, 56273440) log.debug(u'Testando dados do "endereco" do comprador') self.assertEqual(result.shipping.address.street.valor, u'Av. Brig. Faria Lima') self.assertEqual(result.shipping.address.number.valor, u'1384') self.assertEqual(result.shipping.address.complement.valor, u'5o andar') self.assertEqual(result.shipping.address.district.valor, u'Jardim Paulistano') self.assertEqual(result.shipping.address.city.valor, u'Sao Paulo') self.assertEqual(result.shipping.address.state.valor, u'SP') self.assertEqual(result.shipping.address.country.valor, u'BRA') self.assertEqual(result.shipping.address.postalCode.valor, u'01452002') self.assertEqual(result.shipping.type.valor, 1) self.assertEqual(result.shipping.cost.valor, Decimal(u'21.50'))
gpl-2.0
sh4t/Sick-Beard
cherrypy/lib/profiler.py
45
6449
"""Profiler tools for CherryPy. CherryPy users ============== You can profile any of your pages as follows: from cherrypy.lib import profiler class Root: p = profile.Profiler("/path/to/profile/dir") def index(self): self.p.run(self._index) index.exposed = True def _index(self): return "Hello, world!" cherrypy.tree.mount(Root()) You can also turn on profiling for all requests using the make_app function as WSGI middleware. CherryPy developers =================== This module can be used whenever you make changes to CherryPy, to get a quick sanity-check on overall CP performance. Use the "--profile" flag when running the test suite. Then, use the serve() function to browse the results in a web browser. If you run this module from the command line, it will call serve() for you. """ # Make profiler output more readable by adding __init__ modules' parents. def new_func_strip_path(func_name): filename, line, name = func_name if filename.endswith("__init__.py"): return os.path.basename(filename[:-12]) + filename[-12:], line, name return os.path.basename(filename), line, name try: import profile import pstats pstats.func_strip_path = new_func_strip_path except ImportError: profile = None pstats = None import os, os.path import sys import warnings try: from cStringIO import StringIO except ImportError: from StringIO import StringIO _count = 0 class Profiler(object): def __init__(self, path=None): if not path: path = os.path.join(os.path.dirname(__file__), "profile") self.path = path if not os.path.exists(path): os.makedirs(path) def run(self, func, *args, **params): """Dump profile data into self.path.""" global _count c = _count = _count + 1 path = os.path.join(self.path, "cp_%04d.prof" % c) prof = profile.Profile() result = prof.runcall(func, *args, **params) prof.dump_stats(path) return result def statfiles(self): """statfiles() -> list of available profiles.""" return [f for f in os.listdir(self.path) if f.startswith("cp_") and f.endswith(".prof")] def stats(self, 
filename, sortby='cumulative'): """stats(index) -> output of print_stats() for the given profile.""" sio = StringIO() if sys.version_info >= (2, 5): s = pstats.Stats(os.path.join(self.path, filename), stream=sio) s.strip_dirs() s.sort_stats(sortby) s.print_stats() else: # pstats.Stats before Python 2.5 didn't take a 'stream' arg, # but just printed to stdout. So re-route stdout. s = pstats.Stats(os.path.join(self.path, filename)) s.strip_dirs() s.sort_stats(sortby) oldout = sys.stdout try: sys.stdout = sio s.print_stats() finally: sys.stdout = oldout response = sio.getvalue() sio.close() return response def index(self): return """<html> <head><title>CherryPy profile data</title></head> <frameset cols='200, 1*'> <frame src='menu' /> <frame name='main' src='' /> </frameset> </html> """ index.exposed = True def menu(self): yield "<h2>Profiling runs</h2>" yield "<p>Click on one of the runs below to see profiling data.</p>" runs = self.statfiles() runs.sort() for i in runs: yield "<a href='report?filename=%s' target='main'>%s</a><br />" % (i, i) menu.exposed = True def report(self, filename): import cherrypy cherrypy.response.headers['Content-Type'] = 'text/plain' return self.stats(filename) report.exposed = True class ProfileAggregator(Profiler): def __init__(self, path=None): Profiler.__init__(self, path) global _count self.count = _count = _count + 1 self.profiler = profile.Profile() def run(self, func, *args): path = os.path.join(self.path, "cp_%04d.prof" % self.count) result = self.profiler.runcall(func, *args) self.profiler.dump_stats(path) return result class make_app: def __init__(self, nextapp, path=None, aggregate=False): """Make a WSGI middleware app which wraps 'nextapp' with profiling. nextapp: the WSGI application to wrap, usually an instance of cherrypy.Application. path: where to dump the profiling output. aggregate: if True, profile data for all HTTP requests will go in a single file. 
If False (the default), each HTTP request will dump its profile data into a separate file. """ if profile is None or pstats is None: msg = ("Your installation of Python does not have a profile module. " "If you're on Debian, try `sudo apt-get install python-profiler`. " "See http://www.cherrypy.org/wiki/ProfilingOnDebian for details.") warnings.warn(msg) self.nextapp = nextapp self.aggregate = aggregate if aggregate: self.profiler = ProfileAggregator(path) else: self.profiler = Profiler(path) def __call__(self, environ, start_response): def gather(): result = [] for line in self.nextapp(environ, start_response): result.append(line) return result return self.profiler.run(gather) def serve(path=None, port=8080): if profile is None or pstats is None: msg = ("Your installation of Python does not have a profile module. " "If you're on Debian, try `sudo apt-get install python-profiler`. " "See http://www.cherrypy.org/wiki/ProfilingOnDebian for details.") warnings.warn(msg) import cherrypy cherrypy.config.update({'server.socket_port': int(port), 'server.thread_pool': 10, 'environment': "production", }) cherrypy.quickstart(Profiler(path)) if __name__ == "__main__": serve(*tuple(sys.argv[1:]))
gpl-3.0
pnavarro/neutron
neutron/extensions/extra_dhcp_opt.py
13
3168
# Copyright (c) 2013 OpenStack Foundation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from neutron.api import extensions from neutron.api.v2 import attributes as attr from neutron.common import exceptions # ExtraDHcpOpts Exceptions class ExtraDhcpOptNotFound(exceptions.NotFound): message = _("ExtraDhcpOpt %(id)s could not be found") class ExtraDhcpOptBadData(exceptions.InvalidInput): message = _("Invalid data format for extra-dhcp-opt: %(data)s") def _validate_list_of_dict_or_none(data, key_specs=None): if data is not None: if not isinstance(data, list): raise ExtraDhcpOptBadData(data=data) for d in data: msg = attr._validate_dict(d, key_specs) if msg: raise ExtraDhcpOptBadData(data=msg) attr.validators['type:list_of_dict_or_none'] = _validate_list_of_dict_or_none # Attribute Map EXTRADHCPOPTS = 'extra_dhcp_opts' # Common definitions for maximum string field length DHCP_OPT_NAME_MAX_LEN = 64 DHCP_OPT_VALUE_MAX_LEN = 255 EXTENDED_ATTRIBUTES_2_0 = { 'ports': { EXTRADHCPOPTS: {'allow_post': True, 'allow_put': True, 'is_visible': True, 'default': None, 'validate': { 'type:list_of_dict_or_none': { 'id': {'type:uuid': None, 'required': False}, 'opt_name': {'type:not_empty_string': DHCP_OPT_NAME_MAX_LEN, 'required': True}, 'opt_value': {'type:not_empty_string_or_none': DHCP_OPT_VALUE_MAX_LEN, 'required': True}, 'ip_version': {'convert_to': attr.convert_to_int, 'type:values': [4, 6], 'required': False}}}}}} class Extra_dhcp_opt(extensions.ExtensionDescriptor): @classmethod def 
get_name(cls): return "Neutron Extra DHCP opts" @classmethod def get_alias(cls): return "extra_dhcp_opt" @classmethod def get_description(cls): return ("Extra options configuration for DHCP. " "For example PXE boot options to DHCP clients can " "be specified (e.g. tftp-server, server-ip-address, " "bootfile-name)") @classmethod def get_namespace(cls): return "http://docs.openstack.org/ext/neutron/extra_dhcp_opt/api/v1.0" @classmethod def get_updated(cls): return "2013-03-17T12:00:00-00:00" def get_extended_resources(self, version): if version == "2.0": return EXTENDED_ATTRIBUTES_2_0 else: return {}
apache-2.0
pombreda/iris-panel
iris/core/migrations/0004_remove_redundant_packs.py
7
15985
# -*- coding: utf-8 -*- # This file is part of IRIS: Infrastructure and Release Information System # # Copyright (C) 2013-2015 Intel Corporation # # IRIS is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # version 2.0 as published by the Free Software Foundation. #pylint: skip-file import datetime from south.db import db from south.v2 import DataMigration from django.db import models class Migration(DataMigration): def forwards(self, orm): "Write your forwards methods here." # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." Package = orm['core.Package'] Backup = orm['core.PackageBackup'] prev = None to_be_del = [] for pack in Package.objects.order_by('name', 'id').all(): # for packages with the same name, # only keep the one which has the smallest id if prev is None or prev.name != pack.name: prev = pack else: to_be_del.append(pack.id) Backup.objects.filter(pid__in=to_be_del).update(isdel=True) Package.objects.filter(id__in=to_be_del).delete() def backwards(self, orm): "Write your backwards methods here." 
Package = orm['core.Package'] Backup = orm['core.PackageBackup'] to_be_add = [Package(id=bak.pid, name=bak.name, gittree_id=bak.tid) for bak in Backup.objects.filter(isdel=True).all()] Package.objects.bulk_create(to_be_add) Backup.objects.filter(isdel=True).update(isdel=False) models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], 
{'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '225'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'core.domain': { 'Meta': {'object_name': 'Domain'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, 'core.domainrole': { 'Meta': {'object_name': 'DomainRole', '_ormbases': [u'auth.Group']}, 'domain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.Domain']"}), u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}) }, 'core.gittree': { 'Meta': {'object_name': 'GitTree'}, 'gitpath': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'licenses': 
('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.License']", 'symmetrical': 'False'}), 'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"}) }, 'core.gittreerole': { 'Meta': {'object_name': 'GitTreeRole', '_ormbases': [u'auth.Group']}, 'gittree': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'role_set'", 'to': "orm['core.GitTree']"}), u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}) }, 'core.image': { 'Meta': {'object_name': 'Image'}, 'arch': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}), 'target': ('django.db.models.fields.TextField', [], {}) }, 'core.imagebuild': { 'Meta': {'object_name': 'ImageBuild'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Image']"}), 'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '8'}) }, 'core.license': { 'Meta': {'object_name': 'License'}, 'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'shortname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'text': ('django.db.models.fields.TextField', [], {}) }, 'core.log': { 'Meta': {'object_name': 
'Log'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) }, 'core.package': { 'Meta': {'unique_together': "(('name', 'gittree'),)", 'object_name': 'Package'}, 'gittree': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.GitTree']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, 'core.packagebackup': { 'Meta': {'object_name': 'PackageBackup'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'isdel': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'pid': ('django.db.models.fields.IntegerField', [], {}), 'tid': ('django.db.models.fields.IntegerField', [], {}) }, 'core.packagebuild': { 'Meta': {'object_name': 'PackageBuild'}, 'arch': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Package']"}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '8'}), 'target': ('django.db.models.fields.TextField', [], {}) }, 'core.product': { 'Meta': {'object_name': 'Product'}, 'description': ('django.db.models.fields.TextField', [], {}), 'gittrees': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.GitTree']", 'symmetrical': 'False'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, 'core.productrole': { 'Meta': {'object_name': 
'ProductRole', '_ormbases': [u'auth.Group']}, u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']"}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}) }, 'core.subdomain': { 'Meta': {'unique_together': "(('name', 'domain'),)", 'object_name': 'SubDomain'}, 'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Domain']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}) }, 'core.subdomainrole': { 'Meta': {'object_name': 'SubDomainRole', '_ormbases': [u'auth.Group']}, u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'subdomain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SubDomain']"}) }, 'core.submission': { 'Meta': {'object_name': 'Submission'}, 'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'commit': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'gittree': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.GitTree']", 'symmetrical': 'False', 'blank': 'True'}), 'ibuilds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.ImageBuild']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'db_index': 'True'}), 'pbuilds': ('django.db.models.fields.related.ManyToManyField', [], 
{'to': "orm['core.PackageBuild']", 'symmetrical': 'False', 'blank': 'True'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}), 'submitters': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'}), 'testresults': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.TestResult']", 'symmetrical': 'False', 'blank': 'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, 'core.submissiongroup': { 'Meta': {'object_name': 'SubmissionGroup'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'db_index': 'True'}), 'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '16'}), 'submissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Submission']", 'symmetrical': 'False'}), 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, 'core.testresult': { 'Meta': {'object_name': 'TestResult'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'log': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Log']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'status': 
('django.db.models.fields.CharField', [], {'max_length': '16'}) }, 'core.userparty': { 'Meta': {'object_name': 'UserParty', '_ormbases': [u'auth.Group']}, u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'party': ('django.db.models.fields.CharField', [], {'max_length': '15'}) }, 'core.userprofile': { 'Meta': {'object_name': 'UserProfile'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['core'] symmetrical = True
gpl-2.0
adriansuhov/lis-test
WS2012R2/lisa/Infrastructure/lisa-parser/tests/test_test_run.py
10
2745
from unittest import TestCase from os import remove, path import create_files from nose.tools import assert_equal from lisa_parser.test_run import TestRun class TestTestRun(TestCase): def setUp(self): self.test_run = TestRun(skip_vm_check=True, checkpoint_name=False) self.xml_file = path.join(path.dirname(__file__), 'test.xml') self.log_file = path.join(path.dirname(__file__), 'test.log') create_files.create_xml_file(self.xml_file) create_files.create_ica_file(self.log_file) def tearDown(self): remove(self.xml_file) remove(self.log_file) def test_update_from_xml(self): self.test_run.update_from_xml(self.xml_file) assert_equal(self.test_run.suite, 'Network') assert_equal(self.test_run.vms['vmname'].vm_name, 'vmname') assert_equal(self.test_run.vms['vmname'].os, 'linux') assert_equal(self.test_run.vms['vmname'].hv_server, 'localhost') assert_equal(self.test_run.test_cases['external'].name, 'external') assert_equal(self.test_run.test_cases['external'].covered_cases, 'NET-02') def test_update_from_ica(self): self.test_run.update_from_xml(self.xml_file) self.test_run.update_from_ica(self.log_file) assert_equal(self.test_run.timestamp, '01/01/2016 21:21:21') assert_equal(self.test_run.log_path, 'path_to_logs') assert_equal(self.test_run.lis_version, '4.4.21-64-default') assert_equal(self.test_run.test_cases['external'].results['vmname'], 'success') assert_equal(self.test_run.vms['vmname'].host_os, 'microsoft windows server 2012') assert_equal(self.test_run.vms['vmname'].hv_server, 'localhost') assert_equal(self.test_run.vms['vmname'].location, 'Hyper-V') def test_update_parse_for_insertion(self): self.test_run.update_from_xml(self.xml_file) self.test_run.update_from_ica(self.log_file) insertion_list = self.test_run.parse_for_db_insertion() assert_equal(insertion_list[0]['LogPath'], 'path_to_logs') assert_equal(insertion_list[0]['TestID'], 'NET-02') assert_equal(insertion_list[0]['TestLocation'], 'Hyper-V') assert_equal(insertion_list[0]['HostName'], 'localhost') 
assert_equal(insertion_list[0]['HostVersion'], 'microsoft windows server 2012') assert_equal(insertion_list[0]['GuestOSType'], 'linux') assert_equal(insertion_list[0]['LISVersion'], '4.4.21-64-default') assert_equal(insertion_list[0]['TestCaseName'], 'external') assert_equal(insertion_list[0]['TestResult'], 'success') assert_equal(insertion_list[0]['TestArea'], 'Network') assert_equal(insertion_list[0]['TestDate'], '20160101')
apache-2.0
40223101/2015cda_0505
static/Brython3.1.0-20150301-090019/Lib/unittest/main.py
739
10385
"""Unittest main program""" import sys import optparse import os from . import loader, runner from .signals import installHandler __unittest = True FAILFAST = " -f, --failfast Stop on first failure\n" CATCHBREAK = " -c, --catch Catch control-C and display results\n" BUFFEROUTPUT = " -b, --buffer Buffer stdout and stderr during test runs\n" USAGE_AS_MAIN = """\ Usage: %(progName)s [options] [tests] Options: -h, --help Show this message -v, --verbose Verbose output -q, --quiet Minimal output %(failfast)s%(catchbreak)s%(buffer)s Examples: %(progName)s test_module - run tests from test_module %(progName)s module.TestClass - run tests from module.TestClass %(progName)s module.Class.test_method - run specified test method [tests] can be a list of any number of test modules, classes and test methods. Alternative Usage: %(progName)s discover [options] Options: -v, --verbose Verbose output %(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default) -p pattern Pattern to match test files ('test*.py' default) -t directory Top level directory of project (default to start directory) For test discovery all test modules must be importable from the top level directory of the project. """ USAGE_FROM_MODULE = """\ Usage: %(progName)s [options] [test] [...] Options: -h, --help Show this message -v, --verbose Verbose output -q, --quiet Minimal output %(failfast)s%(catchbreak)s%(buffer)s Examples: %(progName)s - run default set of tests %(progName)s MyTestSuite - run suite 'MyTestSuite' %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething %(progName)s MyTestCase - run all 'test*' test methods in MyTestCase """ def _convert_name(name): # on Linux / Mac OS X 'foo.PY' is not importable, but on # Windows it is. Simpler to do a case insensitive match # a better check would be to check that the name is a # valid Python module name. 
if os.path.isfile(name) and name.lower().endswith('.py'): if os.path.isabs(name): rel_path = os.path.relpath(name, os.getcwd()) if os.path.isabs(rel_path) or rel_path.startswith(os.pardir): return name name = rel_path # on Windows both '\' and '/' are used as path # separators. Better to replace both than rely on os.path.sep return name[:-3].replace('\\', '.').replace('/', '.') return name def _convert_names(names): return [_convert_name(name) for name in names] class TestProgram(object): """A command-line program that runs a set of tests; this is primarily for making test modules conveniently executable. """ USAGE = USAGE_FROM_MODULE # defaults for testing failfast = catchbreak = buffer = progName = warnings = None def __init__(self, module='__main__', defaultTest=None, argv=None, testRunner=None, testLoader=loader.defaultTestLoader, exit=True, verbosity=1, failfast=None, catchbreak=None, buffer=None, warnings=None): if isinstance(module, str): self.module = __import__(module) for part in module.split('.')[1:]: self.module = getattr(self.module, part) else: self.module = module if argv is None: argv = sys.argv self.exit = exit self.failfast = failfast self.catchbreak = catchbreak self.verbosity = verbosity self.buffer = buffer if warnings is None and not sys.warnoptions: # even if DreprecationWarnings are ignored by default # print them anyway unless other warnings settings are # specified by the warnings arg or the -W python flag self.warnings = 'default' else: # here self.warnings is set either to the value passed # to the warnings args or to None. # If the user didn't pass a value self.warnings will # be None. This means that the behavior is unchanged # and depends on the values passed to -W. 
self.warnings = warnings self.defaultTest = defaultTest self.testRunner = testRunner self.testLoader = testLoader self.progName = os.path.basename(argv[0]) self.parseArgs(argv) self.runTests() def usageExit(self, msg=None): if msg: print(msg) usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '', 'buffer': ''} if self.failfast != False: usage['failfast'] = FAILFAST if self.catchbreak != False: usage['catchbreak'] = CATCHBREAK if self.buffer != False: usage['buffer'] = BUFFEROUTPUT print(self.USAGE % usage) sys.exit(2) def parseArgs(self, argv): if ((len(argv) > 1 and argv[1].lower() == 'discover') or (len(argv) == 1 and self.module is None)): self._do_discovery(argv[2:]) return parser = self._getOptParser() options, args = parser.parse_args(argv[1:]) self._setAttributesFromOptions(options) if len(args) == 0 and self.module is None: # this allows "python -m unittest -v" to still work for # test discovery. This means -c / -b / -v / -f options will # be handled twice, which is harmless but not ideal. self._do_discovery(argv[1:]) return if len(args) == 0 and self.defaultTest is None: # createTests will load tests from self.module self.testNames = None elif len(args) > 0: self.testNames = _convert_names(args) if __name__ == '__main__': # to support python -m unittest ... 
self.module = None else: self.testNames = (self.defaultTest,) self.createTests() def createTests(self): if self.testNames is None: self.test = self.testLoader.loadTestsFromModule(self.module) else: self.test = self.testLoader.loadTestsFromNames(self.testNames, self.module) def _getOptParser(self): import optparse parser = optparse.OptionParser() parser.prog = self.progName parser.add_option('-v', '--verbose', dest='verbose', default=False, help='Verbose output', action='store_true') parser.add_option('-q', '--quiet', dest='quiet', default=False, help='Quiet output', action='store_true') if self.failfast != False: parser.add_option('-f', '--failfast', dest='failfast', default=False, help='Stop on first fail or error', action='store_true') if self.catchbreak != False: parser.add_option('-c', '--catch', dest='catchbreak', default=False, help='Catch ctrl-C and display results so far', action='store_true') if self.buffer != False: parser.add_option('-b', '--buffer', dest='buffer', default=False, help='Buffer stdout and stderr during tests', action='store_true') return parser def _setAttributesFromOptions(self, options): # only set options from the parsing here # if they weren't set explicitly in the constructor if self.failfast is None: self.failfast = options.failfast if self.catchbreak is None: self.catchbreak = options.catchbreak if self.buffer is None: self.buffer = options.buffer if options.verbose: self.verbosity = 2 elif options.quiet: self.verbosity = 0 def _addDiscoveryOptions(self, parser): parser.add_option('-s', '--start-directory', dest='start', default='.', help="Directory to start discovery ('.' 
default)") parser.add_option('-p', '--pattern', dest='pattern', default='test*.py', help="Pattern to match tests ('test*.py' default)") parser.add_option('-t', '--top-level-directory', dest='top', default=None, help='Top level directory of project (defaults to start directory)') def _do_discovery(self, argv, Loader=None): if Loader is None: Loader = lambda: self.testLoader # handle command line args for test discovery self.progName = '%s discover' % self.progName parser = self._getOptParser() self._addDiscoveryOptions(parser) options, args = parser.parse_args(argv) if len(args) > 3: self.usageExit() for name, value in zip(('start', 'pattern', 'top'), args): setattr(options, name, value) self._setAttributesFromOptions(options) start_dir = options.start pattern = options.pattern top_level_dir = options.top loader = Loader() self.test = loader.discover(start_dir, pattern, top_level_dir) def runTests(self): if self.catchbreak: installHandler() if self.testRunner is None: self.testRunner = runner.TextTestRunner if isinstance(self.testRunner, type): try: testRunner = self.testRunner(verbosity=self.verbosity, failfast=self.failfast, buffer=self.buffer, warnings=self.warnings) except TypeError: # didn't accept the verbosity, buffer or failfast arguments testRunner = self.testRunner() else: # it is assumed to be a TestRunner instance testRunner = self.testRunner self.result = testRunner.run(self.test) if self.exit: sys.exit(not self.result.wasSuccessful()) main = TestProgram
gpl-3.0
yiqingj/work
scons/scons-local-2.3.0/SCons/Tool/packaging/__init__.py
11
10772
"""SCons.Tool.Packaging SCons Packaging Tool. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. __revision__ = "src/engine/SCons/Tool/packaging/__init__.py 2013/03/03 09:48:35 garyo" import SCons.Environment from SCons.Variables import * from SCons.Errors import * from SCons.Util import is_List, make_path_relative from SCons.Warnings import warn, Warning import os, imp import SCons.Defaults __all__ = [ 'src_targz', 'src_tarbz2', 'src_zip', 'tarbz2', 'targz', 'zip', 'rpm', 'msi', 'ipk' ] # # Utility and Builder function # def Tag(env, target, source, *more_tags, **kw_tags): """ Tag a file with the given arguments, just sets the accordingly named attribute on the file object. 
TODO: FIXME """ if not target: target=source first_tag=None else: first_tag=source if first_tag: kw_tags[first_tag[0]] = '' if len(kw_tags) == 0 and len(more_tags) == 0: raise UserError("No tags given.") # XXX: sanity checks for x in more_tags: kw_tags[x] = '' if not SCons.Util.is_List(target): target=[target] else: # hmm, sometimes the target list, is a list of a list # make sure it is flattened prior to processing. # TODO: perhaps some bug ?!? target=env.Flatten(target) for t in target: for (k,v) in kw_tags.items(): # all file tags have to start with PACKAGING_, so we can later # differentiate between "normal" object attributes and the # packaging attributes. As the user should not be bothered with # that, the prefix will be added here if missing. #if not k.startswith('PACKAGING_'): if k[:10] != 'PACKAGING_': k='PACKAGING_'+k setattr(t, k, v) def Package(env, target=None, source=None, **kw): """ Entry point for the package tool. """ # check if we need to find the source files ourself if not source: source = env.FindInstalledFiles() if len(source)==0: raise UserError("No source for Package() given") # decide which types of packages shall be built. Can be defined through # four mechanisms: command line argument, keyword argument, # environment argument and default selection( zip or tar.gz ) in that # order. try: kw['PACKAGETYPE']=env['PACKAGETYPE'] except KeyError: pass if not kw.get('PACKAGETYPE'): from SCons.Script import GetOption kw['PACKAGETYPE'] = GetOption('package_type') if kw['PACKAGETYPE'] == None: if 'Tar' in env['BUILDERS']: kw['PACKAGETYPE']='targz' elif 'Zip' in env['BUILDERS']: kw['PACKAGETYPE']='zip' else: raise UserError("No type for Package() given") PACKAGETYPE=kw['PACKAGETYPE'] if not is_List(PACKAGETYPE): PACKAGETYPE=PACKAGETYPE.split(',') # load the needed packagers. 
def load_packager(type): try: file,path,desc=imp.find_module(type, __path__) return imp.load_module(type, file, path, desc) except ImportError, e: raise EnvironmentError("packager %s not available: %s"%(type,str(e))) packagers=list(map(load_packager, PACKAGETYPE)) # set up targets and the PACKAGEROOT try: # fill up the target list with a default target name until the PACKAGETYPE # list is of the same size as the target list. if not target: target = [] size_diff = len(PACKAGETYPE)-len(target) default_name = "%(NAME)s-%(VERSION)s" if size_diff>0: default_target = default_name%kw target.extend( [default_target]*size_diff ) if 'PACKAGEROOT' not in kw: kw['PACKAGEROOT'] = default_name%kw except KeyError, e: raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] ) # setup the source files source=env.arg2nodes(source, env.fs.Entry) # call the packager to setup the dependencies. targets=[] try: for packager in packagers: t=[target.pop(0)] t=packager.package(env,t,source, **kw) targets.extend(t) assert( len(target) == 0 ) except KeyError, e: raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ % (e.args[0],packager.__name__) ) except TypeError, e: # this exception means that a needed argument for the packager is # missing. As our packagers get their "tags" as named function # arguments we need to find out which one is missing. from inspect import getargspec args,varargs,varkw,defaults=getargspec(packager.package) if defaults!=None: args=args[:-len(defaults)] # throw away arguments with default values args.remove('env') args.remove('target') args.remove('source') # now remove any args for which we have a value in kw. 
args=[x for x in args if x not in kw] if len(args)==0: raise # must be a different error, so reraise elif len(args)==1: raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\ % (args[0],packager.__name__) ) else: raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\ % (", ".join(args),packager.__name__) ) target=env.arg2nodes(target, env.fs.Entry) targets.extend(env.Alias( 'package', targets )) return targets # # SCons tool initialization functions # added = None def generate(env): from SCons.Script import AddOption global added if not added: added = 1 AddOption('--package-type', dest='package_type', default=None, type="string", action="store", help='The type of package to create.') try: env['BUILDERS']['Package'] env['BUILDERS']['Tag'] except KeyError: env['BUILDERS']['Package'] = Package env['BUILDERS']['Tag'] = Tag def exists(env): return 1 # XXX def options(opts): opts.AddVariables( EnumVariable( 'PACKAGETYPE', 'the type of package to create.', None, allowed_values=list(map( str, __all__ )), ignorecase=2 ) ) # # Internal utility functions # def copy_attr(f1, f2): """ copies the special packaging file attributes from f1 to f2. """ #pattrs = [x for x in dir(f1) if not hasattr(f2, x) and\ # x.startswith('PACKAGING_')] copyit = lambda x: not hasattr(f2, x) and x[:10] == 'PACKAGING_' pattrs = list(filter(copyit, dir(f1))) for attr in pattrs: setattr(f2, attr, getattr(f1, attr)) def putintopackageroot(target, source, env, pkgroot, honor_install_location=1): """ Uses the CopyAs builder to copy all source files to the directory given in pkgroot. If honor_install_location is set and the copied source file has an PACKAGING_INSTALL_LOCATION attribute, the PACKAGING_INSTALL_LOCATION is used as the new name of the source file under pkgroot. The source file will not be copied if it is already under the the pkgroot directory. All attributes of the source file will be copied to the new file. """ # make sure the packageroot is a Dir object. 
if SCons.Util.is_String(pkgroot): pkgroot=env.Dir(pkgroot) if not SCons.Util.is_List(source): source=[source] new_source = [] for file in source: if SCons.Util.is_String(file): file = env.File(file) if file.is_under(pkgroot): new_source.append(file) else: if hasattr(file, 'PACKAGING_INSTALL_LOCATION') and\ honor_install_location: new_name=make_path_relative(file.PACKAGING_INSTALL_LOCATION) else: new_name=make_path_relative(file.get_path()) new_file=pkgroot.File(new_name) new_file=env.CopyAs(new_file, file)[0] copy_attr(file, new_file) new_source.append(new_file) return (target, new_source) def stripinstallbuilder(target, source, env): """ strips the install builder action from the source list and stores the final installation location as the "PACKAGING_INSTALL_LOCATION" of the source of the source file. This effectively removes the final installed files from the source list while remembering the installation location. It also warns about files which have no install builder attached. """ def has_no_install_location(file): return not (file.has_builder() and\ hasattr(file.builder, 'name') and\ (file.builder.name=="InstallBuilder" or\ file.builder.name=="InstallAsBuilder")) if len(list(filter(has_no_install_location, source))): warn(Warning, "there are files to package which have no\ InstallBuilder attached, this might lead to irreproducible packages") n_source=[] for s in source: if has_no_install_location(s): n_source.append(s) else: for ss in s.sources: n_source.append(ss) copy_attr(s, ss) setattr(ss, 'PACKAGING_INSTALL_LOCATION', s.get_path()) return (target, n_source) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
lgpl-2.1
yfried/ansible
lib/ansible/modules/network/nxos/nxos_igmp_snooping.py
41
8914
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = ''' --- module: nxos_igmp_snooping extends_documentation_fragment: nxos version_added: "2.2" short_description: Manages IGMP snooping global configuration. description: - Manages IGMP snooping global configuration. author: - Jason Edelman (@jedelman8) - Gabriele Gerbino (@GGabriele) notes: - Tested against NXOSv 7.3.(0)D1(1) on VIRL - When C(state=default), params will be reset to a default state. - C(group_timeout) also accepts I(never) as an input. options: snooping: description: - Enables/disables IGMP snooping on the switch. type: bool group_timeout: description: - Group membership timeout value for all VLANs on the device. Accepted values are integer in range 1-10080, I(never) and I(default). link_local_grp_supp: description: - Global link-local groups suppression. type: bool report_supp: description: - Global IGMPv1/IGMPv2 Report Suppression. type: bool v3_report_supp: description: - Global IGMPv3 Report Suppression and Proxy Reporting. type: bool state: description: - Manage the state of the resource. 
default: present choices: ['present','default'] ''' EXAMPLES = ''' # ensure igmp snooping params supported in this module are in there default state - nxos_igmp_snooping: state: default # ensure following igmp snooping params are in the desired state - nxos_igmp_snooping: group_timeout: never snooping: true link_local_grp_supp: false optimize_mcast_flood: false report_supp: true v3_report_supp: true ''' RETURN = ''' commands: description: command sent to the device returned: always type: list sample: ["ip igmp snooping link-local-groups-suppression", "ip igmp snooping group-timeout 50", "no ip igmp snooping report-suppression", "no ip igmp snooping v3-report-suppression", "no ip igmp snooping"] ''' import re from ansible.module_utils.network.nxos.nxos import get_config, load_config, run_commands from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule def execute_show_command(command, module, output='text'): command = { 'command': command, 'output': output, } return run_commands(module, [command]) def flatten_list(command_lists): flat_command_list = [] for command in command_lists: if isinstance(command, list): flat_command_list.extend(command) else: flat_command_list.append(command) return flat_command_list def get_group_timeout(config): match = re.search(r' Group timeout configured: (\S+)', config, re.M) if match: value = match.group(1) else: value = '' return value def get_igmp_snooping(module): command = 'show ip igmp snooping' existing = {} try: body = execute_show_command(command, module, output='json')[0] except IndexError: body = [] if body: snooping = str(body.get('enabled')).lower() if snooping == 'true' or snooping == 'enabled': existing['snooping'] = True else: existing['snooping'] = False report_supp = str(body.get('grepsup')).lower() if report_supp == 'true' or report_supp == 'enabled': existing['report_supp'] = True else: existing['report_supp'] = False link_local_grp_supp 
= str(body.get('glinklocalgrpsup')).lower() if link_local_grp_supp == 'true' or link_local_grp_supp == 'enabled': existing['link_local_grp_supp'] = True else: existing['link_local_grp_supp'] = False v3_report_supp = str(body.get('gv3repsup')).lower() if v3_report_supp == 'true' or v3_report_supp == 'enabled': existing['v3_report_supp'] = True else: existing['v3_report_supp'] = False command = 'show ip igmp snooping' body = execute_show_command(command, module)[0] if body: existing['group_timeout'] = get_group_timeout(body) return existing def config_igmp_snooping(delta, existing, default=False): CMDS = { 'snooping': 'ip igmp snooping', 'group_timeout': 'ip igmp snooping group-timeout {}', 'link_local_grp_supp': 'ip igmp snooping link-local-groups-suppression', 'v3_report_supp': 'ip igmp snooping v3-report-suppression', 'report_supp': 'ip igmp snooping report-suppression' } commands = [] command = None for key, value in delta.items(): if value: if default and key == 'group_timeout': if existing.get(key): command = 'no ' + CMDS.get(key).format(existing.get(key)) elif value == 'default' and key == 'group_timeout': if existing.get(key): command = 'no ' + CMDS.get(key).format(existing.get(key)) else: command = CMDS.get(key).format(value) else: command = 'no ' + CMDS.get(key).format(value) if command: commands.append(command) command = None return commands def get_igmp_snooping_defaults(): group_timeout = 'dummy' report_supp = True link_local_grp_supp = True v3_report_supp = False snooping = True args = dict(snooping=snooping, link_local_grp_supp=link_local_grp_supp, report_supp=report_supp, v3_report_supp=v3_report_supp, group_timeout=group_timeout) default = dict((param, value) for (param, value) in args.items() if value is not None) return default def main(): argument_spec = dict( snooping=dict(required=False, type='bool'), group_timeout=dict(required=False, type='str'), link_local_grp_supp=dict(required=False, type='bool'), report_supp=dict(required=False, 
type='bool'), v3_report_supp=dict(required=False, type='bool'), state=dict(choices=['present', 'default'], default='present'), ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = list() check_args(module, warnings) results = {'changed': False, 'commands': [], 'warnings': warnings} snooping = module.params['snooping'] link_local_grp_supp = module.params['link_local_grp_supp'] report_supp = module.params['report_supp'] v3_report_supp = module.params['v3_report_supp'] group_timeout = module.params['group_timeout'] state = module.params['state'] args = dict(snooping=snooping, link_local_grp_supp=link_local_grp_supp, report_supp=report_supp, v3_report_supp=v3_report_supp, group_timeout=group_timeout) proposed = dict((param, value) for (param, value) in args.items() if value is not None) existing = get_igmp_snooping(module) commands = [] if state == 'present': delta = dict( set(proposed.items()).difference(existing.items()) ) if delta: command = config_igmp_snooping(delta, existing) if command: commands.append(command) elif state == 'default': proposed = get_igmp_snooping_defaults() delta = dict( set(proposed.items()).difference(existing.items()) ) if delta: command = config_igmp_snooping(delta, existing, default=True) if command: commands.append(command) cmds = flatten_list(commands) if cmds: results['changed'] = True if not module.check_mode: load_config(module, cmds) if 'configure' in cmds: cmds.pop(0) results['commands'] = cmds module.exit_json(**results) if __name__ == '__main__': main()
gpl-3.0
labsquare/CuteVariant
cutevariant/gui/formatters/cutestyle.py
1
5115
# Standard imports import re # Qt imports from PySide2.QtGui import QColor, QFont, QBrush, QPainter, QPen, QFontMetrics, QPalette from PySide2.QtCore import Qt, QModelIndex, QRect, QUrl from PySide2.QtWidgets import QStyleOptionViewItem, QStyle # Custom imports from cutevariant.gui.formatter import Formatter from cutevariant.gui import FIcon import cutevariant.commons as cm class CutestyleFormatter(Formatter): DISPLAY_NAME = "Cute style" BASE_COLOR = {"A": "green", "C": "red", "T": "red"} SO_COLOR = { # https://natsukis.livejournal.com/2048.html "missense_variant": "#bb96ff", "synonymous_variant": "#67eebd", "stop_gained": "#ed6d79", "stop_lost": "#ed6d79", "frameshift_variant": "#ff89b5", } ACMG_ICON = { "0": FIcon(0xF03A1, "lightgray"), "1": FIcon(0xF03A4, "#71e096"), "2": FIcon(0xF03A7, "#71e096"), "3": FIcon(0xF03AA, "#f5a26f"), "4": FIcon(0xF03AD, "#ed6d79"), "5": FIcon(0xF03B1, "#ed6d79"), } IMPACT_COLOR = { "HIGH": "#ff4b5c", "LOW": "#056674", "MODERATE": "#ecad7d", "MODIFIER": "#ecad7d", } FAV_ICON = {0: FIcon(0xF00C3), 1: FIcon(0xF00C0)} # Cache genotype icons # Values in gt field as keys (str), FIcon as values GENOTYPE_ICONS = {key: FIcon(val) for key, val in cm.GENOTYPE_ICONS.items()} def __init__(self): super().__init__() def paint( self, painter: QPainter, option: QStyleOptionViewItem, index: QModelIndex ): """Apply graphical formatting to each item in each displayed column in the view""" brush = QBrush() pen = QPen() font = QFont() if option.state & QStyle.State_Selected: text_color = option.palette.color(QPalette.Normal, QPalette.BrightText) else: text_color = option.palette.color(QPalette.Normal, QPalette.Text) is_selected = option.state & QStyle.State_Selected # Default theme color pen.setColor(text_color) field_name = self.field_name(index).lower() value = self.value(index) if value == "NULL": font.setItalic(True) pen.setColor("lightgray") # Colour bases (default color is the one of the current theme) if (field_name == "ref" or field_name == 
"alt") and ( value in ("A", "C", "G", "T") and not is_selected ): pen.setColor( self.BASE_COLOR.get(value, option.palette.color(QPalette.WindowText)) ) if field_name == "impact" and not is_selected: font.setBold(True) pen.setColor(self.IMPACT_COLOR.get(value, self.IMPACT_COLOR["MODIFIER"])) if field_name == "gene" and not is_selected: pen.setColor("#6a9fca") if field_name == "classification": icon = self.ACMG_ICON.get(str(value), self.ACMG_ICON["0"]) self.draw_icon(painter, option.rect, icon) return if field_name == "favorite": icon = self.FAV_ICON.get(int(value), self.FAV_ICON[0]) self.draw_icon(painter, option.rect, icon) return if field_name == "hgvs_c": font.setBold(True) m = re.search(r"([cnm]\..+)", str(value)) if m: value = m.group(1) if field_name == "hgvs_p": font.setBold(True) m = re.search(r"(p\..+)", str(value)) if m: value = m.group(1) if re.match(r"sample\[.+\]\.gt", field_name): icon = self.GENOTYPE_ICONS.get(int(value), self.GENOTYPE_ICONS[-1]) self.draw_icon(painter, option.rect, icon) return if field_name == "consequence": values = str(self.value(index)).split("&") metrics = QFontMetrics(font) x = option.rect.x() + 5 # y = option.rect.center().y() for value in values: width = metrics.width(value) height = metrics.height() rect = QRect(x, 0, width + 15, height + 10) rect.moveCenter(option.rect.center()) rect.moveLeft(x) painter.setFont(font) painter.setClipRect(option.rect, Qt.IntersectClip) painter.setBrush(QBrush(QColor(self.SO_COLOR.get(value, "#90d4f7")))) painter.setPen(Qt.NoPen) painter.drawRoundedRect(rect, 3, 3) painter.setPen(QPen(QColor("white"))) painter.drawText(rect, Qt.AlignCenter | Qt.AlignVCenter, value) x += width + 20 painter.setClipping(False) return # if field_name == "rsid": # self.draw_url( # painter, option.rect, value, QUrl("http://www.google.fr"), index # ) # return painter.setBrush(brush) painter.setPen(pen) painter.setFont(font) painter.drawText(option.rect, option.displayAlignment, value)
gpl-3.0
dimid/ansible-modules-extras
cloud/vmware/vmware_dvs_portgroup.py
31
6867
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Joseph Callen <jcallen () csc.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: vmware_dvs_portgroup short_description: Create or remove a Distributed vSwitch portgroup description: - Create or remove a Distributed vSwitch portgroup version_added: 2.0 author: "Joseph Callen (@jcpowermac)" notes: - Tested on vSphere 5.5 requirements: - "python >= 2.6" - PyVmomi options: portgroup_name: description: - The name of the portgroup that is to be created or deleted required: True switch_name: description: - The name of the distributed vSwitch the port group should be created on. 
required: True vlan_id: description: - The VLAN ID that should be configured with the portgroup required: True num_ports: description: - The number of ports the portgroup should contain required: True portgroup_type: description: - See VMware KB 1022312 regarding portgroup types required: True choices: - 'earlyBinding' - 'lateBinding' - 'ephemeral' extends_documentation_fragment: vmware.documentation ''' EXAMPLES = ''' - name: Create Management portgroup local_action: module: vmware_dvs_portgroup hostname: vcenter_ip_or_hostname username: vcenter_username password: vcenter_password portgroup_name: Management switch_name: dvSwitch vlan_id: 123 num_ports: 120 portgroup_type: earlyBinding state: present ''' try: from pyVmomi import vim, vmodl HAS_PYVMOMI = True except ImportError: HAS_PYVMOMI = False class VMwareDvsPortgroup(object): def __init__(self, module): self.module = module self.dvs_portgroup = None self.switch_name = self.module.params['switch_name'] self.portgroup_name = self.module.params['portgroup_name'] self.vlan_id = self.module.params['vlan_id'] self.num_ports = self.module.params['num_ports'] self.portgroup_type = self.module.params['portgroup_type'] self.dv_switch = None self.state = self.module.params['state'] self.content = connect_to_api(module) def process_state(self): try: dvspg_states = { 'absent': { 'present': self.state_destroy_dvspg, 'absent': self.state_exit_unchanged, }, 'present': { 'update': self.state_update_dvspg, 'present': self.state_exit_unchanged, 'absent': self.state_create_dvspg, } } dvspg_states[self.state][self.check_dvspg_state()]() except vmodl.RuntimeFault as runtime_fault: self.module.fail_json(msg=runtime_fault.msg) except vmodl.MethodFault as method_fault: self.module.fail_json(msg=method_fault.msg) except Exception as e: self.module.fail_json(msg=str(e)) def create_port_group(self): config = vim.dvs.DistributedVirtualPortgroup.ConfigSpec() config.name = self.portgroup_name config.numPorts = self.num_ports # 
vim.VMwareDVSPortSetting() does not exist in the pyvmomi documentation # but this is the correct managed object type. config.defaultPortConfig = vim.VMwareDVSPortSetting() # vim.VmwareDistributedVirtualSwitchVlanIdSpec() does not exist in the # pyvmomi documentation but this is the correct managed object type config.defaultPortConfig.vlan = vim.VmwareDistributedVirtualSwitchVlanIdSpec() config.defaultPortConfig.vlan.inherited = False config.defaultPortConfig.vlan.vlanId = self.vlan_id config.type = self.portgroup_type spec = [config] task = self.dv_switch.AddDVPortgroup_Task(spec) changed, result = wait_for_task(task) return changed, result def state_destroy_dvspg(self): changed = True result = None if not self.module.check_mode: task = self.dvs_portgroup.Destroy_Task() changed, result = wait_for_task(task) self.module.exit_json(changed=changed, result=str(result)) def state_exit_unchanged(self): self.module.exit_json(changed=False) def state_update_dvspg(self): self.module.exit_json(changed=False, msg="Currently not implemented.") def state_create_dvspg(self): changed = True result = None if not self.module.check_mode: changed, result = self.create_port_group() self.module.exit_json(changed=changed, result=str(result)) def check_dvspg_state(self): self.dv_switch = find_dvs_by_name(self.content, self.switch_name) if self.dv_switch is None: raise Exception("A distributed virtual switch with name %s does not exist" % self.switch_name) self.dvs_portgroup = find_dvspg_by_name(self.dv_switch, self.portgroup_name) if self.dvs_portgroup is None: return 'absent' else: return 'present' def main(): argument_spec = vmware_argument_spec() argument_spec.update(dict(portgroup_name=dict(required=True, type='str'), switch_name=dict(required=True, type='str'), vlan_id=dict(required=True, type='int'), num_ports=dict(required=True, type='int'), portgroup_type=dict(required=True, choices=['earlyBinding', 'lateBinding', 'ephemeral'], type='str'), state=dict(default='present', 
choices=['present', 'absent'], type='str'))) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) if not HAS_PYVMOMI: module.fail_json(msg='pyvmomi is required for this module') vmware_dvs_portgroup = VMwareDvsPortgroup(module) vmware_dvs_portgroup.process_state() from ansible.module_utils.vmware import * from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
hydraplatform/hydra-base
hydra_base/db/alembic/versions/08f0ebe40290_resource_attr_id.py
1
5617
"""resource_attr_id Revision ID: 08f0ebe40290 Revises: a7896842f484 Create Date: 2018-04-16 14:49:24.202974 """ from alembic import op import sqlalchemy as sa import logging log = logging.getLogger(__name__) # revision identifiers, used by Alembic. revision = '08f0ebe40290' down_revision = 'a7896842f484' branch_labels = None depends_on = None def upgrade(): if op.get_bind().dialect.name == 'mysql': # ### tResourceAttrt try: op.alter_column('tResourceAttr', 'resource_attr_id', new_column_name='id', existing_type=sa.Integer(), primary_key=True, autoincrement=True, nullable=False) except Exception as e: log.exception(e) else: ## sqlite # ## tResourceAttr try: op.drop_table('tResourceAttr_new') except: log.info("tResourceAttr_new isn't there") try: # ## tResourceAttr op.create_table( 'tResourceAttr_new', sa.Column('id', sa.Integer(), primary_key=True, nullable=False), sa.Column('attr_id', sa.Integer(), sa.ForeignKey('tAttr.id'), nullable=False), sa.Column('ref_key', sa.String(60), nullable=False, index=True), sa.Column('network_id', sa.Integer(), sa.ForeignKey('tNetwork.id'), index=True, nullable=True,), sa.Column('project_id', sa.Integer(), sa.ForeignKey('tProject.id'), index=True, nullable=True,), sa.Column('node_id', sa.Integer(), sa.ForeignKey('tNode.id'), index=True, nullable=True), sa.Column('link_id', sa.Integer(), sa.ForeignKey('tLink.id'), index=True, nullable=True), sa.Column('group_id', sa.Integer(), sa.ForeignKey('tResourceGroup.id'), index=True, nullable=True), sa.Column('attr_is_var', sa.String(1), nullable=False, server_default=sa.text(u"'N'")), sa.Column('cr_date', sa.TIMESTAMP(), nullable=False, server_default=sa.text(u'CURRENT_TIMESTAMP')), sa.UniqueConstraint('network_id', 'attr_id', name = 'net_attr_1'), sa.UniqueConstraint('project_id', 'attr_id', name = 'proj_attr_1'), sa.UniqueConstraint('node_id', 'attr_id', name = 'node_attr_1'), sa.UniqueConstraint('link_id', 'attr_id', name = 'link_attr_1'), sa.UniqueConstraint('group_id', 'attr_id', name = 
'group_attr_1'), ) op.execute("insert into tResourceAttr_new (id, attr_id, ref_key, network_id, project_id, node_id, link_id, group_id, attr_is_var, cr_date) select resource_attr_id, attr_id, ref_key, network_id, project_id, node_id, link_id, group_id, attr_is_var, cr_date from tResourceAttr") op.rename_table('tResourceAttr','tResourceAttr_old') op.rename_table('tResourceAttr_new', 'tResourceAttr') op.drop_table('tResourceAttr_old') except Exception as e: log.exception(e) def downgrade(): if op.get_bind().dialect.name == 'mysql': # ### tProject try: op.alter_column('tResourceAttr', 'id', new_column_name='resource_attr_id', existing_type=sa.Integer(), primary_key=True, autoincrement=True, nullable=False) except Exception as e: log.exception(e) else: ## sqlite # ## tResourceAttr try: op.drop_table('tResourceAttr_new') except: log.info("tResourceAttr_new isn't there") try: # ## tResourceAttr op.create_table( 'tResourceAttr_new', sa.Column('resource_attr_id', sa.Integer(), primary_key=True, nullable=False), sa.Column('attr_id', sa.Integer(), sa.ForeignKey('tAttr.id'), nullable=False), sa.Column('ref_key', sa.String(60), nullable=False, index=True), sa.Column('network_id', sa.Integer(), sa.ForeignKey('tNetwork.id'), index=True, nullable=True,), sa.Column('project_id', sa.Integer(), sa.ForeignKey('tProject.id'), index=True, nullable=True,), sa.Column('node_id', sa.Integer(), sa.ForeignKey('tNode.id'), index=True, nullable=True), sa.Column('link_id', sa.Integer(), sa.ForeignKey('tLink.id'), index=True, nullable=True), sa.Column('group_id', sa.Integer(), sa.ForeignKey('tResourceGroup.id'), index=True, nullable=True), sa.Column('attr_is_var', sa.String(1), nullable=False, server_default=sa.text(u"'N'")), sa.Column('cr_date', sa.TIMESTAMP(), nullable=False, server_default=sa.text(u'CURRENT_TIMESTAMP')), sa.UniqueConstraint('network_id', 'attr_id', name = 'net_attr_1'), sa.UniqueConstraint('project_id', 'attr_id', name = 'proj_attr_1'), sa.UniqueConstraint('node_id', 
'attr_id', name = 'node_attr_1'), sa.UniqueConstraint('link_id', 'attr_id', name = 'link_attr_1'), sa.UniqueConstraint('group_id', 'attr_id', name = 'group_attr_1'), ) op.execute("insert into tResourceAttr_new (resource_attr_id, attr_id, ref_key, network_id, project_id, node_id, link_id, group_id, attr_is_var, cr_date) select id, attr_id, ref_key, network_id, project_id, node_id, link_id, group_id, attr_is_var, cr_date from tResourceAttr") op.rename_table('tResourceAttr','tResourceAttr_old') op.rename_table('tResourceAttr_new', 'tResourceAttr') op.drop_table('tResourceAttr_old') except Exception as e: log.exception(e)
lgpl-3.0
Tong-Chen/scikit-learn
examples/svm/plot_separating_hyperplane.py
12
1252
""" ========================================= SVM: Maximum margin separating hyperplane ========================================= Plot the maximum margin separating hyperplane within a two-class separable dataset using a Support Vector Machines classifier with linear kernel. """ print(__doc__) import numpy as np import pylab as pl from sklearn import svm # we create 40 separable points np.random.seed(0) X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]] Y = [0] * 20 + [1] * 20 # fit the model clf = svm.SVC(kernel='linear') clf.fit(X, Y) # get the separating hyperplane w = clf.coef_[0] a = -w[0] / w[1] xx = np.linspace(-5, 5) yy = a * xx - (clf.intercept_[0]) / w[1] # plot the parallels to the separating hyperplane that pass through the # support vectors b = clf.support_vectors_[0] yy_down = a * xx + (b[1] - a * b[0]) b = clf.support_vectors_[-1] yy_up = a * xx + (b[1] - a * b[0]) # plot the line, the points, and the nearest vectors to the plane pl.plot(xx, yy, 'k-') pl.plot(xx, yy_down, 'k--') pl.plot(xx, yy_up, 'k--') pl.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=80, facecolors='none') pl.scatter(X[:, 0], X[:, 1], c=Y, cmap=pl.cm.Paired) pl.axis('tight') pl.show()
bsd-3-clause
gangadharkadam/tailorerp
erpnext/setup/page/setup_wizard/install_fixtures.py
35
11749
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe import _ def install(country=None): records = [ # address template {'doctype':"Address Template", "country": country}, # item group {'doctype': 'Item Group', 'item_group_name': _('All Item Groups'), 'is_group': 'Yes', 'parent_item_group': ''}, {'doctype': 'Item Group', 'item_group_name': _('Products'), 'is_group': 'No', 'parent_item_group': _('All Item Groups'), "show_in_website": 1 }, {'doctype': 'Item Group', 'item_group_name': _('Raw Material'), 'is_group': 'No', 'parent_item_group': _('All Item Groups') }, {'doctype': 'Item Group', 'item_group_name': _('Services'), 'is_group': 'No', 'parent_item_group': _('All Item Groups') }, {'doctype': 'Item Group', 'item_group_name': _('Sub Assemblies'), 'is_group': 'No', 'parent_item_group': _('All Item Groups') }, {'doctype': 'Item Group', 'item_group_name': _('Consumable'), 'is_group': 'No', 'parent_item_group': _('All Item Groups') }, # deduction type {'doctype': 'Deduction Type', 'name': _('Income Tax'), 'description': _('Income Tax'), 'deduction_name': _('Income Tax')}, # earning type {'doctype': 'Earning Type', 'name': _('Basic'), 'description': _('Basic'), 'earning_name': _('Basic'), 'taxable': 'Yes'}, # expense claim type {'doctype': 'Expense Claim Type', 'name': _('Calls'), 'expense_type': _('Calls')}, {'doctype': 'Expense Claim Type', 'name': _('Food'), 'expense_type': _('Food')}, {'doctype': 'Expense Claim Type', 'name': _('Medical'), 'expense_type': _('Medical')}, {'doctype': 'Expense Claim Type', 'name': _('Others'), 'expense_type': _('Others')}, {'doctype': 'Expense Claim Type', 'name': _('Travel'), 'expense_type': _('Travel')}, # leave type {'doctype': 'Leave Type', 'leave_type_name': _('Casual Leave'), 'name': _('Casual Leave'), 'is_encash': 1, 'is_carry_forward': 1, 'max_days_allowed': '3', }, {'doctype': 'Leave 
Type', 'leave_type_name': _('Compensatory Off'), 'name': _('Compensatory Off'), 'is_encash': 0, 'is_carry_forward': 0, }, {'doctype': 'Leave Type', 'leave_type_name': _('Sick Leave'), 'name': _('Sick Leave'), 'is_encash': 0, 'is_carry_forward': 0, }, {'doctype': 'Leave Type', 'leave_type_name': _('Privilege Leave'), 'name': _('Privilege Leave'), 'is_encash': 0, 'is_carry_forward': 0, }, {'doctype': 'Leave Type', 'leave_type_name': _('Leave Without Pay'), 'name': _('Leave Without Pay'), 'is_encash': 0, 'is_carry_forward': 0, 'is_lwp':1}, # Employment Type {'doctype': 'Employment Type', 'employee_type_name': _('Full-time')}, {'doctype': 'Employment Type', 'employee_type_name': _('Part-time')}, {'doctype': 'Employment Type', 'employee_type_name': _('Probation')}, {'doctype': 'Employment Type', 'employee_type_name': _('Contract')}, {'doctype': 'Employment Type', 'employee_type_name': _('Commission')}, {'doctype': 'Employment Type', 'employee_type_name': _('Piecework')}, {'doctype': 'Employment Type', 'employee_type_name': _('Intern')}, {'doctype': 'Employment Type', 'employee_type_name': _('Apprentice')}, # Department {'doctype': 'Department', 'department_name': _('Accounts')}, {'doctype': 'Department', 'department_name': _('Marketing')}, {'doctype': 'Department', 'department_name': _('Sales')}, {'doctype': 'Department', 'department_name': _('Purchase')}, {'doctype': 'Department', 'department_name': _('Operations')}, {'doctype': 'Department', 'department_name': _('Production')}, {'doctype': 'Department', 'department_name': _('Dispatch')}, {'doctype': 'Department', 'department_name': _('Customer Service')}, {'doctype': 'Department', 'department_name': _('Human Resources')}, {'doctype': 'Department', 'department_name': _('Management')}, {'doctype': 'Department', 'department_name': _('Quality Management')}, {'doctype': 'Department', 'department_name': _('Research & Development')}, {'doctype': 'Department', 'department_name': _('Legal')}, # Designation {'doctype': 
'Designation', 'designation_name': _('CEO')}, {'doctype': 'Designation', 'designation_name': _('Manager')}, {'doctype': 'Designation', 'designation_name': _('Analyst')}, {'doctype': 'Designation', 'designation_name': _('Engineer')}, {'doctype': 'Designation', 'designation_name': _('Accountant')}, {'doctype': 'Designation', 'designation_name': _('Secretary')}, {'doctype': 'Designation', 'designation_name': _('Associate')}, {'doctype': 'Designation', 'designation_name': _('Administrative Officer')}, {'doctype': 'Designation', 'designation_name': _('Business Development Manager')}, {'doctype': 'Designation', 'designation_name': _('HR Manager')}, {'doctype': 'Designation', 'designation_name': _('Project Manager')}, {'doctype': 'Designation', 'designation_name': _('Head of Marketing and Sales')}, {'doctype': 'Designation', 'designation_name': _('Software Developer')}, {'doctype': 'Designation', 'designation_name': _('Designer')}, {'doctype': 'Designation', 'designation_name': _('Assistant')}, {'doctype': 'Designation', 'designation_name': _('Researcher')}, # territory {'doctype': 'Territory', 'territory_name': _('All Territories'), 'is_group': 'Yes', 'name': _('All Territories'), 'parent_territory': ''}, # customer group {'doctype': 'Customer Group', 'customer_group_name': _('All Customer Groups'), 'is_group': 'Yes', 'name': _('All Customer Groups'), 'parent_customer_group': ''}, {'doctype': 'Customer Group', 'customer_group_name': _('Individual'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')}, {'doctype': 'Customer Group', 'customer_group_name': _('Commercial'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')}, {'doctype': 'Customer Group', 'customer_group_name': _('Non Profit'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')}, {'doctype': 'Customer Group', 'customer_group_name': _('Government'), 'is_group': 'No', 'parent_customer_group': _('All Customer Groups')}, # supplier type {'doctype': 'Supplier 
Type', 'supplier_type': _('Services')}, {'doctype': 'Supplier Type', 'supplier_type': _('Local')}, {'doctype': 'Supplier Type', 'supplier_type': _('Raw Material')}, {'doctype': 'Supplier Type', 'supplier_type': _('Electrical')}, {'doctype': 'Supplier Type', 'supplier_type': _('Hardware')}, {'doctype': 'Supplier Type', 'supplier_type': _('Pharmaceutical')}, {'doctype': 'Supplier Type', 'supplier_type': _('Distributor')}, # Sales Person {'doctype': 'Sales Person', 'sales_person_name': _('Sales Team'), 'is_group': "Yes", "parent_sales_person": ""}, # UOM {'uom_name': _('Unit'), 'doctype': 'UOM', 'name': _('Unit'), "must_be_whole_number": 1}, {'uom_name': _('Box'), 'doctype': 'UOM', 'name': _('Box'), "must_be_whole_number": 1}, {'uom_name': _('Kg'), 'doctype': 'UOM', 'name': _('Kg')}, {'uom_name': _('Nos'), 'doctype': 'UOM', 'name': _('Nos'), "must_be_whole_number": 1}, {'uom_name': _('Pair'), 'doctype': 'UOM', 'name': _('Pair'), "must_be_whole_number": 1}, {'uom_name': _('Set'), 'doctype': 'UOM', 'name': _('Set'), "must_be_whole_number": 1}, {'uom_name': _('Hour'), 'doctype': 'UOM', 'name': _('Hour')}, {'uom_name': _('Minute'), 'doctype': 'UOM', 'name': _('Minute')}, # Mode of Payment {'doctype': 'Mode of Payment', 'mode_of_payment': 'Check' if country=="United States" else _('Cheque')}, {'doctype': 'Mode of Payment', 'mode_of_payment': _('Cash')}, {'doctype': 'Mode of Payment', 'mode_of_payment': _('Credit Card')}, {'doctype': 'Mode of Payment', 'mode_of_payment': _('Wire Transfer')}, {'doctype': 'Mode of Payment', 'mode_of_payment': _('Bank Draft')}, # Activity Type {'doctype': 'Activity Type', 'activity_type': _('Planning')}, {'doctype': 'Activity Type', 'activity_type': _('Research')}, {'doctype': 'Activity Type', 'activity_type': _('Proposal Writing')}, {'doctype': 'Activity Type', 'activity_type': _('Execution')}, {'doctype': 'Activity Type', 'activity_type': _('Communication')}, # Industry Type {'doctype': 'Industry Type', 'industry': _('Accounting')}, 
{'doctype': 'Industry Type', 'industry': _('Advertising')}, {'doctype': 'Industry Type', 'industry': _('Aerospace')}, {'doctype': 'Industry Type', 'industry': _('Agriculture')}, {'doctype': 'Industry Type', 'industry': _('Airline')}, {'doctype': 'Industry Type', 'industry': _('Apparel & Accessories')}, {'doctype': 'Industry Type', 'industry': _('Automotive')}, {'doctype': 'Industry Type', 'industry': _('Banking')}, {'doctype': 'Industry Type', 'industry': _('Biotechnology')}, {'doctype': 'Industry Type', 'industry': _('Broadcasting')}, {'doctype': 'Industry Type', 'industry': _('Brokerage')}, {'doctype': 'Industry Type', 'industry': _('Chemical')}, {'doctype': 'Industry Type', 'industry': _('Computer')}, {'doctype': 'Industry Type', 'industry': _('Consulting')}, {'doctype': 'Industry Type', 'industry': _('Consumer Products')}, {'doctype': 'Industry Type', 'industry': _('Cosmetics')}, {'doctype': 'Industry Type', 'industry': _('Defense')}, {'doctype': 'Industry Type', 'industry': _('Department Stores')}, {'doctype': 'Industry Type', 'industry': _('Education')}, {'doctype': 'Industry Type', 'industry': _('Electronics')}, {'doctype': 'Industry Type', 'industry': _('Energy')}, {'doctype': 'Industry Type', 'industry': _('Entertainment & Leisure')}, {'doctype': 'Industry Type', 'industry': _('Executive Search')}, {'doctype': 'Industry Type', 'industry': _('Financial Services')}, {'doctype': 'Industry Type', 'industry': _('Food, Beverage & Tobacco')}, {'doctype': 'Industry Type', 'industry': _('Grocery')}, {'doctype': 'Industry Type', 'industry': _('Health Care')}, {'doctype': 'Industry Type', 'industry': _('Internet Publishing')}, {'doctype': 'Industry Type', 'industry': _('Investment Banking')}, {'doctype': 'Industry Type', 'industry': _('Legal')}, {'doctype': 'Industry Type', 'industry': _('Manufacturing')}, {'doctype': 'Industry Type', 'industry': _('Motion Picture & Video')}, {'doctype': 'Industry Type', 'industry': _('Music')}, {'doctype': 'Industry Type', 
'industry': _('Newspaper Publishers')}, {'doctype': 'Industry Type', 'industry': _('Online Auctions')}, {'doctype': 'Industry Type', 'industry': _('Pension Funds')}, {'doctype': 'Industry Type', 'industry': _('Pharmaceuticals')}, {'doctype': 'Industry Type', 'industry': _('Private Equity')}, {'doctype': 'Industry Type', 'industry': _('Publishing')}, {'doctype': 'Industry Type', 'industry': _('Real Estate')}, {'doctype': 'Industry Type', 'industry': _('Retail & Wholesale')}, {'doctype': 'Industry Type', 'industry': _('Securities & Commodity Exchanges')}, {'doctype': 'Industry Type', 'industry': _('Service')}, {'doctype': 'Industry Type', 'industry': _('Soap & Detergent')}, {'doctype': 'Industry Type', 'industry': _('Software')}, {'doctype': 'Industry Type', 'industry': _('Sports')}, {'doctype': 'Industry Type', 'industry': _('Technology')}, {'doctype': 'Industry Type', 'industry': _('Telecommunications')}, {'doctype': 'Industry Type', 'industry': _('Television')}, {'doctype': 'Industry Type', 'industry': _('Transportation')}, {'doctype': 'Industry Type', 'industry': _('Venture Capital')} ] from frappe.modules import scrub for r in records: doc = frappe.new_doc(r.get("doctype")) doc.update(r) # ignore mandatory for root parent_link_field = ("parent_" + scrub(doc.doctype)) if doc.meta.get_field(parent_link_field) and not doc.get(parent_link_field): doc.ignore_mandatory = True doc.insert()
agpl-3.0
benrudolph/commcare-hq
custom/ilsgateway/tanzania/handlers/delivered.py
1
2453
from datetime import datetime from corehq.apps.sms.api import send_sms_to_verified_number from custom.ilsgateway.tanzania.handlers import get_location from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler from custom.ilsgateway.models import SupplyPointStatus, SupplyPointStatusTypes, SupplyPointStatusValues from corehq.apps.users.models import CommCareUser from custom.ilsgateway.tanzania.reminders import DELIVERY_CONFIRM_DISTRICT, DELIVERY_PARTIAL_CONFIRM, DELIVERY_CONFIRM_CHILDREN class DeliveredHandler(KeywordHandler): def _send_delivery_alert_to_facilities(self, sp_name, location): locs = [c._id for c in location.children] users = filter(lambda u: u.location_id in locs, CommCareUser.by_domain(self.domain)) for user in users: if user.get_verified_number(): send_sms_to_verified_number(user.get_verified_number(), DELIVERY_CONFIRM_CHILDREN % {"district_name": sp_name}) def handle(self): location = get_location(self.domain, self.user, None) SupplyPointStatus.objects.create(supply_point=location['case']._id, status_type=SupplyPointStatusTypes.DELIVERY_FACILITY, status_value=SupplyPointStatusValues.RECEIVED, status_date=datetime.utcnow()) return False def help(self): location = get_location(self.domain, self.user, None) status_type = None if location['location'].location_type == 'FACILITY': status_type = SupplyPointStatusTypes.DELIVERY_FACILITY self.respond(DELIVERY_PARTIAL_CONFIRM) elif location['location'].location_type == 'DISTRICT': status_type = SupplyPointStatusTypes.DELIVERY_DISTRICT self._send_delivery_alert_to_facilities(location['case'].name, location['location']) self.respond(DELIVERY_CONFIRM_DISTRICT, contact_name=self.user.first_name + " " + self.user.last_name, facility_name=location['case'].name) SupplyPointStatus.objects.create(supply_point=location['case']._id, status_type=status_type, status_value=SupplyPointStatusValues.RECEIVED, status_date=datetime.utcnow()) return True
bsd-3-clause
abhilashnta/edx-platform
lms/djangoapps/shoppingcart/processors/tests/test_CyberSource.py
159
13163
""" Tests for the CyberSource processor handler """ from collections import OrderedDict from django.test import TestCase from django.test.utils import override_settings from django.conf import settings from student.tests.factories import UserFactory from shoppingcart.models import Order, OrderItem from shoppingcart.processors.helpers import get_processor_config from shoppingcart.processors.exceptions import ( CCProcessorException, CCProcessorSignatureException, CCProcessorDataException, CCProcessorWrongAmountException ) from shoppingcart.processors.CyberSource import ( render_purchase_form_html, process_postpay_callback, processor_hash, verify_signatures, sign, REASONCODE_MAP, record_purchase, get_processor_decline_html, get_processor_exception_html, payment_accepted, ) from mock import patch, Mock TEST_CC_PROCESSOR_NAME = "CyberSource" TEST_CC_PROCESSOR = { 'CyberSource': { 'SHARED_SECRET': 'secret', 'MERCHANT_ID': 'edx_test', 'SERIAL_NUMBER': '12345', 'ORDERPAGE_VERSION': '7', 'PURCHASE_ENDPOINT': '', 'microsites': { 'test_microsite': { 'SHARED_SECRET': 'secret_override', 'MERCHANT_ID': 'edx_test_override', 'SERIAL_NUMBER': '12345_override', 'ORDERPAGE_VERSION': '7', 'PURCHASE_ENDPOINT': '', } } } } def fakemicrosite(name, default=None): """ This is a test mocking function to return a microsite configuration """ if name == 'cybersource_config_key': return 'test_microsite' else: return None @override_settings( CC_PROCESSOR_NAME=TEST_CC_PROCESSOR_NAME, CC_PROCESSOR=TEST_CC_PROCESSOR ) class CyberSourceTests(TestCase): def test_override_settings(self): self.assertEqual(settings.CC_PROCESSOR['CyberSource']['MERCHANT_ID'], 'edx_test') self.assertEqual(settings.CC_PROCESSOR['CyberSource']['SHARED_SECRET'], 'secret') def test_microsite_no_override_settings(self): self.assertEqual(get_processor_config()['MERCHANT_ID'], 'edx_test') self.assertEqual(get_processor_config()['SHARED_SECRET'], 'secret') @patch("microsite_configuration.microsite.get_value", fakemicrosite) def 
test_microsite_override_settings(self): self.assertEqual(get_processor_config()['MERCHANT_ID'], 'edx_test_override') self.assertEqual(get_processor_config()['SHARED_SECRET'], 'secret_override') def test_hash(self): """ Tests the hash function. Basically just hardcodes the answer. """ self.assertEqual(processor_hash('test'), 'GqNJWF7X7L07nEhqMAZ+OVyks1Y=') self.assertEqual(processor_hash('edx '), '/KowheysqM2PFYuxVKg0P8Flfk4=') def test_sign_then_verify(self): """ "loopback" test: Tests the that the verify function verifies parameters signed by the sign function """ params = OrderedDict() params['amount'] = "12.34" params['currency'] = 'usd' params['orderPage_transactionType'] = 'sale' params['orderNumber'] = "567" verify_signatures(sign(params), signed_fields_key='orderPage_signedFields', full_sig_key='orderPage_signaturePublic') # if the above verify_signature fails it will throw an exception, so basically we're just # testing for the absence of that exception. the trivial assert below does that self.assertEqual(1, 1) def test_sign_then_verify_unicode(self): """ Similar to the test above, which loops back to the original. Testing to make sure we can handle unicode parameters """ params = { 'card_accountNumber': '1234', 'card_cardType': '001', 'billTo_firstName': u'\u2699', 'billTo_lastName': u"\u2603", 'orderNumber': '1', 'orderCurrency': 'usd', 'decision': 'ACCEPT', 'ccAuthReply_amount': '0.00' } verify_signatures(sign(params), signed_fields_key='orderPage_signedFields', full_sig_key='orderPage_signaturePublic') # if the above verify_signature fails it will throw an exception, so basically we're just # testing for the absence of that exception. 
the trivial assert below does that self.assertEqual(1, 1) def test_verify_exception(self): """ Tests that failure to verify raises the proper CCProcessorSignatureException """ params = OrderedDict() params['a'] = 'A' params['b'] = 'B' params['signedFields'] = 'A,B' params['signedDataPublicSignature'] = 'WONTVERIFY' with self.assertRaises(CCProcessorSignatureException): verify_signatures(params) def test_get_processor_decline_html(self): """ Tests the processor decline html message """ DECISION = 'REJECT' for code, reason in REASONCODE_MAP.iteritems(): params = { 'decision': DECISION, 'reasonCode': code, } html = get_processor_decline_html(params) self.assertIn(DECISION, html) self.assertIn(reason, html) self.assertIn(code, html) self.assertIn(settings.PAYMENT_SUPPORT_EMAIL, html) def test_get_processor_exception_html(self): """ Tests the processor exception html message """ for type in [CCProcessorSignatureException, CCProcessorWrongAmountException, CCProcessorDataException]: error_msg = "An exception message of with exception type {0}".format(str(type)) exception = type(error_msg) html = get_processor_exception_html(exception) self.assertIn(settings.PAYMENT_SUPPORT_EMAIL, html) self.assertIn('Sorry!', html) self.assertIn(error_msg, html) # test base case self.assertIn("EXCEPTION!", get_processor_exception_html(CCProcessorException())) def test_record_purchase(self): """ Tests record_purchase with good and without returned CCNum """ student1 = UserFactory() student1.save() student2 = UserFactory() student2.save() params_cc = {'card_accountNumber': '1234', 'card_cardType': '001', 'billTo_firstName': student1.first_name} params_nocc = {'card_accountNumber': '', 'card_cardType': '002', 'billTo_firstName': student2.first_name} order1 = Order.get_cart_for_user(student1) order2 = Order.get_cart_for_user(student2) record_purchase(params_cc, order1) record_purchase(params_nocc, order2) self.assertEqual(order1.bill_to_first, student1.first_name) 
self.assertEqual(order1.status, 'purchased') order2 = Order.objects.get(user=student2) self.assertEqual(order2.bill_to_first, student2.first_name) self.assertEqual(order2.status, 'purchased') def test_payment_accepted_invalid_dict(self): """ Tests exception is thrown when params to payment_accepted don't have required key or have an bad value """ baseline = { 'orderNumber': '1', 'orderCurrency': 'usd', 'decision': 'ACCEPT', } wrong = { 'orderNumber': 'k', } # tests for missing key for key in baseline: params = baseline.copy() del params[key] with self.assertRaises(CCProcessorDataException): payment_accepted(params) # tests for keys with value that can't be converted to proper type for key in wrong: params = baseline.copy() params[key] = wrong[key] with self.assertRaises(CCProcessorDataException): payment_accepted(params) def test_payment_accepted_order(self): """ Tests payment_accepted cases with an order """ student1 = UserFactory() student1.save() order1 = Order.get_cart_for_user(student1) params = { 'card_accountNumber': '1234', 'card_cardType': '001', 'billTo_firstName': student1.first_name, 'billTo_lastName': u"\u2603", 'orderNumber': str(order1.id), 'orderCurrency': 'usd', 'decision': 'ACCEPT', 'ccAuthReply_amount': '0.00' } # tests for an order number that doesn't match up params_bad_ordernum = params.copy() params_bad_ordernum['orderNumber'] = str(order1.id + 10) with self.assertRaises(CCProcessorDataException): payment_accepted(params_bad_ordernum) # tests for a reply amount of the wrong type params_wrong_type_amt = params.copy() params_wrong_type_amt['ccAuthReply_amount'] = 'ab' with self.assertRaises(CCProcessorDataException): payment_accepted(params_wrong_type_amt) # tests for a reply amount of the wrong type params_wrong_amt = params.copy() params_wrong_amt['ccAuthReply_amount'] = '1.00' with self.assertRaises(CCProcessorWrongAmountException): payment_accepted(params_wrong_amt) # tests for a not accepted order params_not_accepted = params.copy() 
params_not_accepted['decision'] = "REJECT" self.assertFalse(payment_accepted(params_not_accepted)['accepted']) # finally, tests an accepted order self.assertTrue(payment_accepted(params)['accepted']) @patch('shoppingcart.processors.CyberSource.render_to_string', autospec=True) def test_render_purchase_form_html(self, render): """ Tests the rendering of the purchase form """ student1 = UserFactory() student1.save() order1 = Order.get_cart_for_user(student1) item1 = OrderItem(order=order1, user=student1, unit_cost=1.0, line_cost=1.0) item1.save() render_purchase_form_html(order1) ((template, context), render_kwargs) = render.call_args self.assertEqual(template, 'shoppingcart/cybersource_form.html') self.assertDictContainsSubset({'amount': '1.00', 'currency': 'usd', 'orderPage_transactionType': 'sale', 'orderNumber': str(order1.id)}, context['params']) def test_process_postpay_exception(self): """ Tests the exception path of process_postpay_callback """ baseline = { 'orderNumber': '1', 'orderCurrency': 'usd', 'decision': 'ACCEPT', } # tests for missing key for key in baseline: params = baseline.copy() del params[key] result = process_postpay_callback(params) self.assertFalse(result['success']) self.assertIsNone(result['order']) self.assertIn('error_msg', result['error_html']) @patch('shoppingcart.processors.CyberSource.verify_signatures', Mock(return_value=True)) def test_process_postpay_accepted(self): """ Tests the ACCEPTED path of process_postpay """ student1 = UserFactory() student1.save() order1 = Order.get_cart_for_user(student1) params = { 'card_accountNumber': '1234', 'card_cardType': '001', 'billTo_firstName': student1.first_name, 'orderNumber': str(order1.id), 'orderCurrency': 'usd', 'decision': 'ACCEPT', 'ccAuthReply_amount': '0.00' } result = process_postpay_callback(params) self.assertTrue(result['success']) self.assertEqual(result['order'], order1) order1 = Order.objects.get(id=order1.id) # reload from DB to capture side-effect of 
process_postpay_callback self.assertEqual(order1.status, 'purchased') self.assertFalse(result['error_html']) @patch('shoppingcart.processors.CyberSource.verify_signatures', Mock(return_value=True)) def test_process_postpay_not_accepted(self): """ Tests the non-ACCEPTED path of process_postpay """ student1 = UserFactory() student1.save() order1 = Order.get_cart_for_user(student1) params = { 'card_accountNumber': '1234', 'card_cardType': '001', 'billTo_firstName': student1.first_name, 'orderNumber': str(order1.id), 'orderCurrency': 'usd', 'decision': 'REJECT', 'ccAuthReply_amount': '0.00', 'reasonCode': '207' } result = process_postpay_callback(params) self.assertFalse(result['success']) self.assertEqual(result['order'], order1) self.assertEqual(order1.status, 'cart') self.assertIn(REASONCODE_MAP['207'], result['error_html'])
agpl-3.0
Jandersoft/jander777-ghost
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/styles/emacs.py
364
2486
# -*- coding: utf-8 -*- """ pygments.styles.emacs ~~~~~~~~~~~~~~~~~~~~~ A highlighting style for Pygments, inspired by Emacs. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace class EmacsStyle(Style): """ The default style (inspired by Emacs 22). """ background_color = "#f8f8f8" default_style = "" styles = { Whitespace: "#bbbbbb", Comment: "italic #008800", Comment.Preproc: "noitalic", Comment.Special: "noitalic bold", Keyword: "bold #AA22FF", Keyword.Pseudo: "nobold", Keyword.Type: "bold #00BB00", Operator: "#666666", Operator.Word: "bold #AA22FF", Name.Builtin: "#AA22FF", Name.Function: "#00A000", Name.Class: "#0000FF", Name.Namespace: "bold #0000FF", Name.Exception: "bold #D2413A", Name.Variable: "#B8860B", Name.Constant: "#880000", Name.Label: "#A0A000", Name.Entity: "bold #999999", Name.Attribute: "#BB4444", Name.Tag: "bold #008000", Name.Decorator: "#AA22FF", String: "#BB4444", String.Doc: "italic", String.Interpol: "bold #BB6688", String.Escape: "bold #BB6622", String.Regex: "#BB6688", String.Symbol: "#B8860B", String.Other: "#008000", Number: "#666666", Generic.Heading: "bold #000080", Generic.Subheading: "bold #800080", Generic.Deleted: "#A00000", Generic.Inserted: "#00A000", Generic.Error: "#FF0000", Generic.Emph: "italic", Generic.Strong: "bold", Generic.Prompt: "bold #000080", Generic.Output: "#888", Generic.Traceback: "#04D", Error: "border:#FF0000" }
mit
anilanar/Debuggr
debuggr/helper/helper_27.py
5
1222
""" Helper module for Python version 2.7 - Ordered dictionaries - Encoding/decoding urls - Unicode - Exception handling (except Exception as e) """ import base64 from urllib import unquote, quote from collections import OrderedDict def modulename(): return "Helper module for Python version 2.7" def url_decode(uri): return unquote(uri) def url_encode(uri): return quote(uri) def new_dictionary(): return OrderedDict() def dictionary_keys(dictionary): return list(dictionary.keys()) def dictionary_values(dictionary): return list(dictionary.values()) def data_read(data): # Data for reading/receiving already a string in version 2.* return data def data_write(data): # Using string in version 2.* for sending/writing data return data def base64_decode(data): return base64.b64decode(data) def base64_encode(data): return base64.b64encode(data) def unicode_chr(code): return unichr(code) def unicode_string(string): if isinstance(string, unicode): return string return string.decode('utf8', 'replace') def is_digit(string): # Check if basestring (str, unicode) is digit return isinstance(string, basestring) and string.isdigit() def is_number(value): return isinstance(value, (int, long))
mit
wwitzel3/awx
awx/settings/production.py
1
4010
# Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. # Production settings for AWX project. # Python import os import copy import errno import sys import traceback # Django Split Settings from split_settings.tools import optional, include # Load default settings. from defaults import * # NOQA DEBUG = False TEMPLATE_DEBUG = DEBUG SQL_DEBUG = DEBUG # Clear database settings to force production environment to define them. DATABASES = {} # Clear the secret key to force production environment to define it. SECRET_KEY = None # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Production should only use minified JS for UI. USE_MINIFIED_JS = True # URL used by inventory script and callback plugin to access API. INTERNAL_API_URL = 'http://127.0.0.1:80' # Absolute filesystem path to the directory for job status stdout # This directory should not be web-accessible JOBOUTPUT_ROOT = '/var/lib/awx/job_status/' # The heartbeat file for the tower scheduler SCHEDULE_METADATA_LOCATION = '/var/lib/awx/.tower_cycle' # Ansible base virtualenv paths and enablement BASE_VENV_PATH = "/var/lib/awx/venv" ANSIBLE_VENV_PATH = os.path.join(BASE_VENV_PATH, "ansible") # Tower base virtualenv paths and enablement AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx") AWX_ISOLATED_USERNAME = 'awx' LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log' LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log' LOGGING['handlers']['dispatcher']['filename'] = '/var/log/tower/dispatcher.log' LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log' LOGGING['handlers']['fact_receiver']['filename'] = '/var/log/tower/fact_receiver.log' LOGGING['handlers']['management_playbooks']['filename'] = '/var/log/tower/management_playbooks.log' LOGGING['handlers']['system_tracking_migrations']['filename'] = 
'/var/log/tower/tower_system_tracking_migrations.log' LOGGING['handlers']['rbac_migrations']['filename'] = '/var/log/tower/tower_rbac_migrations.log' # Store a snapshot of default settings at this point before loading any # customizable config files. DEFAULTS_SNAPSHOT = {} this_module = sys.modules[__name__] for setting in dir(this_module): if setting == setting.upper(): DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting)) # Load settings from any .py files in the global conf.d directory specified in # the environment, defaulting to /etc/tower/conf.d/. settings_dir = os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/') settings_files = os.path.join(settings_dir, '*.py') # Load remaining settings from the global settings file specified in the # environment, defaulting to /etc/tower/settings.py. settings_file = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py') # Attempt to load settings from /etc/tower/settings.py first, followed by # /etc/tower/conf.d/*.py. try: include(settings_file, optional(settings_files), scope=locals()) except ImportError: traceback.print_exc() sys.exit(1) except IOError: from django.core.exceptions import ImproperlyConfigured included_file = locals().get('__included_file__', '') if (not included_file or included_file == settings_file): # The import doesn't always give permission denied, so try to open the # settings file directly. try: e = None open(settings_file) except IOError as e: pass if e and e.errno == errno.EACCES: SECRET_KEY = 'permission-denied' LOGGING = {} else: msg = 'No AWX configuration found at %s.' % settings_file msg += '\nDefine the AWX_SETTINGS_FILE environment variable to ' msg += 'specify an alternate path.' raise ImproperlyConfigured(msg) else: raise
apache-2.0
ForToffee/DinoTweet
tweet.py
1
1646
import tweepy import time import picamera from datetime import datetime # Consumer keys and access tokens, used for OAuth consumer_key = '<your value>' consumer_secret = '<your value>' access_token = '<your value>' access_token_secret = '<your value>' # OAuth process, using the keys and tokens auth = tweepy.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_token_secret) # Creation of the actual interface, using authentication api = tweepy.API(auth) searches = [["#eghamjam",1], ['#camjam',2], ['#wakedino',3], ['#tofftest',4]] searchstring = '' for pairs in searches: searchstring += pairs[0] + " OR " searchstring = searchstring[:-4] last_id = 0 def searchHashTag(): global last_id ret = api.search(q=searchstring,count=1, since_id=last_id) if len(ret) > 0: tweet = ret[0] print '===== START =====' print '{} (@{}) - {}\n{}'.format(tweet.author.name.encode('utf-8'), tweet.author.screen_name.encode('utf-8'), tweet.created_at, tweet.text.encode('utf-8')) print '====== END ======' last_id = tweet.id for pairs in searches: if pairs[0].lower() in tweet.text.lower(): #GOTCHA - case sensitive! return pairs[1] return -1 def sendTweet(): print 'Starting photo capture.... smile!' i = datetime.now() now = i.strftime('%Y%m%d-%H%M%S') photo_name = now + '.jpg' with picamera.PiCamera() as camera: camera.resolution = (800,600) camera.start_preview() time.sleep(3) camera.capture(photo_name) camera.stop_preview() photo_path = photo_name api.update_with_media(photo_path, status='This puny human made me Roar and Stomp!') print 'Tweet sent'
gpl-2.0
charlesvdv/servo
tests/wpt/css-tests/tools/pywebsocket/src/test/test_util.py
449
7538
#!/usr/bin/env python # # Copyright 2011, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Tests for util module.""" import os import random import sys import unittest import set_sys_path # Update sys.path to locate mod_pywebsocket module. from mod_pywebsocket import util _TEST_DATA_DIR = os.path.join(os.path.split(__file__)[0], 'testdata') class UtilTest(unittest.TestCase): """A unittest for util module.""" def test_get_stack_trace(self): self.assertEqual('None\n', util.get_stack_trace()) try: a = 1 / 0 # Intentionally raise exception. 
except Exception: trace = util.get_stack_trace() self.failUnless(trace.startswith('Traceback')) self.failUnless(trace.find('ZeroDivisionError') != -1) def test_prepend_message_to_exception(self): exc = Exception('World') self.assertEqual('World', str(exc)) util.prepend_message_to_exception('Hello ', exc) self.assertEqual('Hello World', str(exc)) def test_get_script_interp(self): cygwin_path = 'c:\\cygwin\\bin' cygwin_perl = os.path.join(cygwin_path, 'perl') self.assertEqual(None, util.get_script_interp( os.path.join(_TEST_DATA_DIR, 'README'))) self.assertEqual(None, util.get_script_interp( os.path.join(_TEST_DATA_DIR, 'README'), cygwin_path)) self.assertEqual('/usr/bin/perl -wT', util.get_script_interp( os.path.join(_TEST_DATA_DIR, 'hello.pl'))) self.assertEqual(cygwin_perl + ' -wT', util.get_script_interp( os.path.join(_TEST_DATA_DIR, 'hello.pl'), cygwin_path)) def test_hexify(self): self.assertEqual('61 7a 41 5a 30 39 20 09 0d 0a 00 ff', util.hexify('azAZ09 \t\r\n\x00\xff')) class RepeatedXorMaskerTest(unittest.TestCase): """A unittest for RepeatedXorMasker class.""" def test_mask(self): # Sample input e6,97,a5 is U+65e5 in UTF-8 masker = util.RepeatedXorMasker('\xff\xff\xff\xff') result = masker.mask('\xe6\x97\xa5') self.assertEqual('\x19\x68\x5a', result) masker = util.RepeatedXorMasker('\x00\x00\x00\x00') result = masker.mask('\xe6\x97\xa5') self.assertEqual('\xe6\x97\xa5', result) masker = util.RepeatedXorMasker('\xe6\x97\xa5\x20') result = masker.mask('\xe6\x97\xa5') self.assertEqual('\x00\x00\x00', result) def test_mask_twice(self): masker = util.RepeatedXorMasker('\x00\x7f\xff\x20') # mask[0], mask[1], ... will be used. result = masker.mask('\x00\x00\x00\x00\x00') self.assertEqual('\x00\x7f\xff\x20\x00', result) # mask[2], mask[0], ... will be used for the next call. 
result = masker.mask('\x00\x00\x00\x00\x00') self.assertEqual('\x7f\xff\x20\x00\x7f', result) def test_mask_large_data(self): masker = util.RepeatedXorMasker('mASk') original = ''.join([chr(i % 256) for i in xrange(1000)]) result = masker.mask(original) expected = ''.join( [chr((i % 256) ^ ord('mASk'[i % 4])) for i in xrange(1000)]) self.assertEqual(expected, result) masker = util.RepeatedXorMasker('MaSk') first_part = 'The WebSocket Protocol enables two-way communication.' result = masker.mask(first_part) self.assertEqual( '\x19\t6K\x1a\x0418"\x028\x0e9A\x03\x19"\x15<\x08"\rs\x0e#' '\x001\x07(\x12s\x1f:\x0e~\x1c,\x18s\x08"\x0c>\x1e#\x080\n9' '\x08<\x05c', result) second_part = 'It has two parts: a handshake and the data transfer.' result = masker.mask(second_part) self.assertEqual( "('K%\x00 K9\x16<K=\x00!\x1f>[s\nm\t2\x05)\x12;\n&\x04s\n#" "\x05s\x1f%\x04s\x0f,\x152K9\x132\x05>\x076\x19c", result) def get_random_section(source, min_num_chunks): chunks = [] bytes_chunked = 0 while bytes_chunked < len(source): chunk_size = random.randint( 1, min(len(source) / min_num_chunks, len(source) - bytes_chunked)) chunk = source[bytes_chunked:bytes_chunked + chunk_size] chunks.append(chunk) bytes_chunked += chunk_size return chunks class InflaterDeflaterTest(unittest.TestCase): """A unittest for _Inflater and _Deflater class.""" def test_inflate_deflate_default(self): input = b'hello' + '-' * 30000 + b'hello' inflater15 = util._Inflater(15) deflater15 = util._Deflater(15) inflater8 = util._Inflater(8) deflater8 = util._Deflater(8) compressed15 = deflater15.compress_and_finish(input) compressed8 = deflater8.compress_and_finish(input) inflater15.append(compressed15) inflater8.append(compressed8) self.assertNotEqual(compressed15, compressed8) self.assertEqual(input, inflater15.decompress(-1)) self.assertEqual(input, inflater8.decompress(-1)) def test_random_section(self): random.seed(a=0) source = ''.join( [chr(random.randint(0, 255)) for i in xrange(100 * 1024)]) chunked_input 
= get_random_section(source, 10) print "Input chunk sizes: %r" % [len(c) for c in chunked_input] deflater = util._Deflater(15) compressed = [] for chunk in chunked_input: compressed.append(deflater.compress(chunk)) compressed.append(deflater.compress_and_finish('')) chunked_expectation = get_random_section(source, 10) print ("Expectation chunk sizes: %r" % [len(c) for c in chunked_expectation]) inflater = util._Inflater(15) inflater.append(''.join(compressed)) for chunk in chunked_expectation: decompressed = inflater.decompress(len(chunk)) self.assertEqual(chunk, decompressed) self.assertEqual('', inflater.decompress(-1)) if __name__ == '__main__': unittest.main() # vi:sts=4 sw=4 et
mpl-2.0
xinjiguaike/edx-platform
common/lib/xmodule/xmodule/tests/test_validation.py
110
8467
""" Test xblock/validation.py """ import unittest from xblock.test.tools import assert_raises from xmodule.validation import StudioValidationMessage, StudioValidation from xblock.validation import Validation, ValidationMessage class StudioValidationMessageTest(unittest.TestCase): """ Tests for `ValidationMessage` """ def test_bad_parameters(self): """ Test that `TypeError`s are thrown for bad input parameters. """ with assert_raises(TypeError): StudioValidationMessage("unknown type", u"Unknown type info") with assert_raises(TypeError): StudioValidationMessage(StudioValidationMessage.WARNING, u"bad warning", action_class=0) with assert_raises(TypeError): StudioValidationMessage(StudioValidationMessage.WARNING, u"bad warning", action_runtime_event=0) with assert_raises(TypeError): StudioValidationMessage(StudioValidationMessage.WARNING, u"bad warning", action_label="Non-unicode string") def test_to_json(self): """ Test the `to_json` method. """ self.assertEqual( { "type": StudioValidationMessage.NOT_CONFIGURED, "text": u"Not Configured message", "action_label": u"Action label" }, StudioValidationMessage( StudioValidationMessage.NOT_CONFIGURED, u"Not Configured message", action_label=u"Action label" ).to_json() ) self.assertEqual( { "type": StudioValidationMessage.WARNING, "text": u"Warning message", "action_class": "class-for-action" }, StudioValidationMessage( StudioValidationMessage.WARNING, u"Warning message", action_class="class-for-action" ).to_json() ) self.assertEqual( { "type": StudioValidationMessage.ERROR, "text": u"Error message", "action_runtime_event": "do-fix-up" }, StudioValidationMessage( StudioValidationMessage.ERROR, u"Error message", action_runtime_event="do-fix-up" ).to_json() ) class StudioValidationTest(unittest.TestCase): """ Tests for `StudioValidation` class. 
""" def test_copy(self): validation = Validation("id") validation.add(ValidationMessage(ValidationMessage.ERROR, u"Error message")) studio_validation = StudioValidation.copy(validation) self.assertIsInstance(studio_validation, StudioValidation) self.assertFalse(studio_validation) self.assertEqual(1, len(studio_validation.messages)) expected = { "type": StudioValidationMessage.ERROR, "text": u"Error message" } self.assertEqual(expected, studio_validation.messages[0].to_json()) self.assertIsNone(studio_validation.summary) def test_copy_studio_validation(self): validation = StudioValidation("id") validation.add( StudioValidationMessage(StudioValidationMessage.WARNING, u"Warning message", action_label=u"Action Label") ) validation_copy = StudioValidation.copy(validation) self.assertFalse(validation_copy) self.assertEqual(1, len(validation_copy.messages)) expected = { "type": StudioValidationMessage.WARNING, "text": u"Warning message", "action_label": u"Action Label" } self.assertEqual(expected, validation_copy.messages[0].to_json()) def test_copy_errors(self): with assert_raises(TypeError): StudioValidation.copy("foo") def test_empty(self): """ Test that `empty` return True iff there are no messages and no summary. Also test the "bool" property of `Validation`. """ validation = StudioValidation("id") self.assertTrue(validation.empty) self.assertTrue(validation) validation.add(StudioValidationMessage(StudioValidationMessage.ERROR, u"Error message")) self.assertFalse(validation.empty) self.assertFalse(validation) validation_with_summary = StudioValidation("id") validation_with_summary.set_summary( StudioValidationMessage(StudioValidationMessage.NOT_CONFIGURED, u"Summary message") ) self.assertFalse(validation.empty) self.assertFalse(validation) def test_add_messages(self): """ Test the behavior of calling `add_messages` with combination of `StudioValidation` instances. 
""" validation_1 = StudioValidation("id") validation_1.set_summary(StudioValidationMessage(StudioValidationMessage.WARNING, u"Summary message")) validation_1.add(StudioValidationMessage(StudioValidationMessage.ERROR, u"Error message")) validation_2 = StudioValidation("id") validation_2.set_summary(StudioValidationMessage(StudioValidationMessage.ERROR, u"Summary 2 message")) validation_2.add(StudioValidationMessage(StudioValidationMessage.NOT_CONFIGURED, u"Not configured")) validation_1.add_messages(validation_2) self.assertEqual(2, len(validation_1.messages)) self.assertEqual(StudioValidationMessage.ERROR, validation_1.messages[0].type) self.assertEqual(u"Error message", validation_1.messages[0].text) self.assertEqual(StudioValidationMessage.NOT_CONFIGURED, validation_1.messages[1].type) self.assertEqual(u"Not configured", validation_1.messages[1].text) self.assertEqual(StudioValidationMessage.WARNING, validation_1.summary.type) self.assertEqual(u"Summary message", validation_1.summary.text) def test_set_summary_accepts_validation_message(self): """ Test that `set_summary` accepts a ValidationMessage. """ validation = StudioValidation("id") validation.set_summary(ValidationMessage(ValidationMessage.WARNING, u"Summary message")) self.assertEqual(ValidationMessage.WARNING, validation.summary.type) self.assertEqual(u"Summary message", validation.summary.text) def test_set_summary_errors(self): """ Test that `set_summary` errors if argument is not a ValidationMessage. """ with assert_raises(TypeError): StudioValidation("id").set_summary("foo") def test_to_json(self): """ Test the ability to serialize a `StudioValidation` instance. 
""" validation = StudioValidation("id") expected = { "xblock_id": "id", "messages": [], "empty": True } self.assertEqual(expected, validation.to_json()) validation.add( StudioValidationMessage( StudioValidationMessage.ERROR, u"Error message", action_label=u"Action label", action_class="edit-button" ) ) validation.add( StudioValidationMessage( StudioValidationMessage.NOT_CONFIGURED, u"Not configured message", action_label=u"Action label", action_runtime_event="make groups" ) ) validation.set_summary( StudioValidationMessage( StudioValidationMessage.WARNING, u"Summary message", action_label=u"Summary label", action_runtime_event="fix everything" ) ) # Note: it is important to test all the expected strings here because the client-side model depends on them # (for instance, "warning" vs. using the xblock constant ValidationMessageTypes.WARNING). expected = { "xblock_id": "id", "messages": [ {"type": "error", "text": u"Error message", "action_label": u"Action label", "action_class": "edit-button"}, {"type": "not-configured", "text": u"Not configured message", "action_label": u"Action label", "action_runtime_event": "make groups"} ], "summary": {"type": "warning", "text": u"Summary message", "action_label": u"Summary label", "action_runtime_event": "fix everything"}, "empty": False } self.assertEqual(expected, validation.to_json())
agpl-3.0
quickresolve/accel.ai
flask-aws/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/exceptions.py
515
5599
from __future__ import absolute_import # Base Exceptions class HTTPError(Exception): "Base exception used by this module." pass class HTTPWarning(Warning): "Base warning used by this module." pass class PoolError(HTTPError): "Base exception for errors caused within a pool." def __init__(self, pool, message): self.pool = pool HTTPError.__init__(self, "%s: %s" % (pool, message)) def __reduce__(self): # For pickling purposes. return self.__class__, (None, None) class RequestError(PoolError): "Base exception for PoolErrors that have associated URLs." def __init__(self, pool, url, message): self.url = url PoolError.__init__(self, pool, message) def __reduce__(self): # For pickling purposes. return self.__class__, (None, self.url, None) class SSLError(HTTPError): "Raised when SSL certificate fails in an HTTPS connection." pass class ProxyError(HTTPError): "Raised when the connection to a proxy fails." pass class DecodeError(HTTPError): "Raised when automatic decoding based on Content-Type fails." pass class ProtocolError(HTTPError): "Raised when something unexpected happens mid-request/response." pass #: Renamed to ProtocolError but aliased for backwards compatibility. ConnectionError = ProtocolError # Leaf Exceptions class MaxRetryError(RequestError): """Raised when the maximum number of retries is exceeded. :param pool: The connection pool :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` :param string url: The requested Url :param exceptions.Exception reason: The underlying error """ def __init__(self, pool, url, reason=None): self.reason = reason message = "Max retries exceeded with url: %s (Caused by %r)" % ( url, reason) RequestError.__init__(self, pool, url, message) class HostChangedError(RequestError): "Raised when an existing pool gets a request for a foreign host." 
def __init__(self, pool, url, retries=3): message = "Tried to open a foreign host with url: %s" % url RequestError.__init__(self, pool, url, message) self.retries = retries class TimeoutStateError(HTTPError): """ Raised when passing an invalid state to a timeout """ pass class TimeoutError(HTTPError): """ Raised when a socket timeout error occurs. Catching this error will catch both :exc:`ReadTimeoutErrors <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`. """ pass class ReadTimeoutError(TimeoutError, RequestError): "Raised when a socket timeout occurs while receiving data from a server" pass # This timeout error does not have a URL attached and needs to inherit from the # base HTTPError class ConnectTimeoutError(TimeoutError): "Raised when a socket timeout occurs while connecting to a server" pass class NewConnectionError(ConnectTimeoutError, PoolError): "Raised when we fail to establish a new connection. Usually ECONNREFUSED." pass class EmptyPoolError(PoolError): "Raised when a pool runs out of connections and no more are allowed." pass class ClosedPoolError(PoolError): "Raised when a request enters a pool after the pool has been closed." pass class LocationValueError(ValueError, HTTPError): "Raised when there is something wrong with a given URL input." pass class LocationParseError(LocationValueError): "Raised when get_host or similar fails to parse the URL input." def __init__(self, location): message = "Failed to parse: %s" % location HTTPError.__init__(self, message) self.location = location class ResponseError(HTTPError): "Used as a container for an error reason supplied in a MaxRetryError." GENERIC_ERROR = 'too many error responses' SPECIFIC_ERROR = 'too many {status_code} error responses' class SecurityWarning(HTTPWarning): "Warned when perfoming security reducing actions" pass class SubjectAltNameWarning(SecurityWarning): "Warned when connecting to a host with a certificate missing a SAN." 
pass class InsecureRequestWarning(SecurityWarning): "Warned when making an unverified HTTPS request." pass class SystemTimeWarning(SecurityWarning): "Warned when system time is suspected to be wrong" pass class InsecurePlatformWarning(SecurityWarning): "Warned when certain SSL configuration is not available on a platform." pass class SNIMissingWarning(HTTPWarning): "Warned when making a HTTPS request without SNI available." pass class DependencyWarning(HTTPWarning): """ Warned when an attempt is made to import a module with missing optional dependencies. """ pass class ResponseNotChunked(ProtocolError, ValueError): "Response needs to be chunked in order to read it as chunks." pass class ProxySchemeUnknown(AssertionError, ValueError): "ProxyManager does not support the supplied scheme" # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. def __init__(self, scheme): message = "Not supported proxy scheme %s" % scheme super(ProxySchemeUnknown, self).__init__(message) class HeaderParsingError(HTTPError): "Raised by assert_header_parsing, but we convert it to a log.warning statement." def __init__(self, defects, unparsed_data): message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data) super(HeaderParsingError, self).__init__(message)
mit
commtrack/temp-aquatest
apps/receiver/uploadhandler.py
1
7217
try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import os import sys import email import logging from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.uploadedfile import TemporaryUploadedFile, InMemoryUploadedFile from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers class LegacyXFormUploadParsingHandler(FileUploadHandler): """ HQ's version of an upload handler that takes the raw_post_data of a text or multipar/mixed blob and populates the request.FILES variable. This is still a little hacky in how it uses the request.FILES, but the gist is is, that it'll split out the multipart submission into key/value pairs in the request.FILES. When you access request.FILES, django will loop through the available upload handlers to see which one returns something interesting. Prior to this, we would try to access the raw_post_data and parse it by hand. However with trying to support a legacy + multipart/form based system, checking the request.FILES would destroy raw_post_data which was helpful for our control flow. This version of the upload handler will attempt to parse the xform blob and put it into the request.FILES, while putting the original raw data in parameter raw_post_data. 
""" def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Takes raw input that is of a legacy j2me submission for CommCare and parse it into the request.FILES Following the django documentation, the return is a tuple of self.request, querydict<> """ self.activated = True files = {} body = input_data.read() preamble = "Content-type: %s\n" % META['CONTENT_TYPE'] preamble += "Content-length: %d\n\n" % content_length parsed_message = email.message_from_string(preamble + body) for part in parsed_message.walk(): try: if part.get_content_type() == 'multipart/mixed': logging.debug("Multipart part") #this is the main wrapper, this shouldn't ever happen else: content_type = part.get_content_type() if content_type.startswith('text/') or content_type.startswith('multipart/form-data'): uri = 'legacy_xform' else: logging.debug("non XML section: %s" % part['Content-ID']) uri = part['Content-ID'] filename= os.path.basename(uri) payload = part.get_payload().strip() fstream = StringIO(payload) files[filename] = InMemoryUploadedFile( file = fstream, field_name = filename, name=filename, content_type=content_type, size=len(payload), charset = None ) except Exception, e: type, value, tb = sys.exc_info() logging.error("Legacy blob handling error") return if len(files.keys()) == 0: return else: #we've got something and we're going to return the dictionary. #for safety's sake, we'll put in the original raw post for the view to save it just like old times files['raw_post_data'] = body return (self.request, files) def receive_data_chunk(self, raw_data, start): """ Add the data to the StringIO file. """ if self.activated: self.file.write(raw_data) else: return raw_data def file_complete(self, file_size): """ Return a file object if we're activated. """ pass class LegacyXFormUploadBlobHandler(FileUploadHandler): """HQ's version of an upload handler that takes the raw_post_data of a text or multipart/mixed blob and populates the request.FILES variable. 
This is still a little hacky in how it uses the request.FILES, but the gist is, that it'll split out the multipart submission into key/value pairs in the request.FILES. When you access request.FILES, django will loop through the available upload handlers to see which one returns something interesting. Prior to this, we would try to access the raw_post_data and parse it by hand. However with trying to support a legacy + multipart/form based system, checking the request.FILES would destroy raw_post_data which was helpful for our control flow. This version of the upload handler just turns around the raw input_data and puts it in request.FILES, and does no processing. """ def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None): """ Takes raw input that is of a legacy j2me submission for CommCare and parse it into the request.FILES Following the django documentation, the return is a tuple of self.request, querydict<> """ self.activated = True files = {} rawdata = input_data.read() files['raw_post_data'] = InMemoryUploadedFile( file = StringIO(rawdata), field_name = 'raw_post_data', name='raw_post_data', content_type=META['CONTENT_TYPE'], size=len(rawdata), charset = None ) return (self.request, files) def receive_data_chunk(self, raw_data, start): """ Add the data to the StringIO file. """ if self.activated: self.file.write(raw_data) else: return raw_data def file_complete(self, file_size): """ Return a file object if we're activated. """ pass
bsd-3-clause
gtolan/mrkttrdr
node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
2767
2174
# Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Applies a fix to CR LF TAB handling in xml.dom. Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 Working around this: http://bugs.python.org/issue5752 TODO(bradnelson): Consider dropping this when we drop XP support. """ import xml.dom.minidom def _Replacement_write_data(writer, data, is_attrib=False): """Writes datachars to writer.""" data = data.replace("&", "&amp;").replace("<", "&lt;") data = data.replace("\"", "&quot;").replace(">", "&gt;") if is_attrib: data = data.replace( "\r", "&#xD;").replace( "\n", "&#xA;").replace( "\t", "&#x9;") writer.write(data) def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): # indent = current indentation # addindent = indentation to add to higher levels # newl = newline string writer.write(indent+"<" + self.tagName) attrs = self._get_attributes() a_names = attrs.keys() a_names.sort() for a_name in a_names: writer.write(" %s=\"" % a_name) _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True) writer.write("\"") if self.childNodes: writer.write(">%s" % newl) for node in self.childNodes: node.writexml(writer, indent + addindent, addindent, newl) writer.write("%s</%s>%s" % (indent, self.tagName, newl)) else: writer.write("/>%s" % newl) class XmlFix(object): """Object to manage temporary patching of xml.dom.minidom.""" def __init__(self): # Preserve current xml.dom.minidom functions. self.write_data = xml.dom.minidom._write_data self.writexml = xml.dom.minidom.Element.writexml # Inject replacement versions of a function and a method. 
xml.dom.minidom._write_data = _Replacement_write_data xml.dom.minidom.Element.writexml = _Replacement_writexml def Cleanup(self): if self.write_data: xml.dom.minidom._write_data = self.write_data xml.dom.minidom.Element.writexml = self.writexml self.write_data = None def __del__(self): self.Cleanup()
mit
drawks/ansible
lib/ansible/modules/network/ironware/ironware_facts.py
38
19930
#!/usr/bin/python # # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: ironware_facts version_added: "2.5" author: "Paul Baker (@paulquack)" short_description: Collect facts from devices running Extreme Ironware description: - Collects a base set of device facts from a remote device that is running Ironware. This module prepends all of the base network fact keys with C(ansible_net_<fact>). The facts module will always collect a base set of facts from the device and can enable or disable collection of additional facts. extends_documentation_fragment: ironware notes: - Tested against Ironware 5.8e options: gather_subset: description: - When supplied, this argument will restrict the facts collected to a given subset. Possible values for this argument include all, hardware, config, mpls and interfaces. Can specify a list of values to include a larger subset. Values can also be used with an initial C(M(!)) to specify that a specific subset should not be collected. 
required: false default: ['!config','!mpls'] """ EXAMPLES = """ # Collect all facts from the device - ironware_facts: gather_subset: all # Collect only the config and default facts - ironware_facts: gather_subset: - config # Do not collect hardware facts - ironware_facts: gather_subset: - "!hardware" """ RETURN = """ ansible_net_gather_subset: description: The list of fact subsets collected from the device returned: always type: list # default ansible_net_model: description: The model name returned from the device returned: always type: str ansible_net_serialnum: description: The serial number of the remote device returned: always type: str ansible_net_version: description: The operating system version running on the remote device returned: always type: str # hardware ansible_net_filesystems: description: All file system names available on the device returned: when hardware is configured type: list ansible_net_memfree_mb: description: The available free memory on the remote device in Mb returned: when hardware is configured type: int ansible_net_memtotal_mb: description: The total memory on the remote device in Mb returned: when hardware is configured type: int # config ansible_net_config: description: The current active config from the device returned: when config is configured type: str # mpls ansible_net_mpls_lsps: description: All MPLS LSPs configured on the device returned: When LSP is configured type: dict ansible_net_mpls_vll: description: All VLL instances configured on the device returned: When MPLS VLL is configured type: dict ansible_net_mpls_vll_local: description: All VLL-LOCAL instances configured on the device returned: When MPLS VLL-LOCAL is configured type: dict ansible_net_mpls_vpls: description: All VPLS instances configured on the device returned: When MPLS VPLS is configured type: dict # interfaces ansible_net_all_ipv4_addresses: description: All IPv4 addresses configured on the device returned: when interfaces is configured type: list 
ansible_net_all_ipv6_addresses: description: All IPv6 addresses configured on the device returned: when interfaces is configured type: list ansible_net_interfaces: description: A hash of all interfaces running on the system returned: when interfaces is configured type: dict ansible_net_neighbors: description: The list of LLDP neighbors from the remote device returned: when interfaces is configured type: dict """ import re from ansible.module_utils.network.ironware.ironware import run_commands from ansible.module_utils.network.ironware.ironware import ironware_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import iteritems class FactsBase(object): COMMANDS = list() def __init__(self, module): self.module = module self.facts = dict() self.responses = None def populate(self): self.responses = run_commands(self.module, self.COMMANDS, check_rc=False) def run(self, cmd): return run_commands(self.module, cmd, check_rc=False) class Default(FactsBase): COMMANDS = [ 'show version', 'show chassis' ] def populate(self): super(Default, self).populate() data = self.responses[0] if data: self.facts['version'] = self.parse_version(data) self.facts['serialnum'] = self.parse_serialnum(data) data = self.responses[1] if data: self.facts['model'] = self.parse_model(data) def parse_version(self, data): match = re.search(r'IronWare : Version (\S+)', data) if match: return match.group(1) def parse_model(self, data): match = re.search(r'^\*\*\* (.+) \*\*\*$', data, re.M) if match: return match.group(1) def parse_serialnum(self, data): match = re.search(r'Serial #: (\S+),', data) if match: return match.group(1) class Hardware(FactsBase): COMMANDS = [ 'dir | include Directory', 'show memory' ] def populate(self): super(Hardware, self).populate() data = self.responses[0] if data: self.facts['filesystems'] = self.parse_filesystems(data) data = self.responses[1] if data: self.facts['memtotal_mb'] = 
int(round(int(self.parse_memtotal(data)) / 1024 / 1024, 0)) self.facts['memfree_mb'] = int(round(int(self.parse_memfree(data)) / 1024 / 1024, 0)) def parse_filesystems(self, data): return re.findall(r'^Directory of (\S+)', data, re.M) def parse_memtotal(self, data): match = re.search(r'Total SDRAM\D*(\d+)\s', data, re.M) if match: return match.group(1) def parse_memfree(self, data): match = re.search(r'(Total Free Memory|Available Memory)\D*(\d+)\s', data, re.M) if match: return match.group(2) class Config(FactsBase): COMMANDS = ['show running-config'] def populate(self): super(Config, self).populate() data = self.responses[0] if data: self.facts['config'] = data class MPLS(FactsBase): COMMANDS = [ 'show mpls lsp detail', 'show mpls vll-local detail', 'show mpls vll detail', 'show mpls vpls detail' ] def populate(self): super(MPLS, self).populate() data = self.responses[0] if data: data = self.parse_mpls(data) self.facts['mpls_lsps'] = self.populate_lsps(data) data = self.responses[1] if data: data = self.parse_mpls(data) self.facts['mpls_vll_local'] = self.populate_vll_local(data) data = self.responses[2] if data: data = self.parse_mpls(data) self.facts['mpls_vll'] = self.populate_vll(data) data = self.responses[3] if data: data = self.parse_mpls(data) self.facts['mpls_vpls'] = self.populate_vpls(data) def parse_mpls(self, data): parsed = dict() for line in data.split('\n'): if not line: continue elif line[0] == ' ': parsed[key] += '\n%s' % line else: match = re.match(r'^(LSP|VLL|VPLS) ([^\s,]+)', line) if match: key = match.group(2) parsed[key] = line return parsed def populate_vpls(self, vpls): facts = dict() for key, value in iteritems(vpls): vpls = dict() vpls['endpoints'] = self.parse_vpls_endpoints(value) vpls['vc-id'] = self.parse_vpls_vcid(value) facts[key] = vpls return facts def populate_vll_local(self, vll_locals): facts = dict() for key, value in iteritems(vll_locals): vll = dict() vll['endpoints'] = self.parse_vll_endpoints(value) facts[key] = vll 
return facts def populate_vll(self, vlls): facts = dict() for key, value in iteritems(vlls): vll = dict() vll['endpoints'] = self.parse_vll_endpoints(value) vll['vc-id'] = self.parse_vll_vcid(value) vll['cos'] = self.parse_vll_cos(value) facts[key] = vll return facts def parse_vll_vcid(self, data): match = re.search(r'VC-ID (\d+),', data, re.M) if match: return match.group(1) def parse_vll_cos(self, data): match = re.search(r'COS +: +(\d+)', data, re.M) if match: return match.group(1) def parse_vll_endpoints(self, data): facts = list() regex = r'End-point[0-9 ]*: +(?P<tagged>tagged|untagged) +(vlan +(?P<vlan>[0-9]+) +)?(inner- vlan +(?P<innervlan>[0-9]+) +)?(?P<port>e [0-9/]+|--)' matches = re.finditer(regex, data, re.IGNORECASE | re.DOTALL) for match in matches: f = match.groupdict() f['type'] = 'local' facts.append(f) regex = r'Vll-Peer +: +(?P<vllpeer>[0-9\.]+).*Tunnel LSP +: +(?P<lsp>\S+)' matches = re.finditer(regex, data, re.IGNORECASE | re.DOTALL) for match in matches: f = match.groupdict() f['type'] = 'remote' facts.append(f) return facts def parse_vpls_vcid(self, data): match = re.search(r'Id (\d+),', data, re.M) if match: return match.group(1) def parse_vpls_endpoints(self, data): facts = list() regex = r'Vlan (?P<vlanid>[0-9]+)\s(?: +(?:L2.*)\s| +Tagged: (?P<tagged>.+)+\s| +Untagged: (?P<untagged>.+)\s)*' matches = re.finditer(regex, data, re.IGNORECASE) for match in matches: f = match.groupdict() f['type'] = 'local' facts.append(f) regex = r'Peer address: (?P<vllpeer>[0-9\.]+)' matches = re.finditer(regex, data, re.IGNORECASE) for match in matches: f = match.groupdict() f['type'] = 'remote' facts.append(f) return facts def populate_lsps(self, lsps): facts = dict() for key, value in iteritems(lsps): lsp = dict() lsp['to'] = self.parse_lsp_to(value) lsp['from'] = self.parse_lsp_from(value) lsp['adminstatus'] = self.parse_lsp_adminstatus(value) lsp['operstatus'] = self.parse_lsp_operstatus(value) lsp['pri_path'] = self.parse_lsp_pripath(value) 
lsp['sec_path'] = self.parse_lsp_secpath(value) lsp['frr'] = self.parse_lsp_frr(value) facts[key] = lsp return facts def parse_lsp_to(self, data): match = re.search(r'^LSP .* to (\S+)', data, re.M) if match: return match.group(1) def parse_lsp_from(self, data): match = re.search(r'From: ([^\s,]+),', data, re.M) if match: return match.group(1) def parse_lsp_adminstatus(self, data): match = re.search(r'admin: (\w+),', data, re.M) if match: return match.group(1) def parse_lsp_operstatus(self, data): match = re.search(r'From: .* status: (\w+)', data, re.M) if match: return match.group(1) def parse_lsp_pripath(self, data): match = re.search(r'Pri\. path: ([^\s,]+), up: (\w+), active: (\w+)', data, re.M) if match: path = dict() path['name'] = match.group(1) if match.group(1) != 'NONE' else None path['up'] = True if match.group(2) == 'yes' else False path['active'] = True if match.group(3) == 'yes' else False return path def parse_lsp_secpath(self, data): match = re.search(r'Sec\. path: ([^\s,]+), active: (\w+).*\n.* status: (\w+)', data, re.M) if match: path = dict() path['name'] = match.group(1) if match.group(1) != 'NONE' else None path['up'] = True if match.group(3) == 'up' else False path['active'] = True if match.group(2) == 'yes' else False return path def parse_lsp_frr(self, data): match = re.search(r'Backup LSP: (\w+)', data, re.M) if match: path = dict() path['up'] = True if match.group(1) == 'UP' else False path['name'] = None if path['up']: match = re.search(r'bypass_lsp: (\S)', data, re.M) path['name'] = match.group(1) if match else None return path class Interfaces(FactsBase): COMMANDS = [ 'show interfaces', 'show ipv6 interface', 'show lldp neighbors' ] def populate(self): super(Interfaces, self).populate() self.facts['all_ipv4_addresses'] = list() self.facts['all_ipv6_addresses'] = list() data = self.responses[0] if data: interfaces = self.parse_interfaces(data) self.facts['interfaces'] = self.populate_interfaces(interfaces) data = self.responses[1] if 
data: data = self.parse_interfaces(data) self.populate_ipv6_interfaces(data) data = self.responses[2] if data and 'LLDP is not running' not in data: self.facts['neighbors'] = self.parse_neighbors(data) def populate_interfaces(self, interfaces): facts = dict() for key, value in iteritems(interfaces): intf = dict() intf['description'] = self.parse_description(value) intf['macaddress'] = self.parse_macaddress(value) ipv4 = self.parse_ipv4(value) intf['ipv4'] = self.parse_ipv4(value) if ipv4: self.add_ip_address(ipv4['address'], 'ipv4') intf['mtu'] = self.parse_mtu(value) intf['bandwidth'] = self.parse_bandwidth(value) intf['duplex'] = self.parse_duplex(value) intf['lineprotocol'] = self.parse_lineprotocol(value) intf['operstatus'] = self.parse_operstatus(value) intf['type'] = self.parse_type(value) facts[key] = intf return facts def populate_ipv6_interfaces(self, data): for key, value in iteritems(data): self.facts['interfaces'][key]['ipv6'] = list() addresses = re.findall(r'\s([0-9a-f]+:+[0-9a-f:]+\/\d+)\s', value, re.M) for addr in addresses: address, masklen = addr.split('/') ipv6 = dict(address=address, masklen=int(masklen)) self.add_ip_address(ipv6['address'], 'ipv6') self.facts['interfaces'][key]['ipv6'].append(ipv6) def add_ip_address(self, address, family): if family == 'ipv4': self.facts['all_ipv4_addresses'].append(address) else: self.facts['all_ipv6_addresses'].append(address) def parse_neighbors(self, neighbors): facts = dict() for line in neighbors.split('\n'): if line == '': continue match = re.search(r'([\d\/]+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line, re.M) if match: intf = match.group(1) if intf not in facts: facts[intf] = list() fact = dict() fact['host'] = match.group(5) fact['port'] = match.group(3) facts[intf].append(fact) return facts def parse_interfaces(self, data): parsed = dict() for line in data.split('\n'): if not line: continue elif line[0] == ' ': parsed[key] += '\n%s' % line else: match = re.match(r'^(\S+Ethernet|eth )(\S+)', line) if 
match: key = match.group(2) parsed[key] = line return parsed def parse_description(self, data): match = re.search(r'Port name is (.+)$', data, re.M) if match: return match.group(1) def parse_macaddress(self, data): match = re.search(r'address is (\S+)', data) if match: return match.group(1) def parse_ipv4(self, data): match = re.search(r'Internet address is ([^\s,]+)', data) if match: addr, masklen = match.group(1).split('/') return dict(address=addr, masklen=int(masklen)) def parse_mtu(self, data): match = re.search(r'MTU (\d+)', data) if match: return int(match.group(1)) def parse_bandwidth(self, data): match = re.search(r'BW is (\d+)', data) if match: return int(match.group(1)) def parse_duplex(self, data): match = re.search(r'configured duplex \S+ actual (\S+)', data, re.M) if match: return match.group(1) def parse_mediatype(self, data): match = re.search(r'Type\s*:\s*(.+)$', data, re.M) if match: return match.group(1) def parse_type(self, data): match = re.search(r'Hardware is (.+),', data, re.M) if match: return match.group(1) def parse_lineprotocol(self, data): match = re.search(r'line protocol is (\S+)', data, re.M) if match: return match.group(1) def parse_operstatus(self, data): match = re.search(r'^(?:.+) is (.+),', data, re.M) if match: return match.group(1) FACT_SUBSETS = dict( default=Default, hardware=Hardware, interfaces=Interfaces, config=Config, mpls=MPLS, ) VALID_SUBSETS = frozenset(FACT_SUBSETS.keys()) def main(): """main entry point for module execution """ argument_spec = dict( gather_subset=dict(default=["!config", "!mpls"], type='list') ) argument_spec.update(ironware_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) gather_subset = module.params['gather_subset'] runable_subsets = set() exclude_subsets = set() for subset in gather_subset: if subset == 'all': runable_subsets.update(VALID_SUBSETS) continue if subset.startswith('!'): subset = subset[1:] if subset == 'all': 
exclude_subsets.update(VALID_SUBSETS) continue exclude = True else: exclude = False if subset not in VALID_SUBSETS: module.fail_json(msg='Bad subset') if exclude: exclude_subsets.add(subset) else: runable_subsets.add(subset) if not runable_subsets: runable_subsets.update(VALID_SUBSETS) runable_subsets.difference_update(exclude_subsets) runable_subsets.add('default') facts = dict() facts['gather_subset'] = list(runable_subsets) instances = list() for key in runable_subsets: instances.append(FACT_SUBSETS[key](module)) for inst in instances: inst.populate() facts.update(inst.facts) ansible_facts = dict() for key, value in iteritems(facts): key = 'ansible_net_%s' % key ansible_facts[key] = value check_args(module) module.exit_json(ansible_facts=ansible_facts) if __name__ == '__main__': main()
gpl-3.0
HeatherHillers/RoamMac
src/roam/roam_style.py
1
3020
appstyle = """ * { font: 16px "Segoe UI" ; } QToolButton { padding: 4px; color: #4f4f4f; } QToolButton:hover { padding: 4px; background-color: rgb(211, 228, 255); } QStatusBar { background: white; border: none; } QStatusBar::item { border: none; } QCheckBox { color: #4f4f4f; } QCheckBox::indicator { width: 40px; height: 40px; } QLabel { color: #4f4f4f; } QLabel[projectlabel="true"] { background-color: rgba(255, 255, 255, 0); } QLabel[headerlabel="true"] { font: 75 17pt "Segoe UI"; } QComboBox { border: 1px solid #d3d3d3; } QComboBox::drop-down { width: 30px; } QWidget QStackedWidget { background-color: white; } QScollArea, QScollArea * { background-color: white; } QFrame { background-color: rgb(255,255,255); } QFrame#infoframe { background-color: rgb(255,255,255, 220); } QListWidget:item:hover { background-color: #5b93c2; } QListWidget#layerList { font: 20px "Segoe UI"; background-color: rgb(149,150,145, 220); } QListWidget#layerList::item { color: white; border-bottom: 1px solid black; padding: 4px; } QListWidget#layerList::item::selected { background-color: #5b93c2; } QPushButton { border: 1px solid #e1e1e1; padding: 6px; color: #4f4f4f; } QPushButton:checked { border: 3px solid rgb(137, 175, 255); background-color: rgb(211, 228, 255); } QPushButton:hover { background-color: rgb(211, 228, 255); } QToolButton[action="true"] { border: 1px solid #e1e1e1; padding: 6px; color: #4f4f4f; } QWidget#featureformarea { background-color: white; } QWidget#helpframe { font: 20px "Segoe UI" ; background-color: rgb(255,255,255, 220); } QDialog { color: #4f4f4f; font: 20px "Segoe UI" ; background-color: rgb(255, 255, 255); } QPushButton#deleteButton { background-color: rgb(255, 134, 125); color: white; font: 75 17pt "Segoe UI"; } QLabel#headerlabel { color: rgb(255, 134, 125); font: 75 17pt "Segoe UI"; } QTreeWidget#synctree { font: 20pt "Segoe UI"; } QTreeWidget#synctree::item { padding-top: 4px; } """ menubarstyle = """ QToolBar#menutoolbar { font: 14px "Segoe UI" ; 
background-color: rgb(85,85,85); padding: 0px; } QToolButton { color: rgb(221,221,219); } QToolButton:hover { color: rgb(0,0,0); } QToolButton:checked { color: rgb(91,147,194); padding-right: 0px; background-color: rgb(240, 240, 240); } """ featureform = """ * { font: 20px "Segoe UI" ; } QPushButton { border: 1px solid rgb(137, 175, 255); background-color: rgb(203, 203, 203, 50); padding: 6px; color: #4f4f4f; } QPushButton:checked { border: 3px solid rgb(137, 175, 255); background-color: rgb(211, 228, 255); } QPushButton:hover { background-color: rgb(211, 228, 255); } QToolButton { border: 1px solid #e1e1e1; padding: 6px; color: #4f4f4f; } QToolButton:checked { border: 3px solid rgb(137, 175, 255); background-color: rgb(211, 228, 255); } """
gpl-2.0
gtaylor/ansible
lib/ansible/runner/lookup_plugins/dig.py
113
8205
# (c) 2015, Jan-Piet Mens <jpmens(at)gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from ansible import utils, errors import socket HAVE_DNS=False try: import dns.resolver import dns.reversename from dns.rdatatype import * from dns.exception import DNSException HAVE_DNS=True except ImportError: pass def make_rdata_dict(rdata): ''' While the 'dig' lookup plugin supports anything which dnspython supports out of the box, the following supported_types list describes which DNS query types we can convert to a dict. Note: adding support for RRSIG is hard work. 
:) ''' supported_types = { A : ['address'], AAAA : ['address'], CNAME : ['target'], DNAME : ['target'], DLV : ['algorithm', 'digest_type', 'key_tag', 'digest'], DNSKEY : ['flags', 'algorithm', 'protocol', 'key'], DS : ['algorithm', 'digest_type', 'key_tag', 'digest'], HINFO : ['cpu', 'os'], LOC : ['latitude', 'longitude', 'altitude', 'size', 'horizontal_precision', 'vertical_precision'], MX : ['preference', 'exchange'], NAPTR : ['order', 'preference', 'flags', 'service', 'regexp', 'replacement'], NS : ['target'], NSEC3PARAM : ['algorithm', 'flags', 'iterations', 'salt'], PTR : ['target'], RP : ['mbox', 'txt'], # RRSIG : ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'], SOA : ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'], SPF : ['strings'], SRV : ['priority', 'weight', 'port', 'target'], SSHFP : ['algorithm', 'fp_type', 'fingerprint'], TLSA : ['usage', 'selector', 'mtype', 'cert'], TXT : ['strings'], } rd = {} if rdata.rdtype in supported_types: fields = supported_types[rdata.rdtype] for f in fields: val = rdata.__getattribute__(f) if type(val) == dns.name.Name: val = dns.name.Name.to_text(val) if rdata.rdtype == DLV and f == 'digest': val = dns.rdata._hexify(rdata.digest).replace(' ', '') if rdata.rdtype == DS and f == 'digest': val = dns.rdata._hexify(rdata.digest).replace(' ', '') if rdata.rdtype == DNSKEY and f == 'key': val = dns.rdata._base64ify(rdata.key).replace(' ', '') if rdata.rdtype == NSEC3PARAM and f == 'salt': val = dns.rdata._hexify(rdata.salt).replace(' ', '') if rdata.rdtype == SSHFP and f == 'fingerprint': val = dns.rdata._hexify(rdata.fingerprint).replace(' ', '') if rdata.rdtype == TLSA and f == 'cert': val = dns.rdata._hexify(rdata.cert).replace(' ', '') rd[f] = val return rd # ============================================================== # dig: Lookup DNS records # # -------------------------------------------------------------- class LookupModule(object): def __init__(self, basedir=None, 
**kwargs): self.basedir = basedir if HAVE_DNS == False: raise errors.AnsibleError("Can't LOOKUP(dig): module dns.resolver is not installed") def run(self, terms, inject=None, **kwargs): ''' terms contains a string with things to `dig' for. We support the following formats: example.com # A record example.com qtype=A # same example.com/TXT # specific qtype example.com qtype=txt # same 192.168.1.2/PTR # reverse PTR ^^ shortcut for 2.1.168.192.in-addr.arpa/PTR example.net/AAAA @nameserver # query specified server ^^^ can be comma-sep list of names/addresses ... flat=0 # returns a dict; default is 1 == string ''' terms = terms.split() # Create Resolver object so that we can set NS if necessary myres = dns.resolver.Resolver() edns_size = 4096 myres.use_edns(0, ednsflags=dns.flags.DO, payload=edns_size) domain = None qtype = 'A' flat = True for t in terms: if t.startswith('@'): # e.g. "@10.0.1.2,192.168.1.1" is ok. nsset = t[1:].split(',') nameservers = [] for ns in nsset: # Check if we have a valid IP address. If so, use that, otherwise # try to resolve name to address using system's resolver. If that # fails we bail out. 
try: socket.inet_aton(ns) nameservers.append(ns) except: try: nsaddr = dns.resolver.query(ns)[0].address nameservers.append(nsaddr) except Exception, e: raise errors.AnsibleError("dns lookup NS: ", str(e)) myres.nameservers = nameservers continue if '=' in t: try: opt, arg = t.split('=') except: pass if opt == 'qtype': qtype = arg.upper() elif opt == 'flat': flat = int(arg) continue if '/' in t: try: domain, qtype = t.split('/') except: domain = t else: domain = t # print "--- domain = {0} qtype={1}".format(domain, qtype) ret = [] if qtype.upper() == 'PTR': try: n = dns.reversename.from_address(domain) domain = n.to_text() except dns.exception.SyntaxError: pass except Exception, e: raise errors.AnsibleError("dns.reversename unhandled exception", str(e)) try: answers = myres.query(domain, qtype) for rdata in answers: s = rdata.to_text() if qtype.upper() == 'TXT': s = s[1:-1] # Strip outside quotes on TXT rdata if flat: ret.append(s) else: try: rd = make_rdata_dict(rdata) rd['owner'] = answers.canonical_name.to_text() rd['type'] = dns.rdatatype.to_text(rdata.rdtype) rd['ttl'] = answers.rrset.ttl ret.append(rd) except Exception, e: ret.append(str(e)) except dns.resolver.NXDOMAIN: ret.append('NXDOMAIN') except dns.resolver.NoAnswer: ret.append("") except dns.resolver.Timeout: ret.append('') except dns.exception.DNSException, e: raise errors.AnsibleError("dns.resolver unhandled exception", e) return ret
gpl-3.0
zlebnik/pyexpertsender
pyexpertsender/utils.py
1
1213
import xml.etree.ElementTree as ET import six xsi = 'http://www.w3.org/2001/XMLSchema-instance' xs = 'http://www.w3.org/2001/XMLSchema' def camel_case(word): return ''.join(x.capitalize() or '_' for x in word.split('_')) def generate_entity(data, parent): if isinstance(data, dict): for key, data in data.items(): if key == 'text': parent.text = six.text_type(data) elif key == 'attrs': for attr, val in data.items(): parent.set('xsi:' + attr, val) else: child = ET.SubElement(parent, camel_case(key)) generate_entity(data, child) elif isinstance(data, list): for value in data: generate_entity(value, parent) else: parent.text = six.text_type(data) def generate_request_xml(api_key, data_type, dict_tree): root = ET.Element('ApiRequest') root.set('xmlns:xsi', xsi) root.set('xmlns:xs', xs) api_key_element = ET.SubElement(root, 'ApiKey') api_key_element.text = api_key if data_type: dict_tree['attrs'] = {'type': data_type} generate_entity({'data': dict_tree}, root) return ET.tostring(root)
mit
sumedh123/debatify
venv/lib/python2.7/site-packages/sqlalchemy/testing/pickleable.py
55
2641
# testing/pickleable.py # Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Classes used in pickling tests, need to be at the module level for unpickling. """ from . import fixtures class User(fixtures.ComparableEntity): pass class Order(fixtures.ComparableEntity): pass class Dingaling(fixtures.ComparableEntity): pass class EmailUser(User): pass class Address(fixtures.ComparableEntity): pass # TODO: these are kind of arbitrary.... class Child1(fixtures.ComparableEntity): pass class Child2(fixtures.ComparableEntity): pass class Parent(fixtures.ComparableEntity): pass class Screen(object): def __init__(self, obj, parent=None): self.obj = obj self.parent = parent class Foo(object): def __init__(self, moredata): self.data = 'im data' self.stuff = 'im stuff' self.moredata = moredata __hash__ = object.__hash__ def __eq__(self, other): return other.data == self.data and \ other.stuff == self.stuff and \ other.moredata == self.moredata class Bar(object): def __init__(self, x, y): self.x = x self.y = y __hash__ = object.__hash__ def __eq__(self, other): return other.__class__ is self.__class__ and \ other.x == self.x and \ other.y == self.y def __str__(self): return "Bar(%d, %d)" % (self.x, self.y) class OldSchool: def __init__(self, x, y): self.x = x self.y = y def __eq__(self, other): return other.__class__ is self.__class__ and \ other.x == self.x and \ other.y == self.y class OldSchoolWithoutCompare: def __init__(self, x, y): self.x = x self.y = y class BarWithoutCompare(object): def __init__(self, x, y): self.x = x self.y = y def __str__(self): return "Bar(%d, %d)" % (self.x, self.y) class NotComparable(object): def __init__(self, data): self.data = data def __hash__(self): return id(self) def __eq__(self, other): return NotImplemented def __ne__(self, other): return NotImplemented class 
BrokenComparable(object): def __init__(self, data): self.data = data def __hash__(self): return id(self) def __eq__(self, other): raise NotImplementedError def __ne__(self, other): raise NotImplementedError
mit
leiferikb/bitpop
depot_tools/tests/trychange_unittest.py
43
6250
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for trychange.py.""" import os import sys import unittest sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from testing_support.super_mox import SuperMoxTestBase import subprocess2 import trychange class TryChangeTestsBase(SuperMoxTestBase): """Setups and tear downs the mocks but doesn't test anything as-is.""" def setUp(self): SuperMoxTestBase.setUp(self) self.mox.StubOutWithMock(subprocess2, 'communicate') self.mox.StubOutWithMock(trychange, 'RunGit') self.mox.StubOutWithMock(trychange.scm.GIT, 'Capture') self.mox.StubOutWithMock(trychange.scm.GIT, 'GenerateDiff') self.mox.StubOutWithMock(trychange.scm.GIT, 'GetCheckoutRoot') self.mox.StubOutWithMock(trychange.scm.GIT, 'GetEmail') self.mox.StubOutWithMock(trychange.scm.GIT, 'GetPatchName') self.mox.StubOutWithMock(trychange.scm.GIT, 'GetUpstreamBranch') self.mox.StubOutWithMock(trychange.scm.SVN, 'GenerateDiff') self.mox.StubOutWithMock(trychange.scm.SVN, 'GetCheckoutRoot') self.mox.StubOutWithMock(trychange.scm.SVN, 'GetEmail') self.fake_root = self.Dir() self.expected_files = ['foo.txt', 'bar.txt'] self.options = trychange.optparse.Values() self.options.files = self.expected_files self.options.diff = None self.options.name = None self.options.email = None self.options.exclude = [] class TryChangeUnittest(TryChangeTestsBase): """General trychange.py tests.""" def testMembersChanged(self): members = [ 'DieWithError', 'EPILOG', 'Escape', 'GIT', 'GIT_PATCH_DIR_BASENAME', 'GetMungedDiff', 'GuessVCS', 'GIT_BRANCH_FILE', 'HELP_STRING', 'Error', 'InvalidScript', 'NoTryServerAccess', 'OptionParser', 'PrintSuccess', 'RunCommand', 'RunGit', 'SCM', 'SVN', 'TryChange', 'USAGE', 'contextlib', 'breakpad', 'datetime', 'errno', 'fix_encoding', 'gcl', 'gclient_utils', 'gerrit_util', 'gen_parser', 
'getpass', 'itertools', 'json', 'logging', 'optparse', 'os', 'posixpath', 're', 'scm', 'shutil', 'subprocess2', 'sys', 'tempfile', 'urllib', 'urllib2', 'urlparse'] # If this test fails, you should add the relevant test. self.compareMembers(trychange, members) class TryChangeSimpleTest(unittest.TestCase): # Doesn't require supermox to run. def test_flags(self): cmd = [ '--bot', 'bot1,bot2', '--testfilter', 'test1', '--testfilter', 'test2', '--user', 'joe', '--email', 'joe@example.com', ] options, args = trychange.gen_parser(None).parse_args(cmd) self.assertEquals([], args) # pylint: disable=W0212 bot_spec = trychange._ParseBotList(options.bot, options.testfilter) if options.testfilter: bot_spec = trychange._ApplyTestFilter(options.testfilter, bot_spec) values = trychange._ParseSendChangeOptions(bot_spec, options) self.assertEquals( [ ('user', 'joe'), ('name', None), ('email', 'joe@example.com'), ('bot', 'bot1:test1,test2'), ('bot', 'bot2:test1,test2'), ], values) def test_flags_bad_combination(self): cmd = [ '--bot', 'bot1:test1', '--testfilter', 'test2', ] options, args = trychange.gen_parser(None).parse_args(cmd) self.assertEquals([], args) try: # pylint: disable=W0212 trychange._ParseBotList(options.bot, options.testfilter) self.fail() except ValueError: pass class SVNUnittest(TryChangeTestsBase): """trychange.SVN tests.""" def testMembersChanged(self): members = [ 'AutomagicalSettings', 'CaptureStatus', 'GetCodeReviewSetting', 'ReadRootFile', 'GenerateDiff', 'GetFileNames', 'files', 'file_tuples', ] # If this test fails, you should add the relevant test. 
self.compareMembers(trychange.SVN, members) def testBasic(self): # pylint: disable=E1103 trychange.os.path.abspath(self.fake_root).AndReturn(self.fake_root) trychange.scm.SVN.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root) trychange.scm.SVN.GenerateDiff(['foo.txt', 'bar.txt'], self.fake_root, full_move=True, revision=None).AndReturn('A diff') trychange.scm.SVN.GetEmail(self.fake_root).AndReturn('georges@example.com') self.mox.ReplayAll() svn = trychange.SVN(self.options, self.fake_root, self.options.files) self.assertEqual(svn.GetFileNames(), self.expected_files) self.assertEqual(svn.checkout_root, self.fake_root) self.assertEqual(svn.GenerateDiff(), 'A diff') class GITUnittest(TryChangeTestsBase): """trychange.GIT tests.""" def testMembersChanged(self): members = [ 'AutomagicalSettings', 'CaptureStatus', 'GetCodeReviewSetting', 'ReadRootFile', 'GenerateDiff', 'GetFileNames', 'files', 'file_tuples', ] # If this test fails, you should add the relevant test. self.compareMembers(trychange.GIT, members) def testBasic(self): # pylint: disable=E1103 trychange.os.path.abspath(self.fake_root).AndReturn(self.fake_root) trychange.scm.GIT.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root) trychange.scm.GIT.GetUpstreamBranch(self.fake_root).AndReturn('somewhere') trychange.RunGit(['diff-index', 'HEAD']) trychange.scm.GIT.GenerateDiff(self.fake_root, full_move=True, files=['foo.txt', 'bar.txt'], branch='somewhere').AndReturn('A diff') trychange.scm.GIT.GetPatchName(self.fake_root).AndReturn('bleh-1233') trychange.scm.GIT.GetEmail(self.fake_root).AndReturn('georges@example.com') self.mox.ReplayAll() git = trychange.GIT(self.options, self.fake_root, self.options.files) self.assertEqual(git.GetFileNames(), self.expected_files) self.assertEqual(git.checkout_root, self.fake_root) self.assertEqual(git.GenerateDiff(), 'A diff') if __name__ == '__main__': unittest.main()
gpl-3.0
pschmied/lilypond
scripts/auxiliar/musicxml_generate_intervals.py
10
1859
#!/usr/bin/env python3
"""Generate a MusicXML test file covering various pitches and interval sizes.

Emits a single-part MusicXML 1.0 partwise score to stdout.  For every
diatonic step over two octaves and every alteration (flat / natural /
sharp) it prints the note followed by its mirror image below the start
octave, producing a systematic sweep of intervals.

Ported from Python 2: ``print`` statements became the ``print()``
function, the removed ``<>`` operator became ``!=`` and the implicit
floor division became explicit ``//`` so the emitted XML is unchanged.
"""

# Diatonic step names; index 0 is C.
notes = "CDEFGAB"
# Chromatic alterations to cycle through: flat, natural, sharp.
alterations = [-1, 0, 1]

# Fixed document skeleton around the generated <note> elements.
# NOTE(review): the original file's in-string indentation was lost in
# extraction; the element content is reproduced exactly.
HEADER = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE score-partwise PUBLIC "-//Recordare//DTD MusicXML 1.0 Partwise//EN"
                                "http://www.musicxml.org/dtds/partwise.dtd">
<score-partwise>
  <movement-title>Various piches and interval sizes</movement-title>
  <part-list>
    <score-part id="P1">
      <part-name>MusicXML Part</part-name>
    </score-part>
  </part-list>
  <!--=========================================================-->
  <part id="P1">
    <measure number="1">
      <attributes>
        <divisions>1</divisions>
        <key>
          <fifths>0</fifths>
          <mode>major</mode>
        </key>
        <time symbol="common">
          <beats>2</beats>
          <beat-type>4</beat-type>
        </time>
        <clef>
          <sign>G</sign>
          <line>2</line>
        </clef>
      </attributes>
"""

FOOTER = """      </measure>
  </part>
</score-partwise>
"""


def note_xml(octave, note, alteration):
    """Return the MusicXML <note> element for one quarter note.

    :param octave: MusicXML octave number
    :param note: index into ``notes`` (0 == C ... 6 == B)
    :param alteration: -1 flat, 0 natural, +1 sharp; the <alter>
        element is omitted for naturals, matching the original output.
    """
    lines = [" <note>", " <pitch>", " <step>%s</step>" % notes[note]]
    if alteration != 0:
        lines.append(" <alter>%s</alter>" % alteration)
    lines.extend([
        " <octave>%s</octave>" % octave,
        " </pitch>",
        " <duration>1</duration>",
        " <voice>1</voice>",
        " <type>quarter</type>",
        " </note>",
    ])
    return "\n".join(lines)


def print_note(octave, note, alteration):
    """Print one <note> element to stdout (original public helper)."""
    print(note_xml(octave, note, alteration))


def main():
    """Write the complete score to stdout."""
    print(HEADER)
    start_octave = 5
    for octave in (start_octave, start_octave + 1):
        for note in range(7):
            for alteration in alterations:
                # The very first flat would lie below the sweep's range.
                if octave == start_octave and note == 0 and alteration == -1:
                    continue
                print_note(octave, note, alteration)
                # Mirror the pitch downward around the start octave.
                # (7 - note) // 7 is 1 only for note == 0 (C), which
                # mirrors onto C itself one octave adjustment lower.
                print_note(
                    start_octave - (octave - start_octave) - (1 - (7 - note) // 7),
                    (7 - note) % 7,
                    -alteration)
    print(FOOTER)


if __name__ == "__main__":
    main()
gpl-3.0
ajaali/django
django/utils/dateformat.py
365
10712
""" PHP date() style date formatting See http://www.php.net/date for format strings Usage: >>> import datetime >>> d = datetime.datetime.now() >>> df = DateFormat(d) >>> print(df.format('jS F Y H:i')) 7th October 2003 11:39 >>> """ from __future__ import unicode_literals import calendar import datetime import re import time from django.utils import six from django.utils.dates import ( MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR, ) from django.utils.encoding import force_text from django.utils.timezone import get_default_timezone, is_aware, is_naive from django.utils.translation import ugettext as _ re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])') re_escaped = re.compile(r'\\(.)') class Formatter(object): def format(self, formatstr): pieces = [] for i, piece in enumerate(re_formatchars.split(force_text(formatstr))): if i % 2: pieces.append(force_text(getattr(self, piece)())) elif piece: pieces.append(re_escaped.sub(r'\1', piece)) return ''.join(pieces) class TimeFormat(Formatter): def __init__(self, obj): self.data = obj self.timezone = None # We only support timezone when formatting datetime objects, # not date objects (timezone information not appropriate), # or time objects (against established django policy). if isinstance(obj, datetime.datetime): if is_naive(obj): self.timezone = get_default_timezone() else: self.timezone = obj.tzinfo def a(self): "'a.m.' or 'p.m.'" if self.data.hour > 11: return _('p.m.') return _('a.m.') def A(self): "'AM' or 'PM'" if self.data.hour > 11: return _('PM') return _('AM') def B(self): "Swatch Internet time" raise NotImplementedError('may be implemented in a future release') def e(self): """ Timezone name. If timezone information is not available, this method returns an empty string. 
""" if not self.timezone: return "" try: if hasattr(self.data, 'tzinfo') and self.data.tzinfo: # Have to use tzinfo.tzname and not datetime.tzname # because datatime.tzname does not expect Unicode return self.data.tzinfo.tzname(self.data) or "" except NotImplementedError: pass return "" def f(self): """ Time, in 12-hour hours and minutes, with minutes left off if they're zero. Examples: '1', '1:30', '2:05', '2' Proprietary extension. """ if self.data.minute == 0: return self.g() return '%s:%s' % (self.g(), self.i()) def g(self): "Hour, 12-hour format without leading zeros; i.e. '1' to '12'" if self.data.hour == 0: return 12 if self.data.hour > 12: return self.data.hour - 12 return self.data.hour def G(self): "Hour, 24-hour format without leading zeros; i.e. '0' to '23'" return self.data.hour def h(self): "Hour, 12-hour format; i.e. '01' to '12'" return '%02d' % self.g() def H(self): "Hour, 24-hour format; i.e. '00' to '23'" return '%02d' % self.G() def i(self): "Minutes; i.e. '00' to '59'" return '%02d' % self.data.minute def O(self): """ Difference to Greenwich time in hours; e.g. '+0200', '-0430'. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" seconds = self.Z() sign = '-' if seconds < 0 else '+' seconds = abs(seconds) return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60) def P(self): """ Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off if they're zero and the strings 'midnight' and 'noon' if appropriate. Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.' Proprietary extension. """ if self.data.minute == 0 and self.data.hour == 0: return _('midnight') if self.data.minute == 0 and self.data.hour == 12: return _('noon') return '%s %s' % (self.f(), self.a()) def s(self): "Seconds; i.e. '00' to '59'" return '%02d' % self.data.second def T(self): """ Time zone of this machine; e.g. 'EST' or 'MDT'. 
If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" name = self.timezone.tzname(self.data) if self.timezone else None if name is None: name = self.format('O') return six.text_type(name) def u(self): "Microseconds; i.e. '000000' to '999999'" return '%06d' % self.data.microsecond def Z(self): """ Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for timezones west of UTC is always negative, and for those east of UTC is always positive. If timezone information is not available, this method returns an empty string. """ if not self.timezone: return "" offset = self.timezone.utcoffset(self.data) # `offset` is a datetime.timedelta. For negative values (to the west of # UTC) only days can be negative (days=-1) and seconds are always # positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0) # Positive offsets have days=0 return offset.days * 86400 + offset.seconds class DateFormat(TimeFormat): year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334] def b(self): "Month, textual, 3 letters, lowercase; e.g. 'jan'" return MONTHS_3[self.data.month] def c(self): """ ISO 8601 Format Example : '2008-01-02T10:30:00.000123' """ return self.data.isoformat() def d(self): "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'" return '%02d' % self.data.day def D(self): "Day of the week, textual, 3 letters; e.g. 'Fri'" return WEEKDAYS_ABBR[self.data.weekday()] def E(self): "Alternative month names as required by some locales. Proprietary extension." return MONTHS_ALT[self.data.month] def F(self): "Month, textual, long; e.g. 'January'" return MONTHS[self.data.month] def I(self): "'1' if Daylight Savings Time, '0' otherwise." if self.timezone and self.timezone.dst(self.data): return '1' else: return '0' def j(self): "Day of the month without leading zeros; i.e. '1' to '31'" return self.data.day def l(self): "Day of the week, textual, long; e.g. 
'Friday'" return WEEKDAYS[self.data.weekday()] def L(self): "Boolean for whether it is a leap year; i.e. True or False" return calendar.isleap(self.data.year) def m(self): "Month; i.e. '01' to '12'" return '%02d' % self.data.month def M(self): "Month, textual, 3 letters; e.g. 'Jan'" return MONTHS_3[self.data.month].title() def n(self): "Month without leading zeros; i.e. '1' to '12'" return self.data.month def N(self): "Month abbreviation in Associated Press style. Proprietary extension." return MONTHS_AP[self.data.month] def o(self): "ISO 8601 year number matching the ISO week number (W)" return self.data.isocalendar()[0] def r(self): "RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'" return self.format('D, j M Y H:i:s O') def S(self): "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'" if self.data.day in (11, 12, 13): # Special case return 'th' last = self.data.day % 10 if last == 1: return 'st' if last == 2: return 'nd' if last == 3: return 'rd' return 'th' def t(self): "Number of days in the given month; i.e. '28' to '31'" return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1] def U(self): "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)" if isinstance(self.data, datetime.datetime) and is_aware(self.data): return int(calendar.timegm(self.data.utctimetuple())) else: return int(time.mktime(self.data.timetuple())) def w(self): "Day of the week, numeric, i.e. 
'0' (Sunday) to '6' (Saturday)" return (self.data.weekday() + 1) % 7 def W(self): "ISO-8601 week number of year, weeks starting on Monday" # Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt week_number = None jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1 weekday = self.data.weekday() + 1 day_of_year = self.z() if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4: if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)): week_number = 53 else: week_number = 52 else: if calendar.isleap(self.data.year): i = 366 else: i = 365 if (i - day_of_year) < (4 - weekday): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) week_number = j // 7 if jan1_weekday > 4: week_number -= 1 return week_number def y(self): "Year, 2 digits; e.g. '99'" return six.text_type(self.data.year)[2:] def Y(self): "Year, 4 digits; e.g. '1999'" return self.data.year def z(self): "Day of the year; i.e. '0' to '365'" doy = self.year_days[self.data.month] + self.data.day if self.L() and self.data.month > 2: doy += 1 return doy def format(value, format_string): "Convenience function" df = DateFormat(value) return df.format(format_string) def time_format(value, format_string): "Convenience function" tf = TimeFormat(value) return tf.format(format_string)
bsd-3-clause
rest-of/the-deck
lambda/lib/python2.7/site-packages/pip/commands/wheel.py
170
7528
# -*- coding: utf-8 -*- from __future__ import absolute_import import logging import os import warnings from pip.basecommand import RequirementCommand from pip.exceptions import CommandError, PreviousBuildDirError from pip.req import RequirementSet from pip.utils import import_or_raise from pip.utils.build import BuildDirectory from pip.utils.deprecation import RemovedInPip10Warning from pip.wheel import WheelCache, WheelBuilder from pip import cmdoptions logger = logging.getLogger(__name__) class WheelCommand(RequirementCommand): """ Build Wheel archives for your requirements and dependencies. Wheel is a built-package format, and offers the advantage of not recompiling your software during every install. For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest. Requirements: setuptools>=0.8, and wheel. 'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels. """ name = 'wheel' usage = """ %prog [options] <requirement specifier> ... %prog [options] -r <requirements file> ... %prog [options] [-e] <vcs project url> ... %prog [options] [-e] <local project path> ... %prog [options] <archive url/path> ...""" summary = 'Build wheels from your requirements.' 
def __init__(self, *args, **kw): super(WheelCommand, self).__init__(*args, **kw) cmd_opts = self.cmd_opts cmd_opts.add_option( '-w', '--wheel-dir', dest='wheel_dir', metavar='dir', default=os.curdir, help=("Build wheels into <dir>, where the default is the " "current working directory."), ) cmd_opts.add_option(cmdoptions.use_wheel()) cmd_opts.add_option(cmdoptions.no_use_wheel()) cmd_opts.add_option(cmdoptions.no_binary()) cmd_opts.add_option(cmdoptions.only_binary()) cmd_opts.add_option( '--build-option', dest='build_options', metavar='options', action='append', help="Extra arguments to be supplied to 'setup.py bdist_wheel'.") cmd_opts.add_option(cmdoptions.constraints()) cmd_opts.add_option(cmdoptions.editable()) cmd_opts.add_option(cmdoptions.requirements()) cmd_opts.add_option(cmdoptions.src()) cmd_opts.add_option(cmdoptions.no_deps()) cmd_opts.add_option(cmdoptions.build_dir()) cmd_opts.add_option( '--global-option', dest='global_options', action='append', metavar='options', help="Extra global options to be supplied to the setup.py " "call before the 'bdist_wheel' command.") cmd_opts.add_option( '--pre', action='store_true', default=False, help=("Include pre-release and development versions. By default, " "pip only finds stable versions."), ) cmd_opts.add_option(cmdoptions.no_clean()) cmd_opts.add_option(cmdoptions.require_hashes()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, self.parser, ) self.parser.insert_option_group(0, index_opts) self.parser.insert_option_group(0, cmd_opts) def check_required_packages(self): import_or_raise( 'wheel.bdist_wheel', CommandError, "'pip wheel' requires the 'wheel' package. To fix this, run: " "pip install wheel" ) pkg_resources = import_or_raise( 'pkg_resources', CommandError, "'pip wheel' requires setuptools >= 0.8 for dist-info support." 
" To fix this, run: pip install --upgrade setuptools" ) if not hasattr(pkg_resources, 'DistInfoDistribution'): raise CommandError( "'pip wheel' requires setuptools >= 0.8 for dist-info " "support. To fix this, run: pip install --upgrade " "setuptools" ) def run(self, options, args): self.check_required_packages() cmdoptions.resolve_wheel_no_use_binary(options) cmdoptions.check_install_build_global(options) if options.allow_external: warnings.warn( "--allow-external has been deprecated and will be removed in " "the future. Due to changes in the repository protocol, it no " "longer has any effect.", RemovedInPip10Warning, ) if options.allow_all_external: warnings.warn( "--allow-all-external has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) if options.allow_unverified: warnings.warn( "--allow-unverified has been deprecated and will be removed " "in the future. Due to changes in the repository protocol, it " "no longer has any effect.", RemovedInPip10Warning, ) index_urls = [options.index_url] + options.extra_index_urls if options.no_index: logger.info('Ignoring indexes: %s', ','.join(index_urls)) index_urls = [] if options.build_dir: options.build_dir = os.path.abspath(options.build_dir) with self._build_session(options) as session: finder = self._build_package_finder(options, session) build_delete = (not (options.no_clean or options.build_dir)) wheel_cache = WheelCache(options.cache_dir, options.format_control) with BuildDirectory(options.build_dir, delete=build_delete) as build_dir: requirement_set = RequirementSet( build_dir=build_dir, src_dir=options.src_dir, download_dir=None, ignore_dependencies=options.ignore_dependencies, ignore_installed=True, isolated=options.isolated_mode, session=session, wheel_cache=wheel_cache, wheel_download_dir=options.wheel_dir, require_hashes=options.require_hashes ) self.populate_requirement_set( requirement_set, args, 
options, finder, session, self.name, wheel_cache ) if not requirement_set.has_requirements: return try: # build wheels wb = WheelBuilder( requirement_set, finder, build_options=options.build_options or [], global_options=options.global_options or [], ) if not wb.build(): raise CommandError( "Failed to build one or more wheels" ) except PreviousBuildDirError: options.no_clean = True raise finally: if not options.no_clean: requirement_set.cleanup_files()
mit
daineseh/kodi-plugin.video.ted-talks-chinese
youtube_dl/extractor/testurl.py
76
2163
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import ExtractorError


class TestURLIE(InfoExtractor):
    """ Allows addressing of the test cases as test:yout.*be_1 """
    # URL scheme: test:<extractor-name-regex>[_<testcase-number>], e.g.
    # "test:youtube_3" runs test case 3 of the extractor whose IE_NAME
    # matches "youtube".
    IE_DESC = False  # Do not list
    _VALID_URL = r'test(?:url)?:(?P<id>(?P<extractor>.+?)(?:_(?P<num>[0-9]+))?)$'

    def _real_extract(self, url):
        # Extract the URL of the requested extractor's test case and
        # return it as a bare 'url' result for re-dispatch.
        # Imported lazily to avoid a circular import with ..extractor.
        from ..extractor import gen_extractors

        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        extractor_id = mobj.group('extractor')
        all_extractors = gen_extractors()

        # The extractor part of the URL is treated as a case-insensitive
        # regex matched against every registered IE_NAME.
        rex = re.compile(extractor_id, flags=re.IGNORECASE)
        matching_extractors = [
            e for e in all_extractors if rex.search(e.IE_NAME)]

        if len(matching_extractors) == 0:
            raise ExtractorError(
                'No extractors matching %r found' % extractor_id,
                expected=True)
        elif len(matching_extractors) > 1:
            # Is it obvious which one to pick?
            # Disambiguate: an exact (case-insensitive) IE_NAME match
            # wins over a mere regex hit.
            try:
                extractor = next(
                    ie for ie in matching_extractors
                    if ie.IE_NAME.lower() == extractor_id.lower())
            except StopIteration:
                raise ExtractorError(
                    ('Found multiple matching extractors: %s' %
                        ' '.join(ie.IE_NAME for ie in matching_extractors)),
                    expected=True)
        else:
            extractor = matching_extractors[0]

        # Test case index defaults to 0 when no _<num> suffix is given.
        num_str = mobj.group('num')
        num = int(num_str) if num_str else 0

        # Collect the extractor's single _TEST (if any) followed by its
        # _TESTS list, so numbering is stable across both conventions.
        testcases = []
        t = getattr(extractor, '_TEST', None)
        if t:
            testcases.append(t)
        testcases.extend(getattr(extractor, '_TESTS', []))

        try:
            tc = testcases[num]
        except IndexError:
            raise ExtractorError(
                ('Test case %d not found, got only %d tests' %
                    (num, len(testcases))),
                expected=True)

        self.to_screen('Test URL: %s' % tc['url'])

        # '_type': 'url' tells the core to re-run extraction on the
        # test case's real URL with the appropriate extractor.
        return {
            '_type': 'url',
            'url': tc['url'],
            'id': video_id,
        }
gpl-2.0
akshatharaj/django
tests/check_framework/test_model_field_deprecation.py
322
2584
from django.core import checks
from django.db import models
from django.test import SimpleTestCase

from .tests import IsolateModelsMixin


class TestDeprecatedField(IsolateModelsMixin, SimpleTestCase):
    """System checks for fields declaring system_check_deprecated_details."""

    def test_default_details(self):
        # An empty details dict falls back to the generic message,
        # no hint, and the placeholder check id 'fields.WXXX'.
        class MyField(models.Field):
            system_check_deprecated_details = {}

        class Model(models.Model):
            name = MyField()

        model = Model()
        self.assertEqual(model.check(), [
            checks.Warning(
                msg='MyField has been deprecated.',
                hint=None,
                obj=Model._meta.get_field('name'),
                id='fields.WXXX',
            )
        ])

    def test_user_specified_details(self):
        # Explicit 'msg', 'hint' and 'id' keys override the defaults
        # in the emitted Warning.
        class MyField(models.Field):
            system_check_deprecated_details = {
                'msg': 'This field is deprecated and will be removed soon.',
                'hint': 'Use something else.',
                'id': 'fields.W999',
            }

        class Model(models.Model):
            name = MyField()

        model = Model()
        self.assertEqual(model.check(), [
            checks.Warning(
                msg='This field is deprecated and will be removed soon.',
                hint='Use something else.',
                obj=Model._meta.get_field('name'),
                id='fields.W999',
            )
        ])


class TestRemovedField(IsolateModelsMixin, SimpleTestCase):
    """System checks for fields declaring system_check_removed_details.

    Removed fields produce Errors (not Warnings), since they only remain
    for historical-migration support.
    """

    def test_default_details(self):
        # Empty details: generic removal message and placeholder id
        # 'fields.EXXX'.
        class MyField(models.Field):
            system_check_removed_details = {}

        class Model(models.Model):
            name = MyField()

        model = Model()
        self.assertEqual(model.check(), [
            checks.Error(
                msg='MyField has been removed except for support in historical migrations.',
                hint=None,
                obj=Model._meta.get_field('name'),
                id='fields.EXXX',
            )
        ])

    def test_user_specified_details(self):
        # Explicit keys override the defaults in the emitted Error.
        class MyField(models.Field):
            system_check_removed_details = {
                'msg': 'Support for this field is gone.',
                'hint': 'Use something else.',
                'id': 'fields.E999',
            }

        class Model(models.Model):
            name = MyField()

        model = Model()
        self.assertEqual(model.check(), [
            checks.Error(
                msg='Support for this field is gone.',
                hint='Use something else.',
                obj=Model._meta.get_field('name'),
                id='fields.E999',
            )
        ])
bsd-3-clause
hubertta/Wget
testenv/Test-Head.py
23
1069
#!/usr/bin/env python3 from sys import exit from test.http_test import HTTPTest from misc.wget_file import WgetFile """ This test ensures that Wget correctly handles responses to HEAD requests and does not actually download any data """ TEST_NAME = "HEAD Requests" ############# File Definitions ############################################### File1 = "You shall not pass!" A_File = WgetFile ("File1", File1) WGET_OPTIONS = "--method=HEAD" WGET_URLS = [["File1"]] Files = [[A_File]] ExpectedReturnCode = 0 ExpectedDownloadedFiles = [] ################ Pre and Post Test Hooks ##################################### pre_test = { "ServerFiles" : Files, } test_options = { "WgetCommands" : WGET_OPTIONS, "Urls" : WGET_URLS } post_test = { "ExpectedFiles" : ExpectedDownloadedFiles, "ExpectedRetcode" : ExpectedReturnCode } err = HTTPTest ( name=TEST_NAME, pre_hook=pre_test, test_params=test_options, post_hook=post_test ).begin () exit (err)
gpl-3.0
fangxingli/hue
desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Cipher/test_DES3.py
117
15558
# -*- coding: utf-8 -*- # # SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher # # Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Self-test suite for Crypto.Cipher.DES3""" __revision__ = "$Id$" from common import dict # For compatibility with Python 2.1 and 2.2 from Crypto.Util.py3compat import * from binascii import hexlify # This is a list of (plaintext, ciphertext, key, description) tuples. 
SP800_20_A1_KEY = '01' * 24 SP800_20_A2_PT = '00' * 8 test_data = [ # Test vector from Appendix B of NIST SP 800-67 # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block # Cipher" # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf ('54686520717566636b2062726f776e20666f78206a756d70', 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', '0123456789abcdef23456789abcdef01456789abcdef0123', 'NIST SP800-67 B.1'), # Test vectors "The Multi-block Message Test (MMT) for DES and TDES" # http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf ('326a494cd33fe756', 'b22b8d66de970692', '627f460e08104a1043cd265d5840eaf1313edf97df2a8a8c', 'DESMMT #1', dict(mode='CBC', iv='8e29f75ea77e5475')), ('84401f78fe6c10876d8ea23094ea5309', '7b1f7c7e3b1c948ebd04a75ffba7d2f5', '37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae', 'DESMMT #2', dict(mode='CBC', iv='3d1de3cc132e3b65')), # Test vectors from Appendix A of NIST SP 800-20 # "Modes of Operation Validation System for the Triple Data Encryption # Algorithm (TMOVS): Requirements and Procedures" # http://csrc.nist.gov/publications/nistpubs/800-20/800-20.pdf # Table A.1 - Variable Plaintext Known Answer Test ('8000000000000000', '95f8a5e5dd31d900', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #0'), ('4000000000000000', 'dd7f121ca5015619', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #1'), ('2000000000000000', '2e8653104f3834ea', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #2'), ('1000000000000000', '4bd388ff6cd81d4f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #3'), ('0800000000000000', '20b9e767b2fb1456', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #4'), ('0400000000000000', '55579380d77138ef', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #5'), ('0200000000000000', '6cc5defaaf04512f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #6'), ('0100000000000000', '0d9f279ba5d87260', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #7'), ('0080000000000000', 'd9031b0271bd5a0a', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #8'), ('0040000000000000', '424250b37c3dd951', SP800_20_A1_KEY, 'NIST 
SP800-20 A.1 #9'), ('0020000000000000', 'b8061b7ecd9a21e5', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #10'), ('0010000000000000', 'f15d0f286b65bd28', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #11'), ('0008000000000000', 'add0cc8d6e5deba1', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #12'), ('0004000000000000', 'e6d5f82752ad63d1', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #13'), ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #14'), ('0001000000000000', 'f356834379d165cd', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #15'), ('0000800000000000', '2b9f982f20037fa9', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #16'), ('0000400000000000', '889de068a16f0be6', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #17'), ('0000200000000000', 'e19e275d846a1298', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #18'), ('0000100000000000', '329a8ed523d71aec', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #19'), ('0000080000000000', 'e7fce22557d23c97', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #20'), ('0000040000000000', '12a9f5817ff2d65d', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #21'), ('0000020000000000', 'a484c3ad38dc9c19', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #22'), ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #23'), ('0000008000000000', '750d079407521363', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #24'), ('0000004000000000', '64feed9c724c2faf', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #25'), ('0000002000000000', 'f02b263b328e2b60', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #26'), ('0000001000000000', '9d64555a9a10b852', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #27'), ('0000000800000000', 'd106ff0bed5255d7', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #28'), ('0000000400000000', 'e1652c6b138c64a5', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #29'), ('0000000200000000', 'e428581186ec8f46', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #30'), ('0000000100000000', 'aeb5f5ede22d1a36', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #31'), ('0000000080000000', 'e943d7568aec0c5c', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #32'), ('0000000040000000', 'df98c8276f54b04b', 
SP800_20_A1_KEY, 'NIST SP800-20 A.1 #33'), ('0000000020000000', 'b160e4680f6c696f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #34'), ('0000000010000000', 'fa0752b07d9c4ab8', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #35'), ('0000000008000000', 'ca3a2b036dbc8502', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #36'), ('0000000004000000', '5e0905517bb59bcf', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #37'), ('0000000002000000', '814eeb3b91d90726', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #38'), ('0000000001000000', '4d49db1532919c9f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #39'), ('0000000000800000', '25eb5fc3f8cf0621', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #40'), ('0000000000400000', 'ab6a20c0620d1c6f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #41'), ('0000000000200000', '79e90dbc98f92cca', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #42'), ('0000000000100000', '866ecedd8072bb0e', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #43'), ('0000000000080000', '8b54536f2f3e64a8', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #44'), ('0000000000040000', 'ea51d3975595b86b', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #45'), ('0000000000020000', 'caffc6ac4542de31', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #46'), ('0000000000010000', '8dd45a2ddf90796c', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #47'), ('0000000000008000', '1029d55e880ec2d0', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #48'), ('0000000000004000', '5d86cb23639dbea9', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #49'), ('0000000000002000', '1d1ca853ae7c0c5f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #50'), ('0000000000001000', 'ce332329248f3228', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #51'), ('0000000000000800', '8405d1abe24fb942', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #52'), ('0000000000000400', 'e643d78090ca4207', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #53'), ('0000000000000200', '48221b9937748a23', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #54'), ('0000000000000100', 'dd7c0bbd61fafd54', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #55'), ('0000000000000080', '2fbc291a570db5c4', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #56'), ('0000000000000040', 
'e07c30d7e4e26e12', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #57'), ('0000000000000020', '0953e2258e8e90a1', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #58'), ('0000000000000010', '5b711bc4ceebf2ee', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #59'), ('0000000000000008', 'cc083f1e6d9e85f6', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #60'), ('0000000000000004', 'd2fd8867d50d2dfe', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #61'), ('0000000000000002', '06e7ea22ce92708f', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #62'), ('0000000000000001', '166b40b44aba4bd6', SP800_20_A1_KEY, 'NIST SP800-20 A.1 #63'), # Table A.2 - Variable Key Known Answer Test (SP800_20_A2_PT, '95a8d72813daa94d', '8001010101010101'*3, 'NIST SP800-20 A.2 #0'), (SP800_20_A2_PT, '0eec1487dd8c26d5', '4001010101010101'*3, 'NIST SP800-20 A.2 #1'), (SP800_20_A2_PT, '7ad16ffb79c45926', '2001010101010101'*3, 'NIST SP800-20 A.2 #2'), (SP800_20_A2_PT, 'd3746294ca6a6cf3', '1001010101010101'*3, 'NIST SP800-20 A.2 #3'), (SP800_20_A2_PT, '809f5f873c1fd761', '0801010101010101'*3, 'NIST SP800-20 A.2 #4'), (SP800_20_A2_PT, 'c02faffec989d1fc', '0401010101010101'*3, 'NIST SP800-20 A.2 #5'), (SP800_20_A2_PT, '4615aa1d33e72f10', '0201010101010101'*3, 'NIST SP800-20 A.2 #6'), (SP800_20_A2_PT, '2055123350c00858', '0180010101010101'*3, 'NIST SP800-20 A.2 #7'), (SP800_20_A2_PT, 'df3b99d6577397c8', '0140010101010101'*3, 'NIST SP800-20 A.2 #8'), (SP800_20_A2_PT, '31fe17369b5288c9', '0120010101010101'*3, 'NIST SP800-20 A.2 #9'), (SP800_20_A2_PT, 'dfdd3cc64dae1642', '0110010101010101'*3, 'NIST SP800-20 A.2 #10'), (SP800_20_A2_PT, '178c83ce2b399d94', '0108010101010101'*3, 'NIST SP800-20 A.2 #11'), (SP800_20_A2_PT, '50f636324a9b7f80', '0104010101010101'*3, 'NIST SP800-20 A.2 #12'), (SP800_20_A2_PT, 'a8468ee3bc18f06d', '0102010101010101'*3, 'NIST SP800-20 A.2 #13'), (SP800_20_A2_PT, 'a2dc9e92fd3cde92', '0101800101010101'*3, 'NIST SP800-20 A.2 #14'), (SP800_20_A2_PT, 'cac09f797d031287', '0101400101010101'*3, 'NIST SP800-20 A.2 #15'), (SP800_20_A2_PT, '90ba680b22aeb525', 
'0101200101010101'*3, 'NIST SP800-20 A.2 #16'), (SP800_20_A2_PT, 'ce7a24f350e280b6', '0101100101010101'*3, 'NIST SP800-20 A.2 #17'), (SP800_20_A2_PT, '882bff0aa01a0b87', '0101080101010101'*3, 'NIST SP800-20 A.2 #18'), (SP800_20_A2_PT, '25610288924511c2', '0101040101010101'*3, 'NIST SP800-20 A.2 #19'), (SP800_20_A2_PT, 'c71516c29c75d170', '0101020101010101'*3, 'NIST SP800-20 A.2 #20'), (SP800_20_A2_PT, '5199c29a52c9f059', '0101018001010101'*3, 'NIST SP800-20 A.2 #21'), (SP800_20_A2_PT, 'c22f0a294a71f29f', '0101014001010101'*3, 'NIST SP800-20 A.2 #22'), (SP800_20_A2_PT, 'ee371483714c02ea', '0101012001010101'*3, 'NIST SP800-20 A.2 #23'), (SP800_20_A2_PT, 'a81fbd448f9e522f', '0101011001010101'*3, 'NIST SP800-20 A.2 #24'), (SP800_20_A2_PT, '4f644c92e192dfed', '0101010801010101'*3, 'NIST SP800-20 A.2 #25'), (SP800_20_A2_PT, '1afa9a66a6df92ae', '0101010401010101'*3, 'NIST SP800-20 A.2 #26'), (SP800_20_A2_PT, 'b3c1cc715cb879d8', '0101010201010101'*3, 'NIST SP800-20 A.2 #27'), (SP800_20_A2_PT, '19d032e64ab0bd8b', '0101010180010101'*3, 'NIST SP800-20 A.2 #28'), (SP800_20_A2_PT, '3cfaa7a7dc8720dc', '0101010140010101'*3, 'NIST SP800-20 A.2 #29'), (SP800_20_A2_PT, 'b7265f7f447ac6f3', '0101010120010101'*3, 'NIST SP800-20 A.2 #30'), (SP800_20_A2_PT, '9db73b3c0d163f54', '0101010110010101'*3, 'NIST SP800-20 A.2 #31'), (SP800_20_A2_PT, '8181b65babf4a975', '0101010108010101'*3, 'NIST SP800-20 A.2 #32'), (SP800_20_A2_PT, '93c9b64042eaa240', '0101010104010101'*3, 'NIST SP800-20 A.2 #33'), (SP800_20_A2_PT, '5570530829705592', '0101010102010101'*3, 'NIST SP800-20 A.2 #34'), (SP800_20_A2_PT, '8638809e878787a0', '0101010101800101'*3, 'NIST SP800-20 A.2 #35'), (SP800_20_A2_PT, '41b9a79af79ac208', '0101010101400101'*3, 'NIST SP800-20 A.2 #36'), (SP800_20_A2_PT, '7a9be42f2009a892', '0101010101200101'*3, 'NIST SP800-20 A.2 #37'), (SP800_20_A2_PT, '29038d56ba6d2745', '0101010101100101'*3, 'NIST SP800-20 A.2 #38'), (SP800_20_A2_PT, '5495c6abf1e5df51', '0101010101080101'*3, 'NIST SP800-20 A.2 
#39'), (SP800_20_A2_PT, 'ae13dbd561488933', '0101010101040101'*3, 'NIST SP800-20 A.2 #40'), (SP800_20_A2_PT, '024d1ffa8904e389', '0101010101020101'*3, 'NIST SP800-20 A.2 #41'), (SP800_20_A2_PT, 'd1399712f99bf02e', '0101010101018001'*3, 'NIST SP800-20 A.2 #42'), (SP800_20_A2_PT, '14c1d7c1cffec79e', '0101010101014001'*3, 'NIST SP800-20 A.2 #43'), (SP800_20_A2_PT, '1de5279dae3bed6f', '0101010101012001'*3, 'NIST SP800-20 A.2 #44'), (SP800_20_A2_PT, 'e941a33f85501303', '0101010101011001'*3, 'NIST SP800-20 A.2 #45'), (SP800_20_A2_PT, 'da99dbbc9a03f379', '0101010101010801'*3, 'NIST SP800-20 A.2 #46'), (SP800_20_A2_PT, 'b7fc92f91d8e92e9', '0101010101010401'*3, 'NIST SP800-20 A.2 #47'), (SP800_20_A2_PT, 'ae8e5caa3ca04e85', '0101010101010201'*3, 'NIST SP800-20 A.2 #48'), (SP800_20_A2_PT, '9cc62df43b6eed74', '0101010101010180'*3, 'NIST SP800-20 A.2 #49'), (SP800_20_A2_PT, 'd863dbb5c59a91a0', '0101010101010140'*3, 'NIST SP800-20 A.2 #50'), (SP800_20_A2_PT, 'a1ab2190545b91d7', '0101010101010120'*3, 'NIST SP800-20 A.2 #51'), (SP800_20_A2_PT, '0875041e64c570f7', '0101010101010110'*3, 'NIST SP800-20 A.2 #52'), (SP800_20_A2_PT, '5a594528bebef1cc', '0101010101010108'*3, 'NIST SP800-20 A.2 #53'), (SP800_20_A2_PT, 'fcdb3291de21f0c0', '0101010101010104'*3, 'NIST SP800-20 A.2 #54'), (SP800_20_A2_PT, '869efd7f9f265a09', '0101010101010102'*3, 'NIST SP800-20 A.2 #55'), # "Two-key 3DES". Test vector generated using PyCrypto 2.0.1. # This test is designed to test the DES3 API, not the correctness of the # output. ('21e81b7ade88a259', '5c577d4d9b20c0f8', '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), # The following test vectors have been generated with gpg v1.4.0. # The command line used was: # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ # --disable-mdc --s2k-mode 0 --output ct pt # For an explanation, see test_AES.py . 
( 'ac1762037074324fb53ba3596f73656d69746556616c6c6579', # Plaintext, 'YosemiteValley' '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d', # Ciphertext '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2', # Key (hash of 'BearsAhead') 'GPG Test Vector #1', dict(mode='OPENPGP', iv='cd47e2afb8b7e4b0', encrypted_iv='6a7eef0b58050e8b904a' ) ), ] def get_tests(config={}): from Crypto.Cipher import DES3 from common import make_block_tests return make_block_tests(DES3, "DES3", test_data) if __name__ == '__main__': import unittest suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
apache-2.0
RomainBrault/scikit-learn
sklearn/grid_search.py
5
40816
""" The :mod:`sklearn.grid_search` includes utilities to fine-tune the parameters of an estimator. """ from __future__ import print_function # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>, # Gael Varoquaux <gael.varoquaux@normalesup.org> # Andreas Mueller <amueller@ais.uni-bonn.de> # Olivier Grisel <olivier.grisel@ensta.org> # License: BSD 3 clause from abc import ABCMeta, abstractmethod from collections import Mapping, namedtuple, Sized from functools import partial, reduce from itertools import product import operator import warnings import numpy as np from .base import BaseEstimator, is_classifier, clone from .base import MetaEstimatorMixin from .cross_validation import check_cv from .cross_validation import _fit_and_score from .externals.joblib import Parallel, delayed from .externals import six from .utils import check_random_state from .utils.random import sample_without_replacement from .utils.validation import _num_samples, indexable from .utils.metaestimators import if_delegate_has_method from .metrics.scorer import check_scoring from .exceptions import ChangedBehaviorWarning __all__ = ['GridSearchCV', 'ParameterGrid', 'fit_grid_point', 'ParameterSampler', 'RandomizedSearchCV'] warnings.warn("This module was deprecated in version 0.18 in favor of the " "model_selection module into which all the refactored classes " "and functions are moved. This module will be removed in 0.20.", DeprecationWarning) class ParameterGrid(object): """Grid of parameters with a discrete number of values for each. .. deprecated:: 0.18 This module will be removed in 0.20. Use :class:`sklearn.model_selection.ParameterGrid` instead. Can be used to iterate over parameter value combinations with the Python built-in function iter. Read more in the :ref:`User Guide <grid_search>`. Parameters ---------- param_grid : dict of string to sequence, or sequence of such The parameter grid to explore, as a dictionary mapping estimator parameters to sequences of allowed values. 
An empty dict signifies default parameters. A sequence of dicts signifies a sequence of grids to search, and is useful to avoid exploring parameter combinations that make no sense or have no effect. See the examples below. Examples -------- >>> from sklearn.grid_search import ParameterGrid >>> param_grid = {'a': [1, 2], 'b': [True, False]} >>> list(ParameterGrid(param_grid)) == ( ... [{'a': 1, 'b': True}, {'a': 1, 'b': False}, ... {'a': 2, 'b': True}, {'a': 2, 'b': False}]) True >>> grid = [{'kernel': ['linear']}, {'kernel': ['rbf'], 'gamma': [1, 10]}] >>> list(ParameterGrid(grid)) == [{'kernel': 'linear'}, ... {'kernel': 'rbf', 'gamma': 1}, ... {'kernel': 'rbf', 'gamma': 10}] True >>> ParameterGrid(grid)[1] == {'kernel': 'rbf', 'gamma': 1} True See also -------- :class:`GridSearchCV`: uses ``ParameterGrid`` to perform a full parallelized parameter search. """ def __init__(self, param_grid): if isinstance(param_grid, Mapping): # wrap dictionary in a singleton list to support either dict # or list of dicts param_grid = [param_grid] self.param_grid = param_grid def __iter__(self): """Iterate over the points in the grid. Returns ------- params : iterator over dict of string to any Yields dictionaries mapping each estimator parameter to one of its allowed values. """ for p in self.param_grid: # Always sort the keys of a dictionary, for reproducibility items = sorted(p.items()) if not items: yield {} else: keys, values = zip(*items) for v in product(*values): params = dict(zip(keys, v)) yield params def __len__(self): """Number of points on the grid.""" # Product function that can handle iterables (np.product can't). 
product = partial(reduce, operator.mul) return sum(product(len(v) for v in p.values()) if p else 1 for p in self.param_grid) def __getitem__(self, ind): """Get the parameters that would be ``ind``th in iteration Parameters ---------- ind : int The iteration index Returns ------- params : dict of string to any Equal to list(self)[ind] """ # This is used to make discrete sampling without replacement memory # efficient. for sub_grid in self.param_grid: # XXX: could memoize information used here if not sub_grid: if ind == 0: return {} else: ind -= 1 continue # Reverse so most frequent cycling parameter comes first keys, values_lists = zip(*sorted(sub_grid.items())[::-1]) sizes = [len(v_list) for v_list in values_lists] total = np.product(sizes) if ind >= total: # Try the next grid ind -= total else: out = {} for key, v_list, n in zip(keys, values_lists, sizes): ind, offset = divmod(ind, n) out[key] = v_list[offset] return out raise IndexError('ParameterGrid index out of range') class ParameterSampler(object): """Generator on parameters sampled from given distributions. .. deprecated:: 0.18 This module will be removed in 0.20. Use :class:`sklearn.model_selection.ParameterSampler` instead. Non-deterministic iterable over random candidate combinations for hyper- parameter search. If all parameters are presented as a list, sampling without replacement is performed. If at least one parameter is given as a distribution, sampling with replacement is used. It is highly recommended to use continuous distributions for continuous parameters. Note that as of SciPy 0.12, the ``scipy.stats.distributions`` do not accept a custom RNG instance and always use the singleton RNG from ``numpy.random``. Hence setting ``random_state`` will not guarantee a deterministic iteration whenever ``scipy.stats`` distributions are used to define the parameter search space. Read more in the :ref:`User Guide <grid_search>`. 
Parameters ---------- param_distributions : dict Dictionary where the keys are parameters and values are distributions from which a parameter is to be sampled. Distributions either have to provide a ``rvs`` function to sample from them, or can be given as a list of values, where a uniform distribution is assumed. n_iter : integer Number of parameter settings that are produced. random_state : int, RandomState instance or None, optional (default=None) Pseudo random number generator state used for random uniform sampling from lists of possible values instead of scipy.stats distributions. If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- params : dict of string to any **Yields** dictionaries mapping each estimator parameter to as sampled value. Examples -------- >>> from sklearn.grid_search import ParameterSampler >>> from scipy.stats.distributions import expon >>> import numpy as np >>> np.random.seed(0) >>> param_grid = {'a':[1, 2], 'b': expon()} >>> param_list = list(ParameterSampler(param_grid, n_iter=4)) >>> rounded_list = [dict((k, round(v, 6)) for (k, v) in d.items()) ... for d in param_list] >>> rounded_list == [{'b': 0.89856, 'a': 1}, ... {'b': 0.923223, 'a': 1}, ... {'b': 1.878964, 'a': 2}, ... 
{'b': 1.038159, 'a': 2}] True """ def __init__(self, param_distributions, n_iter, random_state=None): self.param_distributions = param_distributions self.n_iter = n_iter self.random_state = random_state def __iter__(self): # check if all distributions are given as lists # in this case we want to sample without replacement all_lists = np.all([not hasattr(v, "rvs") for v in self.param_distributions.values()]) rnd = check_random_state(self.random_state) if all_lists: # look up sampled parameter settings in parameter grid param_grid = ParameterGrid(self.param_distributions) grid_size = len(param_grid) if grid_size < self.n_iter: raise ValueError( "The total space of parameters %d is smaller " "than n_iter=%d." % (grid_size, self.n_iter) + " For exhaustive searches, use GridSearchCV.") for i in sample_without_replacement(grid_size, self.n_iter, random_state=rnd): yield param_grid[i] else: # Always sort the keys of a dictionary, for reproducibility items = sorted(self.param_distributions.items()) for _ in six.moves.range(self.n_iter): params = dict() for k, v in items: if hasattr(v, "rvs"): params[k] = v.rvs() else: params[k] = v[rnd.randint(len(v))] yield params def __len__(self): """Number of points that will be sampled.""" return self.n_iter def fit_grid_point(X, y, estimator, parameters, train, test, scorer, verbose, error_score='raise', **fit_params): """Run fit on one set of parameters. .. deprecated:: 0.18 This module will be removed in 0.20. Use :func:`sklearn.model_selection.fit_grid_point` instead. Parameters ---------- X : array-like, sparse matrix or list Input data. y : array-like or None Targets for input data. estimator : estimator object A object of that type is instantiated for each grid point. This is assumed to implement the scikit-learn estimator interface. Either estimator needs to provide a ``score`` function, or ``scoring`` must be passed. parameters : dict Parameters to be set on estimator for this grid point. 
train : ndarray, dtype int or bool Boolean mask or indices for training set. test : ndarray, dtype int or bool Boolean mask or indices for test set. scorer : callable or None. If provided must be a scorer callable object / function with signature ``scorer(estimator, X, y)``. verbose : int Verbosity level. **fit_params : kwargs Additional parameter passed to the fit function of the estimator. error_score : 'raise' (default) or numeric Value to assign to the score if an error occurs in estimator fitting. If set to 'raise', the error is raised. If a numeric value is given, FitFailedWarning is raised. This parameter does not affect the refit step, which will always raise the error. Returns ------- score : float Score of this parameter setting on given training / test split. parameters : dict The parameters that have been evaluated. n_samples_test : int Number of test samples in this split. """ score, n_samples_test, _ = _fit_and_score(estimator, X, y, scorer, train, test, verbose, parameters, fit_params, error_score) return score, parameters, n_samples_test def _check_param_grid(param_grid): if hasattr(param_grid, 'items'): param_grid = [param_grid] for p in param_grid: for name, v in p.items(): if isinstance(v, np.ndarray) and v.ndim > 1: raise ValueError("Parameter array should be one-dimensional.") check = [isinstance(v, k) for k in (list, tuple, np.ndarray)] if True not in check: raise ValueError("Parameter values for parameter ({0}) need " "to be a sequence.".format(name)) if len(v) == 0: raise ValueError("Parameter values for parameter ({0}) need " "to be a non-empty sequence.".format(name)) class _CVScoreTuple (namedtuple('_CVScoreTuple', ('parameters', 'mean_validation_score', 'cv_validation_scores'))): # A raw namedtuple is very memory efficient as it packs the attributes # in a struct to get rid of the __dict__ of attributes in particular it # does not copy the string for the keys on each instance. 
# By deriving a namedtuple class just to introduce the __repr__ method we # would also reintroduce the __dict__ on the instance. By telling the # Python interpreter that this subclass uses static __slots__ instead of # dynamic attributes. Furthermore we don't need any additional slot in the # subclass so we set __slots__ to the empty tuple. __slots__ = () def __repr__(self): """Simple custom repr to summarize the main info""" return "mean: {0:.5f}, std: {1:.5f}, params: {2}".format( self.mean_validation_score, np.std(self.cv_validation_scores), self.parameters) class BaseSearchCV(six.with_metaclass(ABCMeta, BaseEstimator, MetaEstimatorMixin)): """Base class for hyper parameter search with cross-validation.""" @abstractmethod def __init__(self, estimator, scoring=None, fit_params=None, n_jobs=1, iid=True, refit=True, cv=None, verbose=0, pre_dispatch='2*n_jobs', error_score='raise'): self.scoring = scoring self.estimator = estimator self.n_jobs = n_jobs self.fit_params = fit_params if fit_params is not None else {} self.iid = iid self.refit = refit self.cv = cv self.verbose = verbose self.pre_dispatch = pre_dispatch self.error_score = error_score @property def _estimator_type(self): return self.estimator._estimator_type @property def classes_(self): return self.best_estimator_.classes_ def score(self, X, y=None): """Returns the score on the given data, if the estimator has been refit. This uses the score defined by ``scoring`` where provided, and the ``best_estimator_.score`` method otherwise. Parameters ---------- X : array-like, shape = [n_samples, n_features] Input data, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] or [n_samples, n_output], optional Target relative to X for classification or regression; None for unsupervised learning. Returns ------- score : float Notes ----- * The long-standing behavior of this method changed in version 0.16. 
* It no longer uses the metric provided by ``estimator.score`` if the ``scoring`` parameter was set when fitting. """ if self.scorer_ is None: raise ValueError("No score function explicitly defined, " "and the estimator doesn't provide one %s" % self.best_estimator_) if self.scoring is not None and hasattr(self.best_estimator_, 'score'): warnings.warn("The long-standing behavior to use the estimator's " "score function in {0}.score has changed. The " "scoring parameter is now used." "".format(self.__class__.__name__), ChangedBehaviorWarning) return self.scorer_(self.best_estimator_, X, y) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def predict(self, X): """Call predict on the estimator with the best found parameters. Only available if ``refit=True`` and the underlying estimator supports ``predict``. Parameters ----------- X : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. """ return self.best_estimator_.predict(X) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def predict_proba(self, X): """Call predict_proba on the estimator with the best found parameters. Only available if ``refit=True`` and the underlying estimator supports ``predict_proba``. Parameters ----------- X : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. """ return self.best_estimator_.predict_proba(X) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def predict_log_proba(self, X): """Call predict_log_proba on the estimator with the best found parameters. Only available if ``refit=True`` and the underlying estimator supports ``predict_log_proba``. Parameters ----------- X : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. 
""" return self.best_estimator_.predict_log_proba(X) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def decision_function(self, X): """Call decision_function on the estimator with the best found parameters. Only available if ``refit=True`` and the underlying estimator supports ``decision_function``. Parameters ----------- X : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. """ return self.best_estimator_.decision_function(X) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def transform(self, X): """Call transform on the estimator with the best found parameters. Only available if the underlying estimator supports ``transform`` and ``refit=True``. Parameters ----------- X : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. """ return self.best_estimator_.transform(X) @if_delegate_has_method(delegate=('best_estimator_', 'estimator')) def inverse_transform(self, Xt): """Call inverse_transform on the estimator with the best found parameters. Only available if the underlying estimator implements ``inverse_transform`` and ``refit=True``. Parameters ----------- Xt : indexable, length n_samples Must fulfill the input assumptions of the underlying estimator. 
""" return self.best_estimator_.inverse_transform(Xt) def _fit(self, X, y, parameter_iterable): """Actual fitting, performing the search over parameters.""" estimator = self.estimator cv = self.cv self.scorer_ = check_scoring(self.estimator, scoring=self.scoring) n_samples = _num_samples(X) X, y = indexable(X, y) if y is not None: if len(y) != n_samples: raise ValueError('Target variable (y) has a different number ' 'of samples (%i) than data (X: %i samples)' % (len(y), n_samples)) cv = check_cv(cv, X, y, classifier=is_classifier(estimator)) if self.verbose > 0: if isinstance(parameter_iterable, Sized): n_candidates = len(parameter_iterable) print("Fitting {0} folds for each of {1} candidates, totalling" " {2} fits".format(len(cv), n_candidates, n_candidates * len(cv))) base_estimator = clone(self.estimator) pre_dispatch = self.pre_dispatch out = Parallel( n_jobs=self.n_jobs, verbose=self.verbose, pre_dispatch=pre_dispatch )( delayed(_fit_and_score)(clone(base_estimator), X, y, self.scorer_, train, test, self.verbose, parameters, self.fit_params, return_parameters=True, error_score=self.error_score) for parameters in parameter_iterable for train, test in cv) # Out is a list of triplet: score, estimator, n_test_samples n_fits = len(out) n_folds = len(cv) scores = list() grid_scores = list() for grid_start in range(0, n_fits, n_folds): n_test_samples = 0 score = 0 all_scores = [] for this_score, this_n_test_samples, _, parameters in \ out[grid_start:grid_start + n_folds]: all_scores.append(this_score) if self.iid: this_score *= this_n_test_samples n_test_samples += this_n_test_samples score += this_score if self.iid: score /= float(n_test_samples) else: score /= float(n_folds) scores.append((score, parameters)) # TODO: shall we also store the test_fold_sizes? 
grid_scores.append(_CVScoreTuple( parameters, score, np.array(all_scores))) # Store the computed scores self.grid_scores_ = grid_scores # Find the best parameters by comparing on the mean validation score: # note that `sorted` is deterministic in the way it breaks ties best = sorted(grid_scores, key=lambda x: x.mean_validation_score, reverse=True)[0] self.best_params_ = best.parameters self.best_score_ = best.mean_validation_score if self.refit: # fit the best estimator using the entire dataset # clone first to work around broken estimators best_estimator = clone(base_estimator).set_params( **best.parameters) if y is not None: best_estimator.fit(X, y, **self.fit_params) else: best_estimator.fit(X, **self.fit_params) self.best_estimator_ = best_estimator return self class GridSearchCV(BaseSearchCV): """Exhaustive search over specified parameter values for an estimator. .. deprecated:: 0.18 This module will be removed in 0.20. Use :class:`sklearn.model_selection.GridSearchCV` instead. Important members are fit, predict. GridSearchCV implements a "fit" and a "score" method. It also implements "predict", "predict_proba", "decision_function", "transform" and "inverse_transform" if they are implemented in the estimator used. The parameters of the estimator used to apply these methods are optimized by cross-validated grid-search over a parameter grid. Read more in the :ref:`User Guide <grid_search>`. Parameters ---------- estimator : estimator object. A object of that type is instantiated for each grid point. This is assumed to implement the scikit-learn estimator interface. Either estimator needs to provide a ``score`` function, or ``scoring`` must be passed. param_grid : dict or list of dictionaries Dictionary with parameters names (string) as keys and lists of parameter settings to try as values, or a list of such dictionaries, in which case the grids spanned by each dictionary in the list are explored. This enables searching over any sequence of parameter settings. 
scoring : string, callable or None, default=None A string (see model evaluation documentation) or a scorer callable object / function with signature ``scorer(estimator, X, y)``. If ``None``, the ``score`` method of the estimator is used. fit_params : dict, optional Parameters to pass to the fit method. n_jobs: int, default: 1 : The maximum number of estimators fit in parallel. - If -1 all CPUs are used. - If 1 is given, no parallel computing code is used at all, which is useful for debugging. - For ``n_jobs`` below -1, ``(n_cpus + n_jobs + 1)`` are used. For example, with ``n_jobs = -2`` all CPUs but one are used. .. versionchanged:: 0.17 Upgraded to joblib 0.9.3. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' iid : boolean, default=True If True, the data is assumed to be identically distributed across the folds, and the loss minimized is the total loss per sample, and not the mean loss across the folds. cv : int, cross-validation generator or an iterable, optional Determines the cross-validation splitting strategy. Possible inputs for cv are: - None, to use the default 3-fold cross-validation, - integer, to specify the number of folds. - An object to be used as a cross-validation generator. - An iterable yielding train/test splits. For integer/None inputs, if the estimator is a classifier and ``y`` is either binary or multiclass, :class:`sklearn.model_selection.StratifiedKFold` is used. 
In all other cases, :class:`sklearn.model_selection.KFold` is used. Refer :ref:`User Guide <cross_validation>` for the various cross-validation strategies that can be used here. refit : boolean, default=True Refit the best estimator with the entire dataset. If "False", it is impossible to make predictions using this GridSearchCV instance after fitting. verbose : integer Controls the verbosity: the higher, the more messages. error_score : 'raise' (default) or numeric Value to assign to the score if an error occurs in estimator fitting. If set to 'raise', the error is raised. If a numeric value is given, FitFailedWarning is raised. This parameter does not affect the refit step, which will always raise the error. Examples -------- >>> from sklearn import svm, grid_search, datasets >>> iris = datasets.load_iris() >>> parameters = {'kernel':('linear', 'rbf'), 'C':[1, 10]} >>> svr = svm.SVC() >>> clf = grid_search.GridSearchCV(svr, parameters) >>> clf.fit(iris.data, iris.target) ... # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS GridSearchCV(cv=None, error_score=..., estimator=SVC(C=1.0, cache_size=..., class_weight=..., coef0=..., decision_function_shape='ovr', degree=..., gamma=..., kernel='rbf', max_iter=-1, probability=False, random_state=None, shrinking=True, tol=..., verbose=False), fit_params={}, iid=..., n_jobs=1, param_grid=..., pre_dispatch=..., refit=..., scoring=..., verbose=...) Attributes ---------- grid_scores_ : list of named tuples Contains scores for all parameter combinations in param_grid. Each entry corresponds to one parameter setting. Each named tuple has the attributes: * ``parameters``, a dict of parameter settings * ``mean_validation_score``, the mean score over the cross-validation folds * ``cv_validation_scores``, the list of scores for each fold best_estimator_ : estimator Estimator that was chosen by the search, i.e. estimator which gave highest score (or smallest loss if specified) on the left out data. Not available if refit=False. 
best_score_ : float Score of best_estimator on the left out data. best_params_ : dict Parameter setting that gave the best results on the hold out data. scorer_ : function Scorer function used on the held out data to choose the best parameters for the model. Notes ------ The parameters selected are those that maximize the score of the left out data, unless an explicit score is passed in which case it is used instead. If `n_jobs` was set to a value higher than one, the data is copied for each point in the grid (and not `n_jobs` times). This is done for efficiency reasons if individual jobs take very little time, but may raise errors if the dataset is large and not enough memory is available. A workaround in this case is to set `pre_dispatch`. Then, the memory is copied only `pre_dispatch` many times. A reasonable value for `pre_dispatch` is `2 * n_jobs`. See Also --------- :class:`ParameterGrid`: generates all the combinations of a hyperparameter grid. :func:`sklearn.cross_validation.train_test_split`: utility function to split the data into a development set usable for fitting a GridSearchCV instance and an evaluation set for its final evaluation. :func:`sklearn.metrics.make_scorer`: Make a scorer from a performance metric or loss function. """ def __init__(self, estimator, param_grid, scoring=None, fit_params=None, n_jobs=1, iid=True, refit=True, cv=None, verbose=0, pre_dispatch='2*n_jobs', error_score='raise'): super(GridSearchCV, self).__init__( estimator, scoring, fit_params, n_jobs, iid, refit, cv, verbose, pre_dispatch, error_score) self.param_grid = param_grid _check_param_grid(param_grid) def fit(self, X, y=None): """Run fit with all sets of parameters. Parameters ---------- X : array-like, shape = [n_samples, n_features] Training vector, where n_samples is the number of samples and n_features is the number of features. 
        y : array-like, shape = [n_samples] or [n_samples, n_output], optional
            Target relative to X for classification or regression;
            None for unsupervised learning.

        """
        # Exhaustive search: every combination in the grid is evaluated.
        return self._fit(X, y, ParameterGrid(self.param_grid))


class RandomizedSearchCV(BaseSearchCV):
    """Randomized search on hyper parameters.

    .. deprecated:: 0.18
        This module will be removed in 0.20.
        Use :class:`sklearn.model_selection.RandomizedSearchCV` instead.

    RandomizedSearchCV implements a "fit" and a "score" method.
    It also implements "predict", "predict_proba", "decision_function",
    "transform" and "inverse_transform" if they are implemented in the
    estimator used.

    The parameters of the estimator used to apply these methods are optimized
    by cross-validated search over parameter settings.

    In contrast to GridSearchCV, not all parameter values are tried out, but
    rather a fixed number of parameter settings is sampled from the specified
    distributions. The number of parameter settings that are tried is
    given by n_iter.

    If all parameters are presented as a list,
    sampling without replacement is performed. If at least one parameter
    is given as a distribution, sampling with replacement is used.
    It is highly recommended to use continuous distributions for continuous
    parameters.

    Read more in the :ref:`User Guide <randomized_parameter_search>`.

    Parameters
    ----------
    estimator : estimator object.
        A object of that type is instantiated for each grid point.
        This is assumed to implement the scikit-learn estimator interface.
        Either estimator needs to provide a ``score`` function,
        or ``scoring`` must be passed.

    param_distributions : dict
        Dictionary with parameters names (string) as keys and distributions
        or lists of parameters to try. Distributions must provide a ``rvs``
        method for sampling (such as those from scipy.stats.distributions).
        If a list is given, it is sampled uniformly.

    n_iter : int, default=10
        Number of parameter settings that are sampled. n_iter trades
        off runtime vs quality of the solution.

    scoring : string, callable or None, default=None
        A string (see model evaluation documentation) or
        a scorer callable object / function with signature
        ``scorer(estimator, X, y)``.
        If ``None``, the ``score`` method of the estimator is used.

    fit_params : dict, optional
        Parameters to pass to the fit method.

    n_jobs: int, default: 1 :
        The maximum number of estimators fit in parallel.

            - If -1 all CPUs are used.

            - If 1 is given, no parallel computing code is used at all,
              which is useful for debugging.

            - For ``n_jobs`` below -1, ``(n_cpus + n_jobs + 1)`` are used.
              For example, with ``n_jobs = -2`` all CPUs but one are used.

    pre_dispatch : int, or string, optional
        Controls the number of jobs that get dispatched during parallel
        execution. Reducing this number can be useful to avoid an
        explosion of memory consumption when more jobs get dispatched
        than CPUs can process. This parameter can be:

            - None, in which case all the jobs are immediately
              created and spawned. Use this for lightweight and
              fast-running jobs, to avoid delays due to on-demand
              spawning of the jobs

            - An int, giving the exact number of total jobs that are
              spawned

            - A string, giving an expression as a function of n_jobs,
              as in '2*n_jobs'

    iid : boolean, default=True
        If True, the data is assumed to be identically distributed across
        the folds, and the loss minimized is the total loss per sample,
        and not the mean loss across the folds.

    cv : int, cross-validation generator or an iterable, optional
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:

        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - An object to be used as a cross-validation generator.
        - An iterable yielding train/test splits.

        For integer/None inputs, if the estimator is a classifier and ``y`` is
        either binary or multiclass,
        :class:`sklearn.model_selection.StratifiedKFold` is used. In all
        other cases, :class:`sklearn.model_selection.KFold` is used.

        Refer :ref:`User Guide <cross_validation>` for the various
        cross-validation strategies that can be used here.

    refit : boolean, default=True
        Refit the best estimator with the entire dataset.
        If "False", it is impossible to make predictions using
        this RandomizedSearchCV instance after fitting.

    verbose : integer
        Controls the verbosity: the higher, the more messages.

    random_state : int, RandomState instance or None, optional, default=None
        Pseudo random number generator state used for random uniform sampling
        from lists of possible values instead of scipy.stats distributions.
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    error_score : 'raise' (default) or numeric
        Value to assign to the score if an error occurs in estimator fitting.
        If set to 'raise', the error is raised. If a numeric value is given,
        FitFailedWarning is raised. This parameter does not affect the refit
        step, which will always raise the error.

    Attributes
    ----------
    grid_scores_ : list of named tuples
        Contains scores for all parameter combinations in param_grid.
        Each entry corresponds to one parameter setting.
        Each named tuple has the attributes:

            * ``parameters``, a dict of parameter settings
            * ``mean_validation_score``, the mean score over the
              cross-validation folds
            * ``cv_validation_scores``, the list of scores for each fold

    best_estimator_ : estimator
        Estimator that was chosen by the search, i.e. estimator
        which gave highest score (or smallest loss if specified)
        on the left out data. Not available if refit=False.

    best_score_ : float
        Score of best_estimator on the left out data.

    best_params_ : dict
        Parameter setting that gave the best results on the hold out data.

    Notes
    -----
    The parameters selected are those that maximize the score of the held-out
    data, according to the scoring parameter.

    If `n_jobs` was set to a value higher than one, the data is copied for each
    parameter setting(and not `n_jobs` times). This is done for efficiency
    reasons if individual jobs take very little time, but may raise errors if
    the dataset is large and not enough memory is available.  A workaround in
    this case is to set `pre_dispatch`. Then, the memory is copied only
    `pre_dispatch` many times. A reasonable value for `pre_dispatch` is `2 *
    n_jobs`.

    See Also
    --------
    :class:`GridSearchCV`:
        Does exhaustive search over a grid of parameters.

    :class:`ParameterSampler`:
        A generator over parameter settings, constructed from
        param_distributions.

    """

    def __init__(self, estimator, param_distributions, n_iter=10, scoring=None,
                 fit_params=None, n_jobs=1, iid=True, refit=True, cv=None,
                 verbose=0, pre_dispatch='2*n_jobs', random_state=None,
                 error_score='raise'):
        # Only the sampling-specific state is stored here; everything shared
        # with GridSearchCV is handled by the BaseSearchCV constructor below.
        self.param_distributions = param_distributions
        self.n_iter = n_iter
        self.random_state = random_state
        super(RandomizedSearchCV, self).__init__(
            estimator=estimator, scoring=scoring, fit_params=fit_params,
            n_jobs=n_jobs, iid=iid, refit=refit, cv=cv, verbose=verbose,
            pre_dispatch=pre_dispatch, error_score=error_score)

    def fit(self, X, y=None):
        """Run fit on the estimator with randomly drawn parameters.

        Parameters
        ----------
        X : array-like, shape = [n_samples, n_features]
            Training vector, where n_samples in the number of samples and
            n_features is the number of features.

        y : array-like, shape = [n_samples] or [n_samples, n_output], optional
            Target relative to X for classification or regression;
            None for unsupervised learning.

        """
        # Unlike GridSearchCV, only n_iter candidate settings are drawn
        # (seeded by random_state for reproducibility).
        sampled_params = ParameterSampler(self.param_distributions,
                                          self.n_iter,
                                          random_state=self.random_state)
        return self._fit(X, y, sampled_params)
bsd-3-clause
ptrendx/mxnet
tools/coreml/test/test_mxnet_converter.py
1
32762
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import unittest import mxnet as mx import numpy as np from converter._mxnet_converter import convert from collections import namedtuple from converter import utils def _mxnet_remove_batch(input_data): for blob in input_data: input_data[blob] = np.reshape(input_data[blob], input_data[blob].shape[1:]) return input_data def _get_mxnet_module(net, data_shapes, mode, label_names, input_names=None): """ Given a symbolic graph, input shape and the initialization mode, returns an MXNet module. """ mx.random.seed(1993) mod = utils.create_module(sym=net, data_shapes=data_shapes, label_shapes=input_names, label_names=label_names) if mode == 'random': mod.init_params( initializer=mx.init.Uniform(scale=.1) ) elif mode == 'zeros': mod.init_params( initializer=mx.init.Zero() ) elif mode == 'ones': mod.init_params( initializer=mx.init.One() ) else: Exception(KeyError("%s is not a valid initialization mode" % mode)) return mod class SingleLayerTest(unittest.TestCase): """ Unit test class for testing where converter is able to convert individual layers or not. In order to do so, it converts model and generates preds on both CoreML and MXNet and check they are the same. 
""" def _test_mxnet_model(self, net, input_shape, mode, class_labels=None, coreml_mode=None, label_names=None, delta=1e-2, pre_processing_args=None, input_name='data'): """ Helper method that convert the CoreML model into CoreML and compares the predictions over random data. Parameters ---------- net: MXNet Symbol Graph The graph that we'll be converting into CoreML. input_shape: tuple of ints The shape of input data. Generally of the format (batch-size, channels, height, width) mode: (random|zeros|ones) The mode to use in order to set the parameters (weights and biases). label_names: list of strings The names of the output labels. Default: None delta: float The maximum difference b/w predictions of MXNet and CoreML that is tolerable. input_name: str The name of the input variable to the symbolic graph. """ data_shapes = [(input_name, input_shape)] mod = _get_mxnet_module(net, data_shapes, mode, label_names) # Generate some dummy data input_data = {input_name: np.random.uniform(-10., 10., input_shape)} Batch = namedtuple('Batch', ['data']) mod.forward(Batch([mx.nd.array(input_data[input_name])])) mxnet_preds = mod.get_outputs()[0].asnumpy().flatten() # Get predictions from coreml coreml_model = convert( model=mod, class_labels=class_labels, mode=coreml_mode, input_shape={input_name: input_shape}, preprocessor_args=pre_processing_args ) coreml_preds = coreml_model.predict(_mxnet_remove_batch(input_data)).values()[0].flatten() # Check prediction accuracy self.assertEquals(len(mxnet_preds), len(coreml_preds)) for i in range(len(mxnet_preds)): self.assertAlmostEquals(mxnet_preds[i], coreml_preds[i], delta=delta) def test_tiny_inner_product_zero_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) self._test_mxnet_model(net, input_shape=input_shape, mode='zeros') def test_really_tiny_inner_product_ones_input(self): np.random.seed(1988) input_shape = (1, 1) net = 
mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=1) self._test_mxnet_model(net, input_shape=input_shape, mode='ones') def test_really_tiny_2_inner_product_ones_input(self): np.random.seed(1988) input_shape = (1, 1) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) self._test_mxnet_model(net, input_shape=input_shape, mode='ones') def test_tiny_inner_product_ones_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) self._test_mxnet_model(net, input_shape=input_shape, mode='ones', delta=0.05) def test_tiny_inner_product_random_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_inner_product_no_bias(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5, no_bias=True) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_softmax_random_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.SoftmaxOutput(net, name='softmax') self._test_mxnet_model(net, input_shape=input_shape, mode='random', label_names=['softmax_label']) def test_tiny_relu_activation_random_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.Activation(net, name='relu1', act_type="relu") self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_sigmoid_activation_random_input(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, 
name='fc1', num_hidden=5) net = mx.sym.Activation(net, name='sigmoid1', act_type="sigmoid") self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_tanh_activation_random_input(self): np.random.seed(1988) input_shape = (1, 10) # Define a model net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.Activation(net, name='tanh1', act_type="tanh") self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_really_tiny_conv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (1 ,1) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_ones_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='ones', delta=0.05) def test_tiny_conv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_asym_conv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5 ,3) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, 
input_shape=input_shape, mode='random') def test_tiny_asym_conv_random_asym_input(self): np.random.seed(1988) input_shape = (1, 1, 28, 18) num_filter = 16 kernel = (5, 3) stride = (1, 1) pad = (0, 0) dilate = (1, 1) # define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1', dilate=dilate) net = mx.sym.Activation(net, name='tanh', act_type="tanh") self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_valid_pooling_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (2, 2) stride = (2, 2) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) net = mx.symbol.Pooling( data=net, kernel=kernel, stride=stride, pad=pad, name='pool_1', pool_type='avg', pooling_convention='valid' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_pooling_full_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (2, 2) stride = (2, 2) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) net = mx.symbol.Pooling( data=net, kernel=kernel, stride=stride, pad=pad, name='pool_1', pool_type='avg', pooling_convention='full' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_pooling_full_random_input_with_padding(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 2 kernel = (2, 2) stride = (2, 2) pad = (1, 1) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) net = mx.symbol.Pooling( data=net, kernel=kernel, stride=stride, pad=pad, name='pool_1', pool_type='avg', pooling_convention='full' 
) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_really_tiny_conv_random_3d_input(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 1 kernel = (1, 1) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_really_tiny_conv_random_input_multi_filter(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 64 kernel = (1, 1) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_random_3d_input(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 1 kernel = (5 ,5) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_conv_random_input_multi_filter(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 64 kernel = (5, 5) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_conv_random(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 64 kernel = (5, 5) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_flatten(self): np.random.seed(1988) input_shape = (1, 3, 10, 
10) num_filter = 64 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) net = mx.sym.Flatten(data=net, name='flatten1') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.SoftmaxOutput(net, name='softmax') self._test_mxnet_model(net, input_shape=input_shape, mode='random', label_names=['softmax_label']) def test_transpose(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 64 kernel = (5, 5) stride = (1, 1) pad = (0, 0) net = mx.sym.Variable('data') net = mx.sym.transpose(data=net, name='transpose', axes=(0, 1, 2, 3)) net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_reshape(self): np.random.seed(1988) input_shape = (1, 8) net = mx.sym.Variable('data') net = mx.sym.reshape(data=net, shape=(1, 2, 2, 2)) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_synset_random_input(self): np.random.seed(1989) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.SoftmaxOutput(net, name='softmax') mod = _get_mxnet_module(net, data_shapes=[('data', input_shape)], mode='random', label_names=['softmax_label']) # Generate some dummy data input_data = np.random.uniform(-0.1, 0.1, input_shape) Batch = namedtuple('Batch', ['data']) mod.forward(Batch([mx.nd.array(input_data)])) kwargs = {'input_shape': {'data': input_shape}} # Get predictions from coreml coreml_model = convert( model=mod, class_labels=['Category1', 'Category2', 'Category3', 'Category4', 'Category5'], mode='classifier', **kwargs ) prediction = coreml_model.predict( _mxnet_remove_batch({'data': input_data})) self.assertEqual(prediction['classLabel'], 'Category3') def 
test_really_tiny_deconv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (1, 1) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_deconv_ones_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='ones', delta=0.05) def test_tiny_deconv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_asym_deconv_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 3) stride = (1, 1) pad = (0, 0) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_asym_deconv_random_asym_input(self): np.random.seed(1988) input_shape = (1, 1, 28, 18) num_filter = 16 kernel = (5, 3) stride = (1, 1) pad = (0, 0) dilate = (1, 1) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, dilate=dilate, 
name='deconv_1' ) net = mx.sym.Activation(net, name = 'tanh', act_type = "tanh") # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_deconv_pooling_random_input(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) net = mx.symbol.Pooling( data=net, kernel=kernel, stride=stride, pad=pad, name='pool_1', pool_type='max' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_really_tiny_deconv_random_3d_input(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 1 kernel = (1, 1) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_really_tiny_deconv_random_input_multi_filter(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 64 kernel = (1, 1) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_tiny_deconv_random_3d_input(self): np.random.seed(1988) input_shape = (1, 3, 10, 10) num_filter = 1 kernel = (5, 5) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def 
test_tiny_deconv_random_input_multi_filter(self): np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 64 kernel = (5 ,5) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='deconv_1' ) # Test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_deconv_random(self): np.random.seed(1988) input_shape = (1, 10, 4, 4) num_filter = 3 kernel = (2, 2) stride = (1, 1) pad = (0, 0) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, no_bias=False, name='deconv_1' ) # test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_deconv_random_output_shape(self): np.random.seed(1988) input_shape = (1, 10, 4, 4) num_filter = 3 kernel = (2, 2) stride = (1, 1) pad = (0, 0) target_shape = (5, 5) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, no_bias=False, target_shape=target_shape, name='deconv_1' ) # test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_deconv_random_padding(self): np.random.seed(1988) input_shape = (1, 10, 9, 9) num_filter = 3 kernel = (3, 3) stride = (3, 3) pad = (2, 2) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, no_bias=False, name='deconv_1') # test the mxnet model self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_conv_random_padding_odd(self): np.random.seed(1988) input_shape = (1, 10, 6, 6) num_filter = 3 kernel = (5, 5) stride = (1, 1) pad = (3, 3) # define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, 
kernel=kernel, stride=stride, pad=pad, no_bias=False, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_conv_random_padding_even(self): np.random.seed(1988) input_shape = (1, 10, 6, 6) num_filter = 3 kernel = (5, 5) stride = (1, 1) pad = (2, 2) # define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, no_bias=False, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_deconv_random_all_inputs(self): np.random.seed(1988) input_shape = (1, 10, 5, 5) num_filter = 3 kernel = (3, 3) stride = (2, 2) pad = (1, 1) dilate = (1, 1) target_shape = (11, 11) # define a model net = mx.sym.Variable('data') net = mx.symbol.Deconvolution( data=net, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, no_bias=False, target_shape=target_shape, dilate=dilate, name='deconv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') def test_batch_norm(self): np.random.seed(1988) input_shape = (1, 1, 2, 3) net = mx.sym.Variable('data') gamma = mx.sym.Variable('gamma') beta = mx.sym.Variable('beta') moving_mean = mx.sym.Variable('moving_mean') moving_var = mx.sym.Variable('moving_var') net = mx.symbol.BatchNorm( data=net, gamma=gamma, beta=beta, moving_mean=moving_mean, moving_var=moving_var, use_global_stats=True, name='batch_norm_1') self._test_mxnet_model(net, input_shape=input_shape, mode='random', delta=1e-2) def test_batch_norm_no_global_stats(self): """ This test should throw an exception since converter doesn't support use_global_stats=False). The reason for this is CoreML doesn't support local batch stats. 
""" np.random.seed(1988) input_shape = (1, 1, 2, 3) net = mx.sym.Variable('data') gamma = mx.sym.Variable('gamma') beta = mx.sym.Variable('beta') moving_mean = mx.sym.Variable('moving_mean') moving_var = mx.sym.Variable('moving_var') net = mx.symbol.BatchNorm( data=net, gamma=gamma, beta=beta, moving_mean=moving_mean, moving_var=moving_var, use_global_stats=False, name='batch_norm_1') self._test_mxnet_model(net, input_shape=input_shape, mode='random', delta=1e-2) def test_batch_norm_with_fix_gamma(self): """ The gamma will always be an array of ones when fix_gamma=True. The values of gamma may be changed accidentally if there have been fix_gamma=False before the final trained model. """ np.random.seed(1988) input_shape = (1, 1, 2, 3) net = mx.sym.Variable('data') gamma = mx.sym.Variable('gamma') beta = mx.sym.Variable('beta') moving_mean = mx.sym.Variable('moving_mean') moving_var = mx.sym.Variable('moving_var') net = mx.symbol.BatchNorm( data=net, gamma=gamma, beta=beta, moving_mean=moving_mean, moving_var=moving_var, fix_gamma=True, name='batch_norm_1') self._test_mxnet_model(net, input_shape=input_shape, mode='random', delta=1e-2) np.random.seed(1988) net = mx.symbol.BatchNorm( data=net, gamma=gamma, beta=beta, moving_mean=moving_mean, moving_var=moving_var, fix_gamma=False, name='batch_norm_2') self._test_mxnet_model(net, input_shape=input_shape, mode='random', delta=1e-2) def test_pre_processing_args(self): np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) net = mx.sym.SoftmaxOutput(net, name='softmax') self._test_mxnet_model(net, input_shape=input_shape, mode='random', label_names=['softmax_label'], pre_processing_args={'red_bias': 0, 'blue_bias': 0, 'green_bias': 0, 'image_scale': 1}) def test_different_input_variables(self): """ Verifying the behavior when input variable name is different than the standard name - 'data'. 
""" np.random.seed(1988) input_shape = (1, 10) net = mx.sym.Variable('data1') net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5) self._test_mxnet_model(net, input_shape=input_shape, mode='zeros', input_name='data1') def test_really_tiny_conv_optional_params(self): """ Verifying the behavior of a convolutional layer when stride and pad are not provided. """ np.random.seed(1988) input_shape = (1, 1, 10, 10) num_filter = 1 kernel = (1, 1) # Define a model net = mx.sym.Variable('data') net = mx.symbol.Convolution( data=net, num_filter=num_filter, kernel=kernel, name='conv_1' ) self._test_mxnet_model(net, input_shape=input_shape, mode='random') # TODO test_concat if __name__ == '__main__': suite = unittest.TestLoader().loadTestsFromTestCase(SingleLayerTest) unittest.TextTestRunner(verbosity=2).run(suite)
apache-2.0
AgileInstitute/labs-cpp-gtest-xcode
LunEx/gmock-1.7.0/test/gmock_output_test.py
986
5999
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Tests the text output of Google C++ Mocking Framework.

SYNOPSIS
       gmock_output_test.py --build_dir=BUILD/DIR --gengolden
         # where BUILD/DIR contains the built gmock_output_test_ file.
       gmock_output_test.py --gengolden
       gmock_output_test.py
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import sys

import gmock_test_utils


# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'

# Command used to produce the test program's output; stack traces and timings
# are disabled so the output is deterministic across runs and machines.
PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_output_test_')
COMMAND = [PROGRAM_PATH, '--gtest_stack_trace_depth=0',
           '--gtest_print_time=0']
GOLDEN_NAME = 'gmock_output_test_golden.txt'
GOLDEN_PATH = os.path.join(gmock_test_utils.GetSourceDir(), GOLDEN_NAME)


def ToUnixLineEnding(s):
  """Changes all Windows/Mac line endings in s to UNIX line endings."""

  return s.replace('\r\n', '\n').replace('\r', '\n')


def RemoveReportHeaderAndFooter(output):
  """Removes Google Test result report's header and footer from the output."""

  output = re.sub(r'.*gtest_main.*\n', '', output)
  output = re.sub(r'\[.*\d+ tests.*\n', '', output)
  output = re.sub(r'\[.* test environment .*\n', '', output)
  output = re.sub(r'\[=+\] \d+ tests .* ran.*', '', output)
  output = re.sub(r'.* FAILED TESTS\n', '', output)
  return output


def RemoveLocations(output):
  """Removes all file location info from a Google Test program's output.

  Args:
       output:  the output of a Google Test program.

  Returns:
       output with all file location info (in the form of
       'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or
       'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
       'FILE:#: '.
  """

  return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\:', 'FILE:#:', output)


def NormalizeErrorMarker(output):
  """Normalizes the error marker, which is different on Windows vs on Linux."""

  return re.sub(r' error: ', ' Failure\n', output)


def RemoveMemoryAddresses(output):
  """Removes memory addresses from the test output."""

  return re.sub(r'@\w+', '@0x#', output)


def RemoveTestNamesOfLeakedMocks(output):
  """Removes the test names of leaked mock objects from the test output."""

  return re.sub(r'\(used in test .+\) ', '', output)


def GetLeakyTests(output):
  """Returns a list of test names that leak mock objects."""

  # findall() returns a list of all matches of the regex in output.
  # For example, if '(used in test FooTest.Bar)' is in output, the
  # list will contain 'FooTest.Bar'.
  return re.findall(r'\(used in test (.+)\)', output)


def GetNormalizedOutputAndLeakyTests(output):
  """Normalizes the output of gmock_output_test_.

  Args:
    output: The test output.

  Returns:
    A tuple (the normalized test output, the list of test names that have
    leaked mocks).
  """

  # Leaky-test names are collected from the last intermediate form, before
  # RemoveTestNamesOfLeakedMocks strips them out of the returned text.
  output = ToUnixLineEnding(output)
  output = RemoveReportHeaderAndFooter(output)
  output = NormalizeErrorMarker(output)
  output = RemoveLocations(output)
  output = RemoveMemoryAddresses(output)
  return (RemoveTestNamesOfLeakedMocks(output), GetLeakyTests(output))


def GetShellCommandOutput(cmd):
  """Runs a command in a sub-process, and returns its STDOUT in a string."""

  return gmock_test_utils.Subprocess(cmd, capture_stderr=False).output


def GetNormalizedCommandOutputAndLeakyTests(cmd):
  """Runs a command and returns its normalized output and a list of leaky
  tests.

  Args:
    cmd:  the shell command.
  """

  # Disables exception pop-ups on Windows.
  os.environ['GTEST_CATCH_EXCEPTIONS'] = '1'
  return GetNormalizedOutputAndLeakyTests(GetShellCommandOutput(cmd))


class GMockOutputTest(gmock_test_utils.TestCase):

  def testOutput(self):
    (output, leaky_tests) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
    # Golden file is read in binary mode so line endings are preserved as
    # written by the --gengolden pass below.
    golden_file = open(GOLDEN_PATH, 'rb')
    golden = golden_file.read()
    golden_file.close()

    # The normalized output should match the golden file.
    self.assertEquals(golden, output)

    # The raw output should contain 2 leaked mock object errors for
    # test GMockOutputTest.CatchesLeakedMocks.
    self.assertEquals(['GMockOutputTest.CatchesLeakedMocks',
                       'GMockOutputTest.CatchesLeakedMocks'],
                      leaky_tests)


if __name__ == '__main__':
  if sys.argv[1:] == [GENGOLDEN_FLAG]:
    # Regenerate the golden file instead of running the comparison test.
    (output, _) = GetNormalizedCommandOutputAndLeakyTests(COMMAND)
    golden_file = open(GOLDEN_PATH, 'wb')
    golden_file.write(output)
    golden_file.close()
  else:
    gmock_test_utils.Main()
mit
CiscoSystems/openstack-dashboard
django-openstack/django_openstack/dash/views/floating_ips.py
3
7320
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Views for managing Nova floating ips.
"""

import logging

from django import template
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django import shortcuts

from django_openstack import api
from django_openstack import forms
from novaclient import exceptions as novaclient_exceptions


LOG = logging.getLogger('django_openstack.dash.views.floating_ip')


class ReleaseFloatingIp(forms.SelfHandlingForm):
    """Form that releases a floating IP back to the tenant's pool."""
    floating_ip_id = forms.CharField(widget=forms.HiddenInput())

    def handle(self, request, data):
        """Release the floating IP and report the outcome via messages."""
        try:
            LOG.info('Releasing Floating IP "%s"' % data['floating_ip_id'])
            api.tenant_floating_ip_release(request, data['floating_ip_id'])
            messages.info(request, 'Successfully released Floating IP: %s'
                          % data['floating_ip_id'])
        except novaclient_exceptions.ClientException as e:
            LOG.error("ClientException in ReleaseFloatingIp", exc_info=True)
            messages.error(request,
                           'Error releasing Floating IP from tenant: %s'
                           % e.message)
        return shortcuts.redirect(request.build_absolute_uri())


class FloatingIpAssociate(forms.SelfHandlingForm):
    """Form that associates a floating IP with a chosen instance."""
    floating_ip_id = forms.CharField(widget=forms.HiddenInput())
    floating_ip = forms.CharField(widget=forms.TextInput(
        attrs={'readonly': 'readonly'}))
    instance_id = forms.ChoiceField()

    def __init__(self, *args, **kwargs):
        super(FloatingIpAssociate, self).__init__(*args, **kwargs)
        # The instance choices are supplied by the view via initial data.
        instancelist = kwargs.get('initial', {}).get('instances', [])
        self.fields['instance_id'] = forms.ChoiceField(
            choices=instancelist, label="Instance")

    def handle(self, request, data):
        """Attach the floating IP to the selected instance."""
        try:
            api.server_add_floating_ip(request,
                                       data['instance_id'],
                                       data['floating_ip_id'])
            LOG.info('Associating Floating IP "%s" with Instance "%s"'
                     % (data['floating_ip'], data['instance_id']))
            messages.info(request,
                          'Successfully associated Floating IP: %s '
                          'with Instance: %s'
                          % (data['floating_ip'], data['instance_id']))
        except novaclient_exceptions.ClientException as e:
            LOG.error("ClientException in FloatingIpAssociate", exc_info=True)
            messages.error(request, 'Error associating Floating IP: %s'
                           % e.message)
        return shortcuts.redirect('dash_floating_ips', request.user.tenant)


class FloatingIpDisassociate(forms.SelfHandlingForm):
    """Form that detaches a floating IP from its instance."""
    floating_ip_id = forms.CharField(widget=forms.HiddenInput())

    def handle(self, request, data):
        """Look up the floating IP and remove it from its instance."""
        try:
            fip = api.tenant_floating_ip_get(request, data['floating_ip_id'])
            api.server_remove_floating_ip(request, fip.instance_id, fip.id)
            LOG.info('Disassociating Floating IP "%s"'
                     % data['floating_ip_id'])
            messages.info(request,
                          'Successfully disassociated Floating IP: %s'
                          % data['floating_ip_id'])
        except novaclient_exceptions.ClientException as e:
            LOG.error("ClientException in FloatingIpAssociate", exc_info=True)
            messages.error(request, 'Error disassociating Floating IP: %s'
                           % e.message)
        return shortcuts.redirect('dash_floating_ips', request.user.tenant)


class FloatingIpAllocate(forms.SelfHandlingForm):
    """Form that allocates a new floating IP to the tenant."""
    tenant_id = forms.CharField(widget=forms.HiddenInput())

    def handle(self, request, data):
        """Allocate a floating IP for the tenant and report the outcome."""
        try:
            fip = api.tenant_floating_ip_allocate(request)
            LOG.info('Allocating Floating IP "%s" to tenant "%s"'
                     % (fip.ip, data['tenant_id']))
            messages.success(request,
                             'Successfully allocated Floating IP "%s" '
                             'to tenant "%s"' % (fip.ip, data['tenant_id']))
        except novaclient_exceptions.ClientException as e:
            LOG.error("ClientException in FloatingIpAllocate", exc_info=True)
            # BUGFIX: the previous error message interpolated fip.ip, but
            # fip is unbound when the allocate call itself raises, which
            # turned the ClientException into a NameError.  Report only
            # what is known to exist here.
            messages.error(request,
                           'Error allocating Floating IP to tenant "%s": %s'
                           % (data['tenant_id'], e.message))
        return shortcuts.redirect('dash_floating_ips', request.user.tenant)


@login_required
def index(request, tenant_id):
    """List the tenant's floating IPs and dispatch the inline forms."""
    for f in (ReleaseFloatingIp, FloatingIpDisassociate, FloatingIpAllocate):
        _unused, handled = f.maybe_handle(request)
        if handled:
            return handled

    try:
        floating_ips = api.tenant_floating_ip_list(request)
    except novaclient_exceptions.ClientException as e:
        # Degrade to an empty listing rather than failing the page.
        floating_ips = []
        LOG.error("ClientException in floating ip index", exc_info=True)
        messages.error(request,
                       'Error fetching floating ips: %s' % e.message)

    return shortcuts.render_to_response(
        'django_openstack/dash/floating_ips/index.html', {
            'allocate_form': FloatingIpAllocate(initial={
                'tenant_id': request.user.tenant}),
            'disassociate_form': FloatingIpDisassociate(),
            'floating_ips': floating_ips,
            'release_form': ReleaseFloatingIp(),
        }, context_instance=template.RequestContext(request))


@login_required
def associate(request, tenant_id, ip_id):
    """Show/handle the form associating floating IP *ip_id* to an instance."""
    instancelist = [(server.id, 'id: %s, name: %s' % (server.id, server.name))
                    for server in api.server_list(request)]

    form, handled = FloatingIpAssociate().maybe_handle(request, initial={
        'floating_ip_id': ip_id,
        'floating_ip': api.tenant_floating_ip_get(request, ip_id).ip,
        'instances': instancelist})
    if handled:
        return handled

    return shortcuts.render_to_response(
        'django_openstack/dash/floating_ips/associate.html', {
            'associate_form': form,
        }, context_instance=template.RequestContext(request))


@login_required
def disassociate(request, tenant_id, ip_id):
    """Handle the disassociate form for floating IP *ip_id*."""
    form, handled = FloatingIpDisassociate().maybe_handle(request)
    if handled:
        return handled

    # NOTE(review): this renders the *associate* template with an empty
    # context; presumably intentional fallback, but worth confirming.
    return shortcuts.render_to_response(
        'django_openstack/dash/floating_ips/associate.html', {
        }, context_instance=template.RequestContext(request))
apache-2.0
fkorotkov/pants
src/python/pants/core_tasks/deferred_sources_mapper.py
6
2910
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import logging import os from pants.base.build_environment import get_buildroot from pants.build_graph.address import Address from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.remote_sources import RemoteSources from pants.source.wrapped_globs import Files from pants.task.task import Task logger = logging.getLogger(__name__) class DeferredSourcesMapper(Task): """Map DeferredSourcesFields to files that produce the product 'unpacked_archives'. If you want a task to be able to map sources like this, make it require the 'deferred_sources' product. """ class SourcesTargetLookupError(AddressLookupError): """Raised when the referenced target cannot be found in the build graph""" pass class NoUnpackedSourcesError(AddressLookupError): """Raised when there are no files found unpacked from the archive""" pass @classmethod def product_types(cls): """ Declare product produced by this task deferred_sources does not have any data associated with it. Downstream tasks can depend on it just make sure that this task completes first. 
:return: """ return ['deferred_sources'] @classmethod def prepare(cls, options, round_manager): round_manager.require_data('unpacked_archives') @classmethod def register_options(cls, register): register('--allow-from-target', default=True, type=bool, removal_hint='from_target was removed in 1.3.0', removal_version='1.5.0.dev0', help='This option has no effect, because from_target has been removed.') def process_remote_sources(self): """Create synthetic targets with populated sources from remote_sources targets.""" unpacked_sources = self.context.products.get_data('unpacked_archives') remote_sources_targets = self.context.targets(predicate=lambda t: isinstance(t, RemoteSources)) for target in remote_sources_targets: sources, rel_unpack_dir = unpacked_sources[target.sources_target] synthetic_target = self.context.add_new_target( address=Address(os.path.relpath(self.workdir, get_buildroot()), target.id), target_type=target.destination_target_type, dependencies=target.dependencies, sources=Files.create_fileset_with_spec(rel_unpack_dir, *sources), derived_from=target, **target.destination_target_args ) for dependent in self.context.build_graph.dependents_of(target.address): self.context.build_graph.inject_dependency(dependent, synthetic_target.address) def execute(self): self.process_remote_sources()
apache-2.0
campbe13/openhatch
vendor/packages/gdata/tests/atom_tests/auth_test.py
128
1342
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# This module is used for version 2 of the Google Data APIs.


__author__ = 'j.s@google.com (Jeff Scudder)'


import unittest
import atom.auth
import atom.http_core


class BasicAuthTest(unittest.TestCase):
  """Checks that BasicAuth produces the expected Authorization header."""

  def test_modify_request(self):
    request = atom.http_core.HttpRequest()
    credentials = atom.auth.BasicAuth('Aladdin', 'open sesame')
    # base64('Aladdin:open sesame') — the canonical RFC 2617 example value.
    self.assertTrue(
        credentials.basic_cookie == 'QWxhZGRpbjpvcGVuIHNlc2FtZQ==')
    credentials.modify_request(request)
    self.assertTrue(
        request.headers['Authorization'] == 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==')


def suite():
  return unittest.TestSuite((unittest.makeSuite(BasicAuthTest,'test'),))


if __name__ == '__main__':
  unittest.main()
agpl-3.0
MarishaYasko/interactive-stories-stands
InteractiveStands/Lib/encodings/cp862.py
272
33370
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP862.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp862', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x05d0, # HEBREW LETTER ALEF 0x0081: 0x05d1, # HEBREW LETTER BET 0x0082: 0x05d2, # HEBREW LETTER GIMEL 0x0083: 0x05d3, # HEBREW LETTER DALET 0x0084: 0x05d4, # HEBREW LETTER HE 0x0085: 0x05d5, # HEBREW LETTER VAV 0x0086: 0x05d6, # HEBREW LETTER ZAYIN 0x0087: 0x05d7, # HEBREW LETTER HET 0x0088: 0x05d8, # HEBREW LETTER TET 0x0089: 0x05d9, # HEBREW LETTER YOD 0x008a: 0x05da, # HEBREW LETTER FINAL KAF 0x008b: 0x05db, # HEBREW LETTER KAF 0x008c: 0x05dc, # HEBREW LETTER LAMED 0x008d: 0x05dd, # HEBREW LETTER FINAL MEM 0x008e: 0x05de, # HEBREW LETTER MEM 0x008f: 0x05df, # HEBREW LETTER FINAL NUN 0x0090: 0x05e0, # HEBREW LETTER NUN 0x0091: 0x05e1, # HEBREW LETTER SAMEKH 0x0092: 0x05e2, # HEBREW LETTER AYIN 0x0093: 0x05e3, # HEBREW LETTER FINAL PE 0x0094: 0x05e4, # HEBREW LETTER PE 0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI 0x0096: 0x05e6, # HEBREW LETTER TSADI 
0x0097: 0x05e7, # HEBREW LETTER QOF 0x0098: 0x05e8, # HEBREW LETTER RESH 0x0099: 0x05e9, # HEBREW LETTER SHIN 0x009a: 0x05ea, # HEBREW LETTER TAV 0x009b: 0x00a2, # CENT SIGN 0x009c: 0x00a3, # POUND SIGN 0x009d: 0x00a5, # YEN SIGN 0x009e: 0x20a7, # PESETA SIGN 0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK 0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE 0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE 0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE 0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE 0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE 0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE 0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR 0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR 0x00a8: 0x00bf, # INVERTED QUESTION MARK 0x00a9: 0x2310, # REVERSED NOT SIGN 0x00aa: 0x00ac, # NOT SIGN 0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF 0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER 0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK 0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00b0: 0x2591, # LIGHT SHADE 0x00b1: 0x2592, # MEDIUM SHADE 0x00b2: 0x2593, # DARK SHADE 0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL 0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL 0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT 0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT 0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x00c2: 0x252c, # BOX 
DRAWINGS LIGHT DOWN AND HORIZONTAL 0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL 0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL 0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT 0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x00db: 0x2588, # FULL BLOCK 0x00dc: 0x2584, # LOWER HALF BLOCK 0x00dd: 0x258c, # LEFT HALF BLOCK 0x00de: 0x2590, # RIGHT HALF BLOCK 0x00df: 0x2580, # UPPER HALF BLOCK 0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA 0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN) 0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA 0x00e3: 0x03c0, # GREEK SMALL LETTER PI 0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA 0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA 0x00e6: 0x00b5, # MICRO SIGN 0x00e7: 0x03c4, # GREEK SMALL LETTER TAU 0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI 
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA 0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA 0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA 0x00ec: 0x221e, # INFINITY 0x00ed: 0x03c6, # GREEK SMALL LETTER PHI 0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON 0x00ef: 0x2229, # INTERSECTION 0x00f0: 0x2261, # IDENTICAL TO 0x00f1: 0x00b1, # PLUS-MINUS SIGN 0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO 0x00f3: 0x2264, # LESS-THAN OR EQUAL TO 0x00f4: 0x2320, # TOP HALF INTEGRAL 0x00f5: 0x2321, # BOTTOM HALF INTEGRAL 0x00f6: 0x00f7, # DIVISION SIGN 0x00f7: 0x2248, # ALMOST EQUAL TO 0x00f8: 0x00b0, # DEGREE SIGN 0x00f9: 0x2219, # BULLET OPERATOR 0x00fa: 0x00b7, # MIDDLE DOT 0x00fb: 0x221a, # SQUARE ROOT 0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N 0x00fd: 0x00b2, # SUPERSCRIPT TWO 0x00fe: 0x25a0, # BLACK SQUARE 0x00ff: 0x00a0, # NO-BREAK SPACE }) ### Decoding Table decoding_table = ( '\x00' # 0x0000 -> NULL '\x01' # 0x0001 -> START OF HEADING '\x02' # 0x0002 -> START OF TEXT '\x03' # 0x0003 -> END OF TEXT '\x04' # 0x0004 -> END OF TRANSMISSION '\x05' # 0x0005 -> ENQUIRY '\x06' # 0x0006 -> ACKNOWLEDGE '\x07' # 0x0007 -> BELL '\x08' # 0x0008 -> BACKSPACE '\t' # 0x0009 -> HORIZONTAL TABULATION '\n' # 0x000a -> LINE FEED '\x0b' # 0x000b -> VERTICAL TABULATION '\x0c' # 0x000c -> FORM FEED '\r' # 0x000d -> CARRIAGE RETURN '\x0e' # 0x000e -> SHIFT OUT '\x0f' # 0x000f -> SHIFT IN '\x10' # 0x0010 -> DATA LINK ESCAPE '\x11' # 0x0011 -> DEVICE CONTROL ONE '\x12' # 0x0012 -> DEVICE CONTROL TWO '\x13' # 0x0013 -> DEVICE CONTROL THREE '\x14' # 0x0014 -> DEVICE CONTROL FOUR '\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE '\x16' # 0x0016 -> SYNCHRONOUS IDLE '\x17' # 0x0017 -> END OF TRANSMISSION BLOCK '\x18' # 0x0018 -> CANCEL '\x19' # 0x0019 -> END OF MEDIUM '\x1a' # 0x001a -> SUBSTITUTE '\x1b' # 0x001b -> ESCAPE '\x1c' # 0x001c -> FILE SEPARATOR '\x1d' # 0x001d -> GROUP SEPARATOR '\x1e' # 0x001e -> RECORD SEPARATOR '\x1f' # 0x001f -> UNIT SEPARATOR ' ' # 0x0020 -> SPACE '!' 
# 0x0021 -> EXCLAMATION MARK '"' # 0x0022 -> QUOTATION MARK '#' # 0x0023 -> NUMBER SIGN '$' # 0x0024 -> DOLLAR SIGN '%' # 0x0025 -> PERCENT SIGN '&' # 0x0026 -> AMPERSAND "'" # 0x0027 -> APOSTROPHE '(' # 0x0028 -> LEFT PARENTHESIS ')' # 0x0029 -> RIGHT PARENTHESIS '*' # 0x002a -> ASTERISK '+' # 0x002b -> PLUS SIGN ',' # 0x002c -> COMMA '-' # 0x002d -> HYPHEN-MINUS '.' # 0x002e -> FULL STOP '/' # 0x002f -> SOLIDUS '0' # 0x0030 -> DIGIT ZERO '1' # 0x0031 -> DIGIT ONE '2' # 0x0032 -> DIGIT TWO '3' # 0x0033 -> DIGIT THREE '4' # 0x0034 -> DIGIT FOUR '5' # 0x0035 -> DIGIT FIVE '6' # 0x0036 -> DIGIT SIX '7' # 0x0037 -> DIGIT SEVEN '8' # 0x0038 -> DIGIT EIGHT '9' # 0x0039 -> DIGIT NINE ':' # 0x003a -> COLON ';' # 0x003b -> SEMICOLON '<' # 0x003c -> LESS-THAN SIGN '=' # 0x003d -> EQUALS SIGN '>' # 0x003e -> GREATER-THAN SIGN '?' # 0x003f -> QUESTION MARK '@' # 0x0040 -> COMMERCIAL AT 'A' # 0x0041 -> LATIN CAPITAL LETTER A 'B' # 0x0042 -> LATIN CAPITAL LETTER B 'C' # 0x0043 -> LATIN CAPITAL LETTER C 'D' # 0x0044 -> LATIN CAPITAL LETTER D 'E' # 0x0045 -> LATIN CAPITAL LETTER E 'F' # 0x0046 -> LATIN CAPITAL LETTER F 'G' # 0x0047 -> LATIN CAPITAL LETTER G 'H' # 0x0048 -> LATIN CAPITAL LETTER H 'I' # 0x0049 -> LATIN CAPITAL LETTER I 'J' # 0x004a -> LATIN CAPITAL LETTER J 'K' # 0x004b -> LATIN CAPITAL LETTER K 'L' # 0x004c -> LATIN CAPITAL LETTER L 'M' # 0x004d -> LATIN CAPITAL LETTER M 'N' # 0x004e -> LATIN CAPITAL LETTER N 'O' # 0x004f -> LATIN CAPITAL LETTER O 'P' # 0x0050 -> LATIN CAPITAL LETTER P 'Q' # 0x0051 -> LATIN CAPITAL LETTER Q 'R' # 0x0052 -> LATIN CAPITAL LETTER R 'S' # 0x0053 -> LATIN CAPITAL LETTER S 'T' # 0x0054 -> LATIN CAPITAL LETTER T 'U' # 0x0055 -> LATIN CAPITAL LETTER U 'V' # 0x0056 -> LATIN CAPITAL LETTER V 'W' # 0x0057 -> LATIN CAPITAL LETTER W 'X' # 0x0058 -> LATIN CAPITAL LETTER X 'Y' # 0x0059 -> LATIN CAPITAL LETTER Y 'Z' # 0x005a -> LATIN CAPITAL LETTER Z '[' # 0x005b -> LEFT SQUARE BRACKET '\\' # 0x005c -> REVERSE SOLIDUS ']' # 0x005d -> RIGHT SQUARE 
BRACKET '^' # 0x005e -> CIRCUMFLEX ACCENT '_' # 0x005f -> LOW LINE '`' # 0x0060 -> GRAVE ACCENT 'a' # 0x0061 -> LATIN SMALL LETTER A 'b' # 0x0062 -> LATIN SMALL LETTER B 'c' # 0x0063 -> LATIN SMALL LETTER C 'd' # 0x0064 -> LATIN SMALL LETTER D 'e' # 0x0065 -> LATIN SMALL LETTER E 'f' # 0x0066 -> LATIN SMALL LETTER F 'g' # 0x0067 -> LATIN SMALL LETTER G 'h' # 0x0068 -> LATIN SMALL LETTER H 'i' # 0x0069 -> LATIN SMALL LETTER I 'j' # 0x006a -> LATIN SMALL LETTER J 'k' # 0x006b -> LATIN SMALL LETTER K 'l' # 0x006c -> LATIN SMALL LETTER L 'm' # 0x006d -> LATIN SMALL LETTER M 'n' # 0x006e -> LATIN SMALL LETTER N 'o' # 0x006f -> LATIN SMALL LETTER O 'p' # 0x0070 -> LATIN SMALL LETTER P 'q' # 0x0071 -> LATIN SMALL LETTER Q 'r' # 0x0072 -> LATIN SMALL LETTER R 's' # 0x0073 -> LATIN SMALL LETTER S 't' # 0x0074 -> LATIN SMALL LETTER T 'u' # 0x0075 -> LATIN SMALL LETTER U 'v' # 0x0076 -> LATIN SMALL LETTER V 'w' # 0x0077 -> LATIN SMALL LETTER W 'x' # 0x0078 -> LATIN SMALL LETTER X 'y' # 0x0079 -> LATIN SMALL LETTER Y 'z' # 0x007a -> LATIN SMALL LETTER Z '{' # 0x007b -> LEFT CURLY BRACKET '|' # 0x007c -> VERTICAL LINE '}' # 0x007d -> RIGHT CURLY BRACKET '~' # 0x007e -> TILDE '\x7f' # 0x007f -> DELETE '\u05d0' # 0x0080 -> HEBREW LETTER ALEF '\u05d1' # 0x0081 -> HEBREW LETTER BET '\u05d2' # 0x0082 -> HEBREW LETTER GIMEL '\u05d3' # 0x0083 -> HEBREW LETTER DALET '\u05d4' # 0x0084 -> HEBREW LETTER HE '\u05d5' # 0x0085 -> HEBREW LETTER VAV '\u05d6' # 0x0086 -> HEBREW LETTER ZAYIN '\u05d7' # 0x0087 -> HEBREW LETTER HET '\u05d8' # 0x0088 -> HEBREW LETTER TET '\u05d9' # 0x0089 -> HEBREW LETTER YOD '\u05da' # 0x008a -> HEBREW LETTER FINAL KAF '\u05db' # 0x008b -> HEBREW LETTER KAF '\u05dc' # 0x008c -> HEBREW LETTER LAMED '\u05dd' # 0x008d -> HEBREW LETTER FINAL MEM '\u05de' # 0x008e -> HEBREW LETTER MEM '\u05df' # 0x008f -> HEBREW LETTER FINAL NUN '\u05e0' # 0x0090 -> HEBREW LETTER NUN '\u05e1' # 0x0091 -> HEBREW LETTER SAMEKH '\u05e2' # 0x0092 -> HEBREW LETTER AYIN '\u05e3' # 0x0093 -> 
HEBREW LETTER FINAL PE '\u05e4' # 0x0094 -> HEBREW LETTER PE '\u05e5' # 0x0095 -> HEBREW LETTER FINAL TSADI '\u05e6' # 0x0096 -> HEBREW LETTER TSADI '\u05e7' # 0x0097 -> HEBREW LETTER QOF '\u05e8' # 0x0098 -> HEBREW LETTER RESH '\u05e9' # 0x0099 -> HEBREW LETTER SHIN '\u05ea' # 0x009a -> HEBREW LETTER TAV '\xa2' # 0x009b -> CENT SIGN '\xa3' # 0x009c -> POUND SIGN '\xa5' # 0x009d -> YEN SIGN '\u20a7' # 0x009e -> PESETA SIGN '\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK '\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE '\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE '\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE '\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE '\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE '\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE '\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR '\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR '\xbf' # 0x00a8 -> INVERTED QUESTION MARK '\u2310' # 0x00a9 -> REVERSED NOT SIGN '\xac' # 0x00aa -> NOT SIGN '\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF '\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER '\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK '\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK '\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK '\u2591' # 0x00b0 -> LIGHT SHADE '\u2592' # 0x00b1 -> MEDIUM SHADE '\u2593' # 0x00b2 -> DARK SHADE '\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL '\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT '\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE '\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE '\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE '\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE '\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT '\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL '\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT '\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT '\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE '\u255b' # 
0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE '\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT '\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT '\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL '\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL '\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT '\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL '\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL '\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE '\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE '\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT '\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT '\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL '\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL '\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT '\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL '\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL '\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE '\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE '\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE '\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE '\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE '\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE '\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE '\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE '\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE '\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE '\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT '\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT '\u2588' # 0x00db -> FULL BLOCK '\u2584' # 0x00dc -> LOWER HALF BLOCK '\u258c' # 0x00dd -> LEFT HALF BLOCK '\u2590' # 0x00de -> RIGHT HALF BLOCK '\u2580' # 0x00df -> UPPER HALF BLOCK '\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA '\xdf' # 
0x00e1 -> LATIN SMALL LETTER SHARP S (GERMAN) '\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA '\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI '\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA '\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA '\xb5' # 0x00e6 -> MICRO SIGN '\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU '\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI '\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA '\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA '\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA '\u221e' # 0x00ec -> INFINITY '\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI '\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON '\u2229' # 0x00ef -> INTERSECTION '\u2261' # 0x00f0 -> IDENTICAL TO '\xb1' # 0x00f1 -> PLUS-MINUS SIGN '\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO '\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO '\u2320' # 0x00f4 -> TOP HALF INTEGRAL '\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL '\xf7' # 0x00f6 -> DIVISION SIGN '\u2248' # 0x00f7 -> ALMOST EQUAL TO '\xb0' # 0x00f8 -> DEGREE SIGN '\u2219' # 0x00f9 -> BULLET OPERATOR '\xb7' # 0x00fa -> MIDDLE DOT '\u221a' # 0x00fb -> SQUARE ROOT '\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N '\xb2' # 0x00fd -> SUPERSCRIPT TWO '\u25a0' # 0x00fe -> BLACK SQUARE '\xa0' # 0x00ff -> NO-BREAK SPACE ) ### Encoding Map encoding_map = { 0x0000: 0x0000, # NULL 0x0001: 0x0001, # START OF HEADING 0x0002: 0x0002, # START OF TEXT 0x0003: 0x0003, # END OF TEXT 0x0004: 0x0004, # END OF TRANSMISSION 0x0005: 0x0005, # ENQUIRY 0x0006: 0x0006, # ACKNOWLEDGE 0x0007: 0x0007, # BELL 0x0008: 0x0008, # BACKSPACE 0x0009: 0x0009, # HORIZONTAL TABULATION 0x000a: 0x000a, # LINE FEED 0x000b: 0x000b, # VERTICAL TABULATION 0x000c: 0x000c, # FORM FEED 0x000d: 0x000d, # CARRIAGE RETURN 0x000e: 0x000e, # SHIFT OUT 0x000f: 0x000f, # SHIFT IN 0x0010: 0x0010, # DATA LINK ESCAPE 0x0011: 0x0011, # DEVICE CONTROL ONE 0x0012: 0x0012, # DEVICE CONTROL TWO 0x0013: 0x0013, # DEVICE CONTROL THREE 0x0014: 0x0014, # DEVICE CONTROL FOUR 0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE 
0x0016: 0x0016, # SYNCHRONOUS IDLE 0x0017: 0x0017, # END OF TRANSMISSION BLOCK 0x0018: 0x0018, # CANCEL 0x0019: 0x0019, # END OF MEDIUM 0x001a: 0x001a, # SUBSTITUTE 0x001b: 0x001b, # ESCAPE 0x001c: 0x001c, # FILE SEPARATOR 0x001d: 0x001d, # GROUP SEPARATOR 0x001e: 0x001e, # RECORD SEPARATOR 0x001f: 0x001f, # UNIT SEPARATOR 0x0020: 0x0020, # SPACE 0x0021: 0x0021, # EXCLAMATION MARK 0x0022: 0x0022, # QUOTATION MARK 0x0023: 0x0023, # NUMBER SIGN 0x0024: 0x0024, # DOLLAR SIGN 0x0025: 0x0025, # PERCENT SIGN 0x0026: 0x0026, # AMPERSAND 0x0027: 0x0027, # APOSTROPHE 0x0028: 0x0028, # LEFT PARENTHESIS 0x0029: 0x0029, # RIGHT PARENTHESIS 0x002a: 0x002a, # ASTERISK 0x002b: 0x002b, # PLUS SIGN 0x002c: 0x002c, # COMMA 0x002d: 0x002d, # HYPHEN-MINUS 0x002e: 0x002e, # FULL STOP 0x002f: 0x002f, # SOLIDUS 0x0030: 0x0030, # DIGIT ZERO 0x0031: 0x0031, # DIGIT ONE 0x0032: 0x0032, # DIGIT TWO 0x0033: 0x0033, # DIGIT THREE 0x0034: 0x0034, # DIGIT FOUR 0x0035: 0x0035, # DIGIT FIVE 0x0036: 0x0036, # DIGIT SIX 0x0037: 0x0037, # DIGIT SEVEN 0x0038: 0x0038, # DIGIT EIGHT 0x0039: 0x0039, # DIGIT NINE 0x003a: 0x003a, # COLON 0x003b: 0x003b, # SEMICOLON 0x003c: 0x003c, # LESS-THAN SIGN 0x003d: 0x003d, # EQUALS SIGN 0x003e: 0x003e, # GREATER-THAN SIGN 0x003f: 0x003f, # QUESTION MARK 0x0040: 0x0040, # COMMERCIAL AT 0x0041: 0x0041, # LATIN CAPITAL LETTER A 0x0042: 0x0042, # LATIN CAPITAL LETTER B 0x0043: 0x0043, # LATIN CAPITAL LETTER C 0x0044: 0x0044, # LATIN CAPITAL LETTER D 0x0045: 0x0045, # LATIN CAPITAL LETTER E 0x0046: 0x0046, # LATIN CAPITAL LETTER F 0x0047: 0x0047, # LATIN CAPITAL LETTER G 0x0048: 0x0048, # LATIN CAPITAL LETTER H 0x0049: 0x0049, # LATIN CAPITAL LETTER I 0x004a: 0x004a, # LATIN CAPITAL LETTER J 0x004b: 0x004b, # LATIN CAPITAL LETTER K 0x004c: 0x004c, # LATIN CAPITAL LETTER L 0x004d: 0x004d, # LATIN CAPITAL LETTER M 0x004e: 0x004e, # LATIN CAPITAL LETTER N 0x004f: 0x004f, # LATIN CAPITAL LETTER O 0x0050: 0x0050, # LATIN CAPITAL LETTER P 0x0051: 0x0051, # LATIN CAPITAL LETTER 
Q 0x0052: 0x0052, # LATIN CAPITAL LETTER R 0x0053: 0x0053, # LATIN CAPITAL LETTER S 0x0054: 0x0054, # LATIN CAPITAL LETTER T 0x0055: 0x0055, # LATIN CAPITAL LETTER U 0x0056: 0x0056, # LATIN CAPITAL LETTER V 0x0057: 0x0057, # LATIN CAPITAL LETTER W 0x0058: 0x0058, # LATIN CAPITAL LETTER X 0x0059: 0x0059, # LATIN CAPITAL LETTER Y 0x005a: 0x005a, # LATIN CAPITAL LETTER Z 0x005b: 0x005b, # LEFT SQUARE BRACKET 0x005c: 0x005c, # REVERSE SOLIDUS 0x005d: 0x005d, # RIGHT SQUARE BRACKET 0x005e: 0x005e, # CIRCUMFLEX ACCENT 0x005f: 0x005f, # LOW LINE 0x0060: 0x0060, # GRAVE ACCENT 0x0061: 0x0061, # LATIN SMALL LETTER A 0x0062: 0x0062, # LATIN SMALL LETTER B 0x0063: 0x0063, # LATIN SMALL LETTER C 0x0064: 0x0064, # LATIN SMALL LETTER D 0x0065: 0x0065, # LATIN SMALL LETTER E 0x0066: 0x0066, # LATIN SMALL LETTER F 0x0067: 0x0067, # LATIN SMALL LETTER G 0x0068: 0x0068, # LATIN SMALL LETTER H 0x0069: 0x0069, # LATIN SMALL LETTER I 0x006a: 0x006a, # LATIN SMALL LETTER J 0x006b: 0x006b, # LATIN SMALL LETTER K 0x006c: 0x006c, # LATIN SMALL LETTER L 0x006d: 0x006d, # LATIN SMALL LETTER M 0x006e: 0x006e, # LATIN SMALL LETTER N 0x006f: 0x006f, # LATIN SMALL LETTER O 0x0070: 0x0070, # LATIN SMALL LETTER P 0x0071: 0x0071, # LATIN SMALL LETTER Q 0x0072: 0x0072, # LATIN SMALL LETTER R 0x0073: 0x0073, # LATIN SMALL LETTER S 0x0074: 0x0074, # LATIN SMALL LETTER T 0x0075: 0x0075, # LATIN SMALL LETTER U 0x0076: 0x0076, # LATIN SMALL LETTER V 0x0077: 0x0077, # LATIN SMALL LETTER W 0x0078: 0x0078, # LATIN SMALL LETTER X 0x0079: 0x0079, # LATIN SMALL LETTER Y 0x007a: 0x007a, # LATIN SMALL LETTER Z 0x007b: 0x007b, # LEFT CURLY BRACKET 0x007c: 0x007c, # VERTICAL LINE 0x007d: 0x007d, # RIGHT CURLY BRACKET 0x007e: 0x007e, # TILDE 0x007f: 0x007f, # DELETE 0x00a0: 0x00ff, # NO-BREAK SPACE 0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK 0x00a2: 0x009b, # CENT SIGN 0x00a3: 0x009c, # POUND SIGN 0x00a5: 0x009d, # YEN SIGN 0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR 0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE 
QUOTATION MARK 0x00ac: 0x00aa, # NOT SIGN 0x00b0: 0x00f8, # DEGREE SIGN 0x00b1: 0x00f1, # PLUS-MINUS SIGN 0x00b2: 0x00fd, # SUPERSCRIPT TWO 0x00b5: 0x00e6, # MICRO SIGN 0x00b7: 0x00fa, # MIDDLE DOT 0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR 0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK 0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER 0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF 0x00bf: 0x00a8, # INVERTED QUESTION MARK 0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE 0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S (GERMAN) 0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE 0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE 0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE 0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE 0x00f7: 0x00f6, # DIVISION SIGN 0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE 0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK 0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA 0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA 0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA 0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI 0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA 0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA 0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA 0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON 0x03c0: 0x00e3, # GREEK SMALL LETTER PI 0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA 0x03c4: 0x00e7, # GREEK SMALL LETTER TAU 0x03c6: 0x00ed, # GREEK SMALL LETTER PHI 0x05d0: 0x0080, # HEBREW LETTER ALEF 0x05d1: 0x0081, # HEBREW LETTER BET 0x05d2: 0x0082, # HEBREW LETTER GIMEL 0x05d3: 0x0083, # HEBREW LETTER DALET 0x05d4: 0x0084, # HEBREW LETTER HE 0x05d5: 0x0085, # HEBREW LETTER VAV 0x05d6: 0x0086, # HEBREW LETTER ZAYIN 0x05d7: 0x0087, # HEBREW LETTER HET 0x05d8: 0x0088, # HEBREW LETTER TET 0x05d9: 0x0089, # HEBREW LETTER YOD 0x05da: 0x008a, # HEBREW LETTER FINAL KAF 0x05db: 0x008b, # HEBREW LETTER KAF 0x05dc: 0x008c, # HEBREW LETTER LAMED 0x05dd: 0x008d, # HEBREW LETTER FINAL MEM 0x05de: 0x008e, # HEBREW LETTER MEM 0x05df: 0x008f, # HEBREW LETTER FINAL NUN 
0x05e0: 0x0090, # HEBREW LETTER NUN 0x05e1: 0x0091, # HEBREW LETTER SAMEKH 0x05e2: 0x0092, # HEBREW LETTER AYIN 0x05e3: 0x0093, # HEBREW LETTER FINAL PE 0x05e4: 0x0094, # HEBREW LETTER PE 0x05e5: 0x0095, # HEBREW LETTER FINAL TSADI 0x05e6: 0x0096, # HEBREW LETTER TSADI 0x05e7: 0x0097, # HEBREW LETTER QOF 0x05e8: 0x0098, # HEBREW LETTER RESH 0x05e9: 0x0099, # HEBREW LETTER SHIN 0x05ea: 0x009a, # HEBREW LETTER TAV 0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N 0x20a7: 0x009e, # PESETA SIGN 0x2219: 0x00f9, # BULLET OPERATOR 0x221a: 0x00fb, # SQUARE ROOT 0x221e: 0x00ec, # INFINITY 0x2229: 0x00ef, # INTERSECTION 0x2248: 0x00f7, # ALMOST EQUAL TO 0x2261: 0x00f0, # IDENTICAL TO 0x2264: 0x00f3, # LESS-THAN OR EQUAL TO 0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO 0x2310: 0x00a9, # REVERSED NOT SIGN 0x2320: 0x00f4, # TOP HALF INTEGRAL 0x2321: 0x00f5, # BOTTOM HALF INTEGRAL 0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL 0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL 0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT 0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT 0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL 0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL 0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND 
RIGHT SINGLE 0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT 0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x2580: 0x00df, # UPPER HALF BLOCK 0x2584: 0x00dc, # LOWER HALF BLOCK 0x2588: 0x00db, # FULL BLOCK 0x258c: 0x00dd, # LEFT HALF BLOCK 0x2590: 0x00de, # RIGHT HALF BLOCK 0x2591: 0x00b0, # LIGHT SHADE 0x2592: 0x00b1, # MEDIUM SHADE 0x2593: 0x00b2, # DARK SHADE 0x25a0: 0x00fe, # BLACK SQUARE }
mit
leezu/mxnet
tools/bandwidth/measure.py
40
6103
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""Benchmark the communication bandwidth of an mxnet kvstore by repeatedly
pushing and pulling network-sized gradient arrays between devices."""

import os
import sys

# Make the in-tree mxnet python package and the example network symbols
# importable when running straight from a source checkout.
curr_path = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(curr_path, "../../python"))
sys.path.insert(0, os.path.join(curr_path, "../../example/image-classification/symbols"))

import mxnet as mx
import logging
import argparse
import time
import numpy as np
from importlib import import_module
from collections import namedtuple
from functools import reduce

logger = logging.getLogger()
logger.setLevel(logging.INFO)


def parse_args():
    """Parse command-line options for the kvstore bandwidth benchmark."""
    parser = argparse.ArgumentParser(description="command for benchmark kv-store")
    parser.add_argument('--network', type=str, default="resnet",
                        help='the neural network to test')
    parser.add_argument('--gpus', type=str, default='0,1',
                        help='the gpus to be used, e.g "0,1,2,3"')
    parser.add_argument('--num-layers', type=int, default=152,
                        help='number of layers, can be used for resnet')
    parser.add_argument('--kv-store', type=str, default='device',
                        help='the kvstore type')
    parser.add_argument('--num-batches', type=int, default=5,
                        help='number of batches to run')
    parser.add_argument('--disp-batches', type=int, default=1,
                        help='show averaged results for every n batches')
    parser.add_argument('--test-results', type=int, default=1,
                        # fixed typo: "evalute" -> "evaluate"
                        help='if or not evaluate the results correctness')
    parser.add_argument('--image-shape', type=str, default='3,224,224',
                        help='input images shape')
    parser.add_argument('--num-classes', type=int, default=1000,
                        help='number of classes')
    parser.add_argument('--optimizer', type=str, default='None',
                        help='the optimizer set to kvstore. None means no optimizer')
    parser.add_argument('--gc-type', type=str, default='none',
                        help='type of gradient compression')
    args = parser.parse_args()
    logging.info(args)
    return args


def get_shapes(symbol, data_shape):
    """Return the shapes of all weight/bias arguments of ``symbol``.

    Parameters
    ----------
    symbol : mx.sym.Symbol
        The network whose learnable parameter shapes are wanted.
    data_shape : tuple
        Shape of the ``data`` input used to run shape inference.
    """
    arg_name = symbol.list_arguments()
    arg_shape, _, _ = symbol.infer_shape(data=data_shape)
    shapes = [s for n, s in zip(arg_name, arg_shape)
              if 'weight' in n or 'bias' in n]
    return shapes


def diff(a, b):
    """Sum of absolute element-wise differences between two NDArrays."""
    return np.sum(np.abs(a.asnumpy() - b.asnumpy()))


def error(gpu_res, cpu_res):
    """Relative L1 error between per-GPU results and the CPU reference.

    ``gpu_res`` is a list (one entry per array) of lists (one entry per
    device); ``cpu_res`` is the flat list of CPU reference arrays.
    """
    res = sum([sum([diff(a, b) for a in w]) for w, b in zip(gpu_res, cpu_res)])
    res /= sum([np.sum(np.abs(g.asnumpy())) for g in cpu_res])
    return res


def run(network, optimizer, gpus, kv_store, image_shape, disp_batches,
        num_batches, test_results, gc_type, **kwargs):
    """Run the push/pull bandwidth benchmark.

    Returns a list of ``Results`` namedtuples, one per display interval,
    with fields ``iter``, ``time``, ``bandwidth`` (GB/sec per gpu, derived
    from the total parameter size) and ``error`` (-1 when result checking
    is disabled).
    """
    # create kvstore and optimizer
    devs = [mx.gpu(int(i)) for i in gpus.split(',')]
    kv = mx.kv.create(kv_store)
    if gc_type != 'none':
        kv.set_gradient_compression({'type': gc_type})
    if optimizer is None or optimizer == 'None':
        opt = None
        # No optimizer requested, so no CPU reference updater either.
        updater = None
    else:
        opt = mx.optimizer.Optimizer.create_optimizer(optimizer)
        kv.set_optimizer(opt)
        # BUGFIX: the updater used to be created unconditionally, which
        # crashed with the default --optimizer None (create_optimizer
        # raises on an unknown name). Only build it when an optimizer is
        # actually in use; a separate instance keeps the CPU reference
        # state independent of the kvstore's optimizer state.
        updater = mx.optimizer.get_updater(
            mx.optimizer.Optimizer.create_optimizer(optimizer))

    # create network
    symbol = import_module(network).get_symbol(image_shape=image_shape, **kwargs)

    # a fake batch size 32, which does not affect the results
    data_shape = (32,) + tuple([int(s) for s in image_shape.split(',')])
    shapes = get_shapes(symbol, data_shape)

    # total size of float32 parameters, in MB
    size = float(sum([reduce(lambda x, y: x * y, s, 1) for s in shapes])) * 4 / 1e6
    logging.info('num of arrays = %d, total size = %f MB' % (len(shapes), size))

    for i, s in enumerate(shapes):
        kv.init(i, mx.nd.zeros(s))

    # identical random gradients replicated onto every device
    grads_val = [[mx.random.uniform(-1, 1, shape=s) for d in devs] for s in shapes]
    grads = [[g.as_in_context(d) for g, d in zip(gs, devs)] for gs in grads_val]
    weights = [[mx.nd.zeros(s, d) for d in devs] for s in shapes]
    # CPU reference: the aggregated gradient each push should produce
    cpu_grads = [mx.nd.array(sum([g.asnumpy() for g in gs])) * kv.num_workers
                 for gs in grads_val]
    cpu_weights = [mx.nd.zeros(s) for s in shapes]

    toc = 0
    Results = namedtuple('Results', ['iter', 'time', 'bandwidth', 'error'])
    res = []
    for b in range(0, num_batches + 1):
        tic = time.time()
        # push all gradients, then pull all weights; priority = array index
        for i, g in enumerate(grads):
            kv.push(i, g, i)
        for i, w in enumerate(weights):
            kv.pull(i, w, i)
        # block until all pulled data has arrived before stopping the clock
        for ws in weights:
            for w in ws:
                w.wait_to_read()
        toc += time.time() - tic
        if test_results:
            if opt is None:
                err = error(weights, cpu_grads)
            else:
                for i, wg in enumerate(zip(cpu_weights, cpu_grads)):
                    updater(i, wg[1], wg[0])
                err = error(weights, cpu_weights)
        else:
            err = -1

        if b % disp_batches == 0:
            toc /= disp_batches
            if b != 0:  # 0 is used for warmup, ignored
                r = Results(iter=b, time=toc, error=err,
                            bandwidth=size * 2 * (len(devs) - 1) / len(devs) / toc / 1e3)
                logging.info('iter %d, %f sec, %f GB/sec per gpu, error %f' % (
                    r.iter, r.time, r.bandwidth, r.error))
                res.append(r)
            toc = 0
    return res


if __name__ == "__main__":
    args = parse_args()
    run(**vars(args))
apache-2.0
stclair/wes-cms
djangoappengine/management/commands/runserver.py
14
3475
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import sys

from django.db import connections
from ...boot import PROJECT_DIR
from ...db.base import DatabaseWrapper
from django.core.management.base import BaseCommand
# NOTE(review): ImproperlyConfigured is imported but not referenced in this
# chunk; it may be used elsewhere or be dead — confirm before removing.
from django.core.exceptions import ImproperlyConfigured

def start_dev_appserver(argv):
    """Starts the App Engine dev_appserver program for the Django project.

    The appserver is run with default parameters. If you need to pass any special
    parameters to the dev_appserver you will have to invoke it manually.

    ``argv`` is the full management-command argv: argv[0] is the program name,
    argv[1] the subcommand, and argv[2:] optional [ipaddr:]port / flags.
    """
    from google.appengine.tools import dev_appserver_main
    progname = argv[0]
    args = []
    # hack __main__ so --help in dev_appserver_main works OK.
    sys.modules['__main__'] = dev_appserver_main
    # Set bind ip/port if specified.
    addr, port = None, '8000'
    if len(argv) > 2:
        if not argv[2].startswith('-'):
            # First extra argument is "addr:port" or just "addr"/"port".
            addrport = argv[2]
            try:
                addr, port = addrport.split(":")
            except ValueError:
                # No colon: treat the whole token as the address.
                addr = addrport
        else:
            # A flag: forward it to dev_appserver untouched.
            args.append(argv[2])
        args.extend(argv[3:])
    if addr:
        args.extend(["--address", addr])
    if port:
        args.extend(["--port", port])
    # Add email settings
    from django.conf import settings
    if '--smtp_host' not in args and '--enable_sendmail' not in args:
        args.extend(['--smtp_host', settings.EMAIL_HOST,
                     '--smtp_port', str(settings.EMAIL_PORT),
                     '--smtp_user', settings.EMAIL_HOST_USER,
                     '--smtp_password', settings.EMAIL_HOST_PASSWORD])
    # Pass the application specific datastore location to the server.
    for name in connections:
        connection = connections[name]
        if isinstance(connection, DatabaseWrapper):
            # p is used here as (datastore_path, blobstore_path, history_path);
            # only explicit flags already on the command line take precedence.
            p = connection._get_paths()
            if '--datastore_path' not in args:
                args.extend(['--datastore_path', p[0]])
            if '--blobstore_path' not in args:
                args.extend(['--blobstore_path', p[1]])
            if '--history_path' not in args:
                args.extend(['--history_path', p[2]])
            # Only the first App Engine connection is consulted.
            break
    # Reset logging level to INFO as dev_appserver will spew tons of debug logs
    logging.getLogger().setLevel(logging.INFO)
    # Append the current working directory to the arguments.
    dev_appserver_main.main([progname] + args + [PROJECT_DIR])

class Command(BaseCommand):
    """Overrides the default Django runserver command.

    Instead of starting the default Django development server this command
    fires up a copy of the full fledged App Engine dev_appserver that emulates
    the live environment your application will be deployed to.
    """
    help = 'Runs a copy of the App Engine development server.'
    args = '[optional port number, or ipaddr:port]'

    def run_from_argv(self, argv):
        # Bypass Django's normal option parsing entirely and hand the raw
        # argv to the App Engine dev_appserver launcher above.
        start_dev_appserver(argv)
bsd-3-clause
alexschiller/osf.io
addons/dataverse/migrations/0001_initial.py
4
2816
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-10-31 20:25
# NOTE: this file was auto-generated by Django's makemigrations; avoid
# hand-editing anything beyond comments.
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import osf.models.base
import osf.utils.datetime_aware_jsonfield


class Migration(migrations.Migration):
    # Initial schema for the dataverse addon: per-node settings, per-user
    # settings, and the FK linking a node's settings to its user's settings.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('osf', '0013_auto_20161028_1014'),
    ]

    operations = [
        # Per-node dataverse configuration (selected dataverse + dataset).
        migrations.CreateModel(
            name='NodeSettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
                ('deleted', models.BooleanField(default=False)),
                ('dataverse_alias', models.TextField(blank=True, null=True)),
                ('dataverse', models.TextField(blank=True, null=True)),
                ('dataset_doi', models.TextField(blank=True, null=True)),
                ('_dataset_id', models.TextField(blank=True, null=True)),
                ('dataset', models.TextField(blank=True, null=True)),
                ('external_account', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_dataverse_node_settings', to='osf.ExternalAccount')),
                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_dataverse_node_settings', to='osf.AbstractNode')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Per-user dataverse credentials / oauth grant bookkeeping.
        migrations.CreateModel(
            name='UserSettings',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
                ('deleted', models.BooleanField(default=False)),
                ('oauth_grants', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=dict)),
                ('owner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='addons_dataverse_user_settings', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Added after both models exist to avoid a forward reference.
        migrations.AddField(
            model_name='nodesettings',
            name='user_settings',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='addons_dataverse.UserSettings'),
        ),
    ]
apache-2.0
HonzaKral/django
django/contrib/postgres/forms/array.py
258
6743
"""Form fields and widgets for PostgreSQL array support."""
import copy

from django import forms
from django.contrib.postgres.validators import (
    ArrayMaxLengthValidator, ArrayMinLengthValidator,
)
from django.core.exceptions import ValidationError
from django.utils import six
from django.utils.safestring import mark_safe
from django.utils.translation import string_concat, ugettext_lazy as _


class SimpleArrayField(forms.CharField):
    """A form field that accepts a delimiter-separated string and cleans it
    into a list by delegating each item to ``base_field``."""

    default_error_messages = {
        'item_invalid': _('Item %(nth)s in the array did not validate: '),
    }

    def __init__(self, base_field, delimiter=',', max_length=None, min_length=None,
                 *args, **kwargs):
        self.base_field = base_field
        self.delimiter = delimiter
        super(SimpleArrayField, self).__init__(*args, **kwargs)
        if min_length is not None:
            self.min_length = min_length
            self.validators.append(ArrayMinLengthValidator(int(min_length)))
        if max_length is not None:
            self.max_length = max_length
            self.validators.append(ArrayMaxLengthValidator(int(max_length)))

    def prepare_value(self, value):
        # Render a python list back into the delimited string shown in the form.
        if isinstance(value, list):
            return self.delimiter.join(six.text_type(self.base_field.prepare_value(v))
                                       for v in value)
        return value

    def to_python(self, value):
        """Split on the delimiter and convert each item via the base field,
        collecting per-item errors annotated with the item index."""
        if value:
            items = value.split(self.delimiter)
        else:
            items = []
        errors = []
        values = []
        for i, item in enumerate(items):
            try:
                values.append(self.base_field.to_python(item))
            except ValidationError as e:
                for error in e.error_list:
                    errors.append(ValidationError(
                        string_concat(self.error_messages['item_invalid'], error.message),
                        code='item_invalid',
                        params={'nth': i},
                    ))
        if errors:
            raise ValidationError(errors)
        return values

    def validate(self, value):
        # Field-level validation first, then per-item validation.
        super(SimpleArrayField, self).validate(value)
        errors = []
        for i, item in enumerate(value):
            try:
                self.base_field.validate(item)
            except ValidationError as e:
                for error in e.error_list:
                    errors.append(ValidationError(
                        string_concat(self.error_messages['item_invalid'], error.message),
                        code='item_invalid',
                        params={'nth': i},
                    ))
        if errors:
            raise ValidationError(errors)

    def run_validators(self, value):
        # Same pattern as validate(): run the base field's validators per item.
        super(SimpleArrayField, self).run_validators(value)
        errors = []
        for i, item in enumerate(value):
            try:
                self.base_field.run_validators(item)
            except ValidationError as e:
                for error in e.error_list:
                    errors.append(ValidationError(
                        string_concat(self.error_messages['item_invalid'], error.message),
                        code='item_invalid',
                        params={'nth': i},
                    ))
        if errors:
            raise ValidationError(errors)


class SplitArrayWidget(forms.Widget):
    """Renders ``size`` copies of an inner widget, one per array slot,
    named ``<name>_0`` … ``<name>_<size-1>``."""

    def __init__(self, widget, size, **kwargs):
        # Accept either a widget class or an instance.
        self.widget = widget() if isinstance(widget, type) else widget
        self.size = size
        super(SplitArrayWidget, self).__init__(**kwargs)

    @property
    def is_hidden(self):
        return self.widget.is_hidden

    def value_from_datadict(self, data, files, name):
        return [self.widget.value_from_datadict(data, files, '%s_%s' % (name, index))
                for index in range(self.size)]

    def id_for_label(self, id_):
        # See the comment for RadioSelect.id_for_label()
        if id_:
            id_ += '_0'
        return id_

    def render(self, name, value, attrs=None):
        if self.is_localized:
            self.widget.is_localized = self.is_localized
        value = value or []
        output = []
        final_attrs = self.build_attrs(attrs)
        id_ = final_attrs.get('id')
        # Render at least self.size sub-widgets; extra initial values get
        # widgets too so nothing is silently dropped.
        for i in range(max(len(value), self.size)):
            try:
                widget_value = value[i]
            except IndexError:
                widget_value = None
            if id_:
                final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
            output.append(self.widget.render(name + '_%s' % i, widget_value, final_attrs))
        return mark_safe(self.format_output(output))

    def format_output(self, rendered_widgets):
        return ''.join(rendered_widgets)

    @property
    def media(self):
        return self.widget.media

    def __deepcopy__(self, memo):
        obj = super(SplitArrayWidget, self).__deepcopy__(memo)
        obj.widget = copy.deepcopy(self.widget)
        return obj

    @property
    def needs_multipart_form(self):
        return self.widget.needs_multipart_form


class SplitArrayField(forms.Field):
    """A form field presenting a fixed-size array as ``size`` separate
    sub-fields, optionally trimming trailing empty values."""

    default_error_messages = {
        'item_invalid': _('Item %(nth)s in the array did not validate: '),
    }

    def __init__(self, base_field, size, remove_trailing_nulls=False, **kwargs):
        self.base_field = base_field
        self.size = size
        self.remove_trailing_nulls = remove_trailing_nulls
        widget = SplitArrayWidget(widget=base_field.widget, size=size)
        kwargs.setdefault('widget', widget)
        super(SplitArrayField, self).__init__(**kwargs)

    def clean(self, value):
        """Clean each item via the base field; optionally strip trailing
        empty values, then raise any accumulated per-item errors."""
        cleaned_data = []
        errors = []
        if not any(value) and self.required:
            raise ValidationError(self.error_messages['required'])
        # NOTE(review): value is expected to have at least self.size entries
        # (SplitArrayWidget.value_from_datadict guarantees this) — an
        # IndexError here would indicate a non-widget caller.
        max_size = max(self.size, len(value))
        for i in range(max_size):
            item = value[i]
            try:
                cleaned_data.append(self.base_field.clean(item))
                errors.append(None)
            except ValidationError as error:
                errors.append(ValidationError(
                    string_concat(self.error_messages['item_invalid'], error.message),
                    code='item_invalid',
                    params={'nth': i},
                ))
                # Keep positions aligned between cleaned_data and errors.
                cleaned_data.append(None)
        if self.remove_trailing_nulls:
            null_index = None
            for i, item_value in reversed(list(enumerate(cleaned_data))):
                if item_value in self.base_field.empty_values:
                    null_index = i
                else:
                    break
            # BUGFIX: was `if null_index:` — when every value is empty,
            # null_index == 0 is falsy and trimming was skipped entirely.
            if null_index is not None:
                cleaned_data = cleaned_data[:null_index]
                errors = errors[:null_index]
        errors = list(filter(None, errors))
        if errors:
            raise ValidationError(errors)
        return cleaned_data
bsd-3-clause
mikeireland/chronostar
chronostar/readparam.py
1
5116
""" This module defines the parameter reading function. Used to parse a text file into a dictionary to be passed to datatool.prepare_data() It also defines the default parameters for prepare_data() Credit: Mark Krumholz Notes ----- Probably should transition to YAML format, and utilise pre-existing libraries for parameter input. """ # python3 throws FileNotFoundError that is essentially the same as IOError try: FileNotFoundError except NameError: FileNotFoundError = IOError def log_used_pars(custom_pars, default_pars=None): """ Write parameter record to file, making a note which have been changed. Parameters ---------- custom_pars : dict A dict that has been generated by `readParam` Returns ------- None Side effects ------------ Writes a file to `data_pars['par_log_file']` """ if default_pars is None: default_pars = {} # update defaults (no change if already performed) combined_pars = dict(default_pars) combined_pars.update(custom_pars) # data_pars = update_data_defaults(data_pars, default) with open(combined_pars['par_log_file'], 'w') as fp: fp.write('# Parameters used\n\n') for k in sorted(combined_pars.keys()): if k not in default_pars.keys(): msg = '# [NO PROVIDED DEFAULT]' elif combined_pars[k] != default_pars[k]: msg = '# [CHANGED]' else: msg = '' line = '{:25} = {:45} {}\n'.format(k, str(combined_pars[k]), msg) fp.write(line.replace("'",'')) def readParam(param_file, default_pars=None, noCheck=False): """ This function reads a parameter file. Parameters ---------- param_file : string A string giving the name of the parameter file noCheck : bool If True, no checking is performed to make sure that all mandatory parameters have been specified Returns ------- param_dict : dict A dict containing a parsed representation of the input file Notes ----- TODO: Work out how to format input for synthetic association TODO: maybe just dont? And require the use of a script to intialise things? 
""" if default_pars is None: default_pars = {} # Prepare an empty dict to hold inputs custom_pars = {} # Try to open the file with open(param_file, 'r') as fp: # Read the file for line in fp: # Skip blank and comment lines if line == '\n': continue if line.strip()[0] == "#": continue # Break line up based on equal sign linesplit = line.split("=") if len(linesplit) < 2: print("Error parsing input line: " + line) raise IOError # Trim trailing comments from portion after equal sign linesplit2 = linesplit[1].split('#') # If right side has a non empty value, then we # store token-value pairs, as strings for now. # Type conversion happens below. if linesplit2[0].strip() != '': custom_pars[linesplit[0].strip()] = linesplit2[0].strip() # Try converting parameters to bools or numbers, for convenience for k in custom_pars.keys(): try: custom_pars[k] = int(custom_pars[k]) except ValueError: try: custom_pars[k] = float(custom_pars[k]) except ValueError: pass # Order is important, as int(True) -> 1 try: if custom_pars[k].lower() == 'true': custom_pars[k] = True elif custom_pars[k].lower() == 'false': custom_pars[k] = False except AttributeError: pass # Find any lists (of floats) and convert accordingly # Assumes first char is '[' and last char is ']' # Can allow for trailing ',' for k in custom_pars.keys(): try: if custom_pars[k][0] == '[': # First build list of strings custom_pars[k] = [val.strip() for val in custom_pars[k][1:-1].split(',') if val.strip()] # Then try converting to floats try: custom_pars[k] = [float(val) for val in custom_pars[k]] except ValueError: pass except (TypeError, IndexError): pass # Now that we have collected custom parameters into a dictionary, # Make copy of default parameters, and update combined_pars = dict(default_pars) combined_pars.update(custom_pars) # if not noCheck: # mandatory = ['alpha', 'gamma', 'ibc_pres_type', 'ibc_enth_type', # 'ibc_pres_val', 'obc_pres_type', 'obc_enth_type', # 'obc_pres_val'] # for m in mandatory: # if not m in 
param_dict: # raise ValueError("Error: must specify parameter " + m + "!\n") return combined_pars
mit
ikaee/bfr-attendant
facerecognitionlibrary/jni-build/jni/include/tensorflow/contrib/session_bundle/example/export_half_plus_two.py
44
6027
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Exports a toy linear regression inference graph. Exports a TensorFlow graph to /tmp/half_plus_two/ based on the Exporter format. This graph calculates, y = a*x + b where a and b are variables with a=0.5 and b=2. Output from this program is typically used to exercise Session loading and execution code. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import sys import tensorflow as tf from tensorflow.contrib.session_bundle import exporter FLAGS = None def Export(export_dir, use_checkpoint_v2): with tf.Session() as sess: # Make model parameters a&b variables instead of constants to # exercise the variable reloading mechanisms. a = tf.Variable(0.5, name="a") b = tf.Variable(2.0, name="b") # Create a placeholder for serialized tensorflow.Example messages to be fed. serialized_tf_example = tf.placeholder(tf.string, name="tf_example") # Parse the tensorflow.Example looking for a feature named "x" with a single # floating point value. 
feature_configs = {"x": tf.FixedLenFeature([1], dtype=tf.float32),} tf_example = tf.parse_example(serialized_tf_example, feature_configs) # Use tf.identity() to assign name x = tf.identity(tf_example["x"], name="x") # Calculate, y = a*x + b y = tf.add(tf.multiply(a, x), b, name="y") # Setup a standard Saver for our variables. save = tf.train.Saver( { "a": a, "b": b }, sharded=True, write_version=tf.train.SaverDef.V2 if use_checkpoint_v2 else tf.train.SaverDef.V1) # asset_path contains the base directory of assets used in training (e.g. # vocabulary files). original_asset_path = tf.constant("/tmp/original/export/assets") # Ops reading asset files should reference the asset_path tensor # which stores the original asset path at training time and the # overridden assets directory at restore time. asset_path = tf.Variable(original_asset_path, name="asset_path", trainable=False, collections=[]) assign_asset_path = asset_path.assign(original_asset_path) # Use a fixed global step number. global_step_tensor = tf.Variable(123, name="global_step") # Create a RegressionSignature for our input and output. regression_signature = exporter.regression_signature( input_tensor=serialized_tf_example, # Use tf.identity here because we export two signatures here. # Otherwise only graph for one of the signatures will be loaded # (whichever is created first) during serving. output_tensor=tf.identity(y)) named_graph_signature = { "inputs": exporter.generic_signature({"x": x}), "outputs": exporter.generic_signature({"y": y}) } # Create two filename assets and corresponding tensors. # TODO(b/26254158) Consider adding validation of file existance as well as # hashes (e.g. sha1) for consistency. 
original_filename1 = tf.constant("hello1.txt") tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename1) filename1 = tf.Variable(original_filename1, name="filename1", trainable=False, collections=[]) assign_filename1 = filename1.assign(original_filename1) original_filename2 = tf.constant("hello2.txt") tf.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, original_filename2) filename2 = tf.Variable(original_filename2, name="filename2", trainable=False, collections=[]) assign_filename2 = filename2.assign(original_filename2) # Init op contains a group of all variables that we assign. init_op = tf.group(assign_asset_path, assign_filename1, assign_filename2) # CopyAssets is used as a callback during export to copy files to the # given export directory. def CopyAssets(filepaths, export_path): print("copying asset files to: %s" % export_path) for filepath in filepaths: print("copying asset file: %s" % filepath) # Run an export. tf.global_variables_initializer().run() export = exporter.Exporter(save) export.init( sess.graph.as_graph_def(), init_op=init_op, default_graph_signature=regression_signature, named_graph_signatures=named_graph_signature, assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS), assets_callback=CopyAssets) export.export(export_dir, global_step_tensor, sess) def main(_): Export(FLAGS.export_dir, FLAGS.use_checkpoint_v2) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.register("type", "bool", lambda v: v.lower() == "true") parser.add_argument( "--export_dir", type=str, default="/tmp/half_plus_two", help="Directory where to export inference model." ) parser.add_argument( "--use_checkpoint_v2", "bool", nargs="?", const=True, default=False, help="If true, write v2 checkpoint files." ) FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
apache-2.0
cpyou/odoo
addons/decimal_precision/tests/test_qweb_float.py
335
1986
# -*- coding: utf-8 -*- from openerp.tests import common class TestFloatExport(common.TransactionCase): def setUp(self): super(TestFloatExport, self).setUp() self.Model = self.registry('decimal.precision.test') def get_converter(self, name): converter = self.registry('ir.qweb.field.float') field = self.Model._fields[name] return lambda value, options=None: converter.value_to_html( self.cr, self.uid, value, field, options=options, context=None) def test_basic_float(self): converter = self.get_converter('float') self.assertEqual( converter(42.0), "42.0") self.assertEqual( converter(42.12345), "42.12345") converter = self.get_converter('float_2') self.assertEqual( converter(42.0), "42.00") self.assertEqual( converter(42.12345), "42.12") converter = self.get_converter('float_4') self.assertEqual( converter(42.0), '42.0000') self.assertEqual( converter(42.12345), '42.1234') def test_precision_domain(self): DP = self.registry('decimal.precision') DP.create(self.cr, self.uid, { 'name': 'A', 'digits': 2, }) DP.create(self.cr, self.uid, { 'name': 'B', 'digits': 6, }) converter = self.get_converter('float') self.assertEqual( converter(42.0, {'decimal_precision': 'A'}), '42.00') self.assertEqual( converter(42.0, {'decimal_precision': 'B'}), '42.000000') converter = self.get_converter('float_4') self.assertEqual( converter(42.12345, {'decimal_precision': 'A'}), '42.12') self.assertEqual( converter(42.12345, {'decimal_precision': 'B'}), '42.123450')
agpl-3.0
scipy/scipy
scipy/linalg/decomp_lu.py
18
6748
"""LU decomposition functions.""" from warnings import warn from numpy import asarray, asarray_chkfinite # Local imports from .misc import _datacopied, LinAlgWarning from .lapack import get_lapack_funcs from .flinalg import get_flinalg_funcs __all__ = ['lu', 'lu_solve', 'lu_factor'] def lu_factor(a, overwrite_a=False, check_finite=True): """ Compute pivoted LU decomposition of a matrix. The decomposition is:: A = P L U where P is a permutation matrix, L lower triangular with unit diagonal elements, and U upper triangular. Parameters ---------- a : (M, M) array_like Matrix to decompose overwrite_a : bool, optional Whether to overwrite data in A (may increase performance) check_finite : bool, optional Whether to check that the input matrix contains only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- lu : (N, N) ndarray Matrix containing U in its upper triangle, and L in its lower triangle. The unit diagonal elements of L are not stored. piv : (N,) ndarray Pivot indices representing the permutation matrix P: row i of matrix was interchanged with row piv[i]. See also -------- lu_solve : solve an equation system using the LU factorization of a matrix Notes ----- This is a wrapper to the ``*GETRF`` routines from LAPACK. 
Examples -------- >>> from scipy.linalg import lu_factor >>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]]) >>> lu, piv = lu_factor(A) >>> piv array([2, 2, 3, 3], dtype=int32) Convert LAPACK's ``piv`` array to NumPy index and test the permutation >>> piv_py = [2, 0, 3, 1] >>> L, U = np.tril(lu, k=-1) + np.eye(4), np.triu(lu) >>> np.allclose(A[piv_py] - L @ U, np.zeros((4, 4))) True """ if check_finite: a1 = asarray_chkfinite(a) else: a1 = asarray(a) if len(a1.shape) != 2 or (a1.shape[0] != a1.shape[1]): raise ValueError('expected square matrix') overwrite_a = overwrite_a or (_datacopied(a1, a)) getrf, = get_lapack_funcs(('getrf',), (a1,)) lu, piv, info = getrf(a1, overwrite_a=overwrite_a) if info < 0: raise ValueError('illegal value in %dth argument of ' 'internal getrf (lu_factor)' % -info) if info > 0: warn("Diagonal number %d is exactly zero. Singular matrix." % info, LinAlgWarning, stacklevel=2) return lu, piv def lu_solve(lu_and_piv, b, trans=0, overwrite_b=False, check_finite=True): """Solve an equation system, a x = b, given the LU factorization of a Parameters ---------- (lu, piv) Factorization of the coefficient matrix a, as given by lu_factor b : array Right-hand side trans : {0, 1, 2}, optional Type of system to solve: ===== ========= trans system ===== ========= 0 a x = b 1 a^T x = b 2 a^H x = b ===== ========= overwrite_b : bool, optional Whether to overwrite data in b (may increase performance) check_finite : bool, optional Whether to check that the input matrices contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. 
Returns ------- x : array Solution to the system See also -------- lu_factor : LU factorize a matrix Examples -------- >>> from scipy.linalg import lu_factor, lu_solve >>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]]) >>> b = np.array([1, 1, 1, 1]) >>> lu, piv = lu_factor(A) >>> x = lu_solve((lu, piv), b) >>> np.allclose(A @ x - b, np.zeros((4,))) True """ (lu, piv) = lu_and_piv if check_finite: b1 = asarray_chkfinite(b) else: b1 = asarray(b) overwrite_b = overwrite_b or _datacopied(b1, b) if lu.shape[0] != b1.shape[0]: raise ValueError("Shapes of lu {} and b {} are incompatible" .format(lu.shape, b1.shape)) getrs, = get_lapack_funcs(('getrs',), (lu, b1)) x, info = getrs(lu, piv, b1, trans=trans, overwrite_b=overwrite_b) if info == 0: return x raise ValueError('illegal value in %dth argument of internal gesv|posv' % -info) def lu(a, permute_l=False, overwrite_a=False, check_finite=True): """ Compute pivoted LU decomposition of a matrix. The decomposition is:: A = P L U where P is a permutation matrix, L lower triangular with unit diagonal elements, and U upper triangular. Parameters ---------- a : (M, N) array_like Array to decompose permute_l : bool, optional Perform the multiplication P*L (Default: do not permute) overwrite_a : bool, optional Whether to overwrite data in a (may improve performance) check_finite : bool, optional Whether to check that the input matrix contains only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- **(If permute_l == False)** p : (M, M) ndarray Permutation matrix l : (M, K) ndarray Lower triangular or trapezoidal matrix with unit diagonal. K = min(M, N) u : (K, N) ndarray Upper triangular or trapezoidal matrix **(If permute_l == True)** pl : (M, K) ndarray Permuted L matrix. 
K = min(M, N) u : (K, N) ndarray Upper triangular or trapezoidal matrix Notes ----- This is a LU factorization routine written for SciPy. Examples -------- >>> from scipy.linalg import lu >>> A = np.array([[2, 5, 8, 7], [5, 2, 2, 8], [7, 5, 6, 6], [5, 4, 4, 8]]) >>> p, l, u = lu(A) >>> np.allclose(A - p @ l @ u, np.zeros((4, 4))) True """ if check_finite: a1 = asarray_chkfinite(a) else: a1 = asarray(a) if len(a1.shape) != 2: raise ValueError('expected matrix') overwrite_a = overwrite_a or (_datacopied(a1, a)) flu, = get_flinalg_funcs(('lu',), (a1,)) p, l, u, info = flu(a1, permute_l=permute_l, overwrite_a=overwrite_a) if info < 0: raise ValueError('illegal value in %dth argument of ' 'internal lu.getrf' % -info) if permute_l: return l, u return p, l, u
bsd-3-clause
formath/mxnet
example/rcnn/rcnn/processing/bbox_regression.py
25
5519
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
This file has functions about generating bounding box regression targets
"""

import numpy as np

from ..logger import logger
from bbox_transform import bbox_overlaps, bbox_transform
from rcnn.config import config


def compute_bbox_regression_targets(rois, overlaps, labels):
    """
    given rois, overlaps, gt labels, compute bounding box regression targets
    :param rois: roidb[i]['boxes'] k * 4
    :param overlaps: roidb[i]['max_overlaps'] k * 1
    :param labels: roidb[i]['max_classes'] k * 1
    :return: targets[i][class, dx, dy, dw, dh] k * 5
    """
    # Ensure ROIs are floats.  NOTE: the original used np.float, an alias of
    # the builtin float (i.e. float64) that was removed in NumPy 1.24;
    # np.float64 is the identical dtype and works on all NumPy versions.
    rois = rois.astype(np.float64, copy=False)

    # Sanity check
    if len(rois) != len(overlaps):
        logger.warning('bbox regression: len(rois) != len(overlaps)')

    # Indices of ground-truth ROIs
    gt_inds = np.where(overlaps == 1)[0]
    if len(gt_inds) == 0:
        # Without any ground-truth box there is nothing to regress against;
        # the argmax below would crash on an empty axis, so return all-zero
        # targets after warning (labels column stays 0 = background).
        logger.warning('bbox regression: len(gt_inds) == 0')
        return np.zeros((rois.shape[0], 5), dtype=np.float32)

    # Indices of examples for which we try to make predictions
    ex_inds = np.where(overlaps >= config.TRAIN.BBOX_REGRESSION_THRESH)[0]

    # Get IoU overlap between each ex ROI and gt ROI
    ex_gt_overlaps = bbox_overlaps(rois[ex_inds, :], rois[gt_inds, :])

    # Find which gt ROI each ex ROI has max overlap with:
    # this will be the ex ROI's gt target
    gt_assignment = ex_gt_overlaps.argmax(axis=1)
    gt_rois = rois[gt_inds[gt_assignment], :]
    ex_rois = rois[ex_inds, :]

    # Column 0 holds the class label; columns 1:5 hold (dx, dy, dw, dh).
    targets = np.zeros((rois.shape[0], 5), dtype=np.float32)
    targets[ex_inds, 0] = labels[ex_inds]
    targets[ex_inds, 1:] = bbox_transform(ex_rois, gt_rois)
    return targets


def add_bbox_regression_targets(roidb):
    """
    given roidb, add ['bbox_targets'] and normalize bounding box regression targets
    :param roidb: roidb to be processed. must have gone through imdb.prepare_roidb
    :return: means, std variances of targets
    """
    logger.info('bbox regression: add bounding box regression targets')
    assert len(roidb) > 0
    assert 'max_classes' in roidb[0]

    num_images = len(roidb)
    num_classes = roidb[0]['gt_overlaps'].shape[1]
    for im_i in range(num_images):
        rois = roidb[im_i]['boxes']
        max_overlaps = roidb[im_i]['max_overlaps']
        max_classes = roidb[im_i]['max_classes']
        roidb[im_i]['bbox_targets'] = compute_bbox_regression_targets(rois, max_overlaps, max_classes)

    if config.TRAIN.BBOX_NORMALIZATION_PRECOMPUTED:
        # use fixed / precomputed means and stds instead of empirical values
        means = np.tile(np.array(config.TRAIN.BBOX_MEANS), (num_classes, 1))
        stds = np.tile(np.array(config.TRAIN.BBOX_STDS), (num_classes, 1))
    else:
        # compute mean, std values per foreground class (class 0 = background
        # is skipped).  The epsilon avoids division by zero for classes with
        # no examples.
        class_counts = np.zeros((num_classes, 1)) + 1e-14
        sums = np.zeros((num_classes, 4))
        squared_sums = np.zeros((num_classes, 4))
        for im_i in range(num_images):
            targets = roidb[im_i]['bbox_targets']
            for cls in range(1, num_classes):
                cls_indexes = np.where(targets[:, 0] == cls)[0]
                if cls_indexes.size > 0:
                    class_counts[cls] += cls_indexes.size
                    sums[cls, :] += targets[cls_indexes, 1:].sum(axis=0)
                    squared_sums[cls, :] += (targets[cls_indexes, 1:] ** 2).sum(axis=0)

        means = sums / class_counts
        # var(x) = E(x^2) - E(x)^2
        stds = np.sqrt(squared_sums / class_counts - means ** 2)

    # normalized targets (in place, per foreground class)
    for im_i in range(num_images):
        targets = roidb[im_i]['bbox_targets']
        for cls in range(1, num_classes):
            cls_indexes = np.where(targets[:, 0] == cls)[0]
            roidb[im_i]['bbox_targets'][cls_indexes, 1:] -= means[cls, :]
            roidb[im_i]['bbox_targets'][cls_indexes, 1:] /= stds[cls, :]

    return means.ravel(), stds.ravel()


def expand_bbox_regression_targets(bbox_targets_data, num_classes):
    """
    expand from 5 to 4 * num_classes; only the right class has non-zero bbox regression targets
    :param bbox_targets_data: [k * 5]
    :param num_classes: number of classes
    :return: bbox target processed [k * 4 num_classes]
    bbox_weights ! only foreground boxes have bbox regression computation!
    """
    classes = bbox_targets_data[:, 0]
    bbox_targets = np.zeros((classes.size, 4 * num_classes), dtype=np.float32)
    bbox_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
    # Only foreground rows (class > 0) get non-zero targets/weights, placed
    # in the 4-column slot belonging to their class.
    indexes = np.where(classes > 0)[0]
    for index in indexes:
        cls = classes[index]
        start = int(4 * cls)
        end = start + 4
        bbox_targets[index, start:end] = bbox_targets_data[index, 1:]
        bbox_weights[index, start:end] = config.TRAIN.BBOX_WEIGHTS
    return bbox_targets, bbox_weights
apache-2.0
JJRcop/tgstation
tools/midi2piano/easygui/__init__.py
101
87642
""" @version: 0.96(2010-08-29) @note: ABOUT EASYGUI EasyGui provides an easy-to-use interface for simple GUI interaction with a user. It does not require the programmer to know anything about tkinter, frames, widgets, callbacks or lambda. All GUI interactions are invoked by simple function calls that return results. @note: WARNING about using EasyGui with IDLE You may encounter problems using IDLE to run programs that use EasyGui. Try it and find out. EasyGui is a collection of Tkinter routines that run their own event loops. IDLE is also a Tkinter application, with its own event loop. The two may conflict, with unpredictable results. If you find that you have problems, try running your EasyGui program outside of IDLE. Note that EasyGui requires Tk release 8.0 or greater. @note: LICENSE INFORMATION EasyGui version 0.96 Copyright (c) 2010, Stephen Raymond Ferg All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. @note: ABOUT THE EASYGUI LICENSE This license is what is generally known as the "modified BSD license", aka "revised BSD", "new BSD", "3-clause BSD". See http://www.opensource.org/licenses/bsd-license.php This license is GPL-compatible. See http://en.wikipedia.org/wiki/License_compatibility See http://www.gnu.org/licenses/license-list.html#GPLCompatibleLicenses The BSD License is less restrictive than GPL. It allows software released under the license to be incorporated into proprietary products. Works based on the software may be released under a proprietary license or as closed source software. http://en.wikipedia.org/wiki/BSD_licenses#3-clause_license_.28.22New_BSD_License.22.29 """ egversion = __doc__.split()[1] __all__ = ['ynbox' , 'ccbox' , 'boolbox' , 'indexbox' , 'msgbox' , 'buttonbox' , 'integerbox' , 'multenterbox' , 'enterbox' , 'exceptionbox' , 'choicebox' , 'codebox' , 'textbox' , 'diropenbox' , 'fileopenbox' , 'filesavebox' , 'passwordbox' , 'multpasswordbox' , 'multchoicebox' , 'abouteasygui' , 'egversion' , 'egdemo' , 'EgStore' ] import sys, os import string import pickle import traceback #-------------------------------------------------- # check python version and take appropriate action #-------------------------------------------------- """ From the python documentation: sys.hexversion contains the version number encoded as a single integer. This is guaranteed to increase with each version, including proper support for non- production releases. 
For example, to test that the Python interpreter is at least version 1.5.2, use: if sys.hexversion >= 0x010502F0: # use some advanced feature ... else: # use an alternative implementation or warn the user ... """ if sys.hexversion >= 0x020600F0: runningPython26 = True else: runningPython26 = False if sys.hexversion >= 0x030000F0: runningPython3 = True else: runningPython3 = False try: from PIL import Image as PILImage from PIL import ImageTk as PILImageTk PILisLoaded = True except: PILisLoaded = False if runningPython3: from tkinter import * import tkinter.filedialog as tk_FileDialog from io import StringIO else: from Tkinter import * import tkFileDialog as tk_FileDialog from StringIO import StringIO def write(*args): args = [str(arg) for arg in args] args = " ".join(args) sys.stdout.write(args) def writeln(*args): write(*args) sys.stdout.write("\n") say = writeln if TkVersion < 8.0 : stars = "*"*75 writeln("""\n\n\n""" + stars + """ You are running Tk version: """ + str(TkVersion) + """ You must be using Tk version 8.0 or greater to use EasyGui. Terminating. 
""" + stars + """\n\n\n""") sys.exit(0) def dq(s): return '"%s"' % s rootWindowPosition = "+300+200" PROPORTIONAL_FONT_FAMILY = ("MS", "Sans", "Serif") MONOSPACE_FONT_FAMILY = ("Courier") PROPORTIONAL_FONT_SIZE = 10 MONOSPACE_FONT_SIZE = 9 #a little smaller, because it it more legible at a smaller size TEXT_ENTRY_FONT_SIZE = 12 # a little larger makes it easier to see #STANDARD_SELECTION_EVENTS = ["Return", "Button-1"] STANDARD_SELECTION_EVENTS = ["Return", "Button-1", "space"] # Initialize some global variables that will be reset later __choiceboxMultipleSelect = None __widgetTexts = None __replyButtonText = None __choiceboxResults = None __firstWidget = None __enterboxText = None __enterboxDefaultText="" __multenterboxText = "" choiceboxChoices = None choiceboxWidget = None entryWidget = None boxRoot = None ImageErrorMsg = ( "\n\n---------------------------------------------\n" "Error: %s\n%s") #------------------------------------------------------------------- # various boxes built on top of the basic buttonbox #----------------------------------------------------------------------- #----------------------------------------------------------------------- # ynbox #----------------------------------------------------------------------- def ynbox(msg="Shall I continue?" , title=" " , choices=("Yes", "No") , image=None ): """ Display a msgbox with choices of Yes and No. The default is "Yes". The returned value is calculated this way:: if the first choice ("Yes") is chosen, or if the dialog is cancelled: return 1 else: return 0 If invoked without a msg argument, displays a generic request for a confirmation that the user wishes to continue. So it can be used this way:: if ynbox(): pass # continue else: sys.exit(0) # exit the program @arg msg: the msg to be displayed. 
@arg title: the window title @arg choices: a list or tuple of the choices to be displayed """ return boolbox(msg, title, choices, image=image) #----------------------------------------------------------------------- # ccbox #----------------------------------------------------------------------- def ccbox(msg="Shall I continue?" , title=" " , choices=("Continue", "Cancel") , image=None ): """ Display a msgbox with choices of Continue and Cancel. The default is "Continue". The returned value is calculated this way:: if the first choice ("Continue") is chosen, or if the dialog is cancelled: return 1 else: return 0 If invoked without a msg argument, displays a generic request for a confirmation that the user wishes to continue. So it can be used this way:: if ccbox(): pass # continue else: sys.exit(0) # exit the program @arg msg: the msg to be displayed. @arg title: the window title @arg choices: a list or tuple of the choices to be displayed """ return boolbox(msg, title, choices, image=image) #----------------------------------------------------------------------- # boolbox #----------------------------------------------------------------------- def boolbox(msg="Shall I continue?" , title=" " , choices=("Yes","No") , image=None ): """ Display a boolean msgbox. The default is the first choice. The returned value is calculated this way:: if the first choice is chosen, or if the dialog is cancelled: returns 1 else: returns 0 """ reply = buttonbox(msg=msg, choices=choices, title=title, image=image) if reply == choices[0]: return 1 else: return 0 #----------------------------------------------------------------------- # indexbox #----------------------------------------------------------------------- def indexbox(msg="Shall I continue?" , title=" " , choices=("Yes","No") , image=None ): """ Display a buttonbox with the specified choices. Return the index of the choice selected. 
""" reply = buttonbox(msg=msg, choices=choices, title=title, image=image) index = -1 for choice in choices: index = index + 1 if reply == choice: return index raise AssertionError( "There is a program logic error in the EasyGui code for indexbox.") #----------------------------------------------------------------------- # msgbox #----------------------------------------------------------------------- def msgbox(msg="(Your message goes here)", title=" ", ok_button="OK",image=None,root=None): """ Display a messagebox """ if type(ok_button) != type("OK"): raise AssertionError("The 'ok_button' argument to msgbox must be a string.") return buttonbox(msg=msg, title=title, choices=[ok_button], image=image,root=root) #------------------------------------------------------------------- # buttonbox #------------------------------------------------------------------- def buttonbox(msg="",title=" " ,choices=("Button1", "Button2", "Button3") , image=None , root=None ): """ Display a msg, a title, and a set of buttons. The buttons are defined by the members of the choices list. Return the text of the button that the user selected. @arg msg: the msg to be displayed. @arg title: the window title @arg choices: a list or tuple of the choices to be displayed """ global boxRoot, __replyButtonText, __widgetTexts, buttonsFrame # Initialize __replyButtonText to the first choice. # This is what will be used if the window is closed by the close button. 
__replyButtonText = choices[0] if root: root.withdraw() boxRoot = Toplevel(master=root) boxRoot.withdraw() else: boxRoot = Tk() boxRoot.withdraw() boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose ) boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(rootWindowPosition) boxRoot.minsize(400, 100) # ------------- define the messageFrame --------------------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) # ------------- define the imageFrame --------------------------------- tk_Image = None if image: imageFilename = os.path.normpath(image) junk,ext = os.path.splitext(imageFilename) if os.path.exists(imageFilename): if ext.lower() in [".gif", ".pgm", ".ppm"]: tk_Image = PhotoImage(master=boxRoot, file=imageFilename) else: if PILisLoaded: try: pil_Image = PILImage.open(imageFilename) tk_Image = PILImageTk.PhotoImage(pil_Image, master=boxRoot) except: msg += ImageErrorMsg % (imageFilename, "\nThe Python Imaging Library (PIL) could not convert this file to a displayable image." "\n\nPIL reports:\n" + exception_format()) else: # PIL is not loaded msg += ImageErrorMsg % (imageFilename, "\nI could not import the Python Imaging Library (PIL) to display the image.\n\n" "You may need to install PIL\n" "(http://www.pythonware.com/products/pil/)\n" "to display " + ext + " image files.") else: msg += ImageErrorMsg % (imageFilename, "\nImage file not found.") if tk_Image: imageFrame = Frame(master=boxRoot) imageFrame.pack(side=TOP, fill=BOTH) label = Label(imageFrame,image=tk_Image) label.image = tk_Image # keep a reference! 
label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m') # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) # -------------------- place the widgets in the frames ----------------------- messageWidget = Message(messageFrame, text=msg, width=400) messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m') __put_buttons_in_buttonframe(choices) # -------------- the action begins ----------- # put the focus on the first button __firstWidget.focus_force() boxRoot.deiconify() boxRoot.mainloop() boxRoot.destroy() if root: root.deiconify() return __replyButtonText #------------------------------------------------------------------- # integerbox #------------------------------------------------------------------- def integerbox(msg="" , title=" " , default="" , lowerbound=0 , upperbound=99 , image = None , root = None , **invalidKeywordArguments ): """ Show a box in which a user can enter an integer. In addition to arguments for msg and title, this function accepts integer arguments for "default", "lowerbound", and "upperbound". The default argument may be None. When the user enters some text, the text is checked to verify that it can be converted to an integer between the lowerbound and upperbound. If it can be, the integer (not the text) is returned. If it cannot, then an error msg is displayed, and the integerbox is redisplayed. If the user cancels the operation, None is returned. NOTE that the "argLowerBound" and "argUpperBound" arguments are no longer supported. They have been replaced by "upperbound" and "lowerbound". 
""" if "argLowerBound" in invalidKeywordArguments: raise AssertionError( "\nintegerbox no longer supports the 'argLowerBound' argument.\n" + "Use 'lowerbound' instead.\n\n") if "argUpperBound" in invalidKeywordArguments: raise AssertionError( "\nintegerbox no longer supports the 'argUpperBound' argument.\n" + "Use 'upperbound' instead.\n\n") if default != "": if type(default) != type(1): raise AssertionError( "integerbox received a non-integer value for " + "default of " + dq(str(default)) , "Error") if type(lowerbound) != type(1): raise AssertionError( "integerbox received a non-integer value for " + "lowerbound of " + dq(str(lowerbound)) , "Error") if type(upperbound) != type(1): raise AssertionError( "integerbox received a non-integer value for " + "upperbound of " + dq(str(upperbound)) , "Error") if msg == "": msg = ("Enter an integer between " + str(lowerbound) + " and " + str(upperbound) ) while 1: reply = enterbox(msg, title, str(default), image=image, root=root) if reply == None: return None try: reply = int(reply) except: msgbox ("The value that you entered:\n\t%s\nis not an integer." % dq(str(reply)) , "Error") continue if reply < lowerbound: msgbox ("The value that you entered is less than the lower bound of " + str(lowerbound) + ".", "Error") continue if reply > upperbound: msgbox ("The value that you entered is greater than the upper bound of " + str(upperbound) + ".", "Error") continue # reply has passed all validation checks. # It is an integer between the specified bounds. return reply #------------------------------------------------------------------- # multenterbox #------------------------------------------------------------------- def multenterbox(msg="Fill in values for the fields." , title=" " , fields=() , values=() ): r""" Show screen with multiple data entry fields. If there are fewer values than names, the list of values is padded with empty strings until the number of values is the same as the number of names. 
If there are more values than names, the list of values is truncated so that there are as many values as names. Returns a list of the values of the fields, or None if the user cancels the operation. Here is some example code, that shows how values returned from multenterbox can be checked for validity before they are accepted:: ---------------------------------------------------------------------- msg = "Enter your personal information" title = "Credit Card Application" fieldNames = ["Name","Street Address","City","State","ZipCode"] fieldValues = [] # we start with blanks for the values fieldValues = multenterbox(msg,title, fieldNames) # make sure that none of the fields was left blank while 1: if fieldValues == None: break errmsg = "" for i in range(len(fieldNames)): if fieldValues[i].strip() == "": errmsg += ('"%s" is a required field.\n\n' % fieldNames[i]) if errmsg == "": break # no problems found fieldValues = multenterbox(errmsg, title, fieldNames, fieldValues) writeln("Reply was: %s" % str(fieldValues)) ---------------------------------------------------------------------- @arg msg: the msg to be displayed. @arg title: the window title @arg fields: a list of fieldnames. @arg values: a list of field values """ return __multfillablebox(msg,title,fields,values,None) #----------------------------------------------------------------------- # multpasswordbox #----------------------------------------------------------------------- def multpasswordbox(msg="Fill in values for the fields." , title=" " , fields=tuple() ,values=tuple() ): r""" Same interface as multenterbox. But in multpassword box, the last of the fields is assumed to be a password, and is masked with asterisks. 
Example ======= Here is some example code, that shows how values returned from multpasswordbox can be checked for validity before they are accepted:: msg = "Enter logon information" title = "Demo of multpasswordbox" fieldNames = ["Server ID", "User ID", "Password"] fieldValues = [] # we start with blanks for the values fieldValues = multpasswordbox(msg,title, fieldNames) # make sure that none of the fields was left blank while 1: if fieldValues == None: break errmsg = "" for i in range(len(fieldNames)): if fieldValues[i].strip() == "": errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i]) if errmsg == "": break # no problems found fieldValues = multpasswordbox(errmsg, title, fieldNames, fieldValues) writeln("Reply was: %s" % str(fieldValues)) """ return __multfillablebox(msg,title,fields,values,"*") def bindArrows(widget): widget.bind("<Down>", tabRight) widget.bind("<Up>" , tabLeft) widget.bind("<Right>",tabRight) widget.bind("<Left>" , tabLeft) def tabRight(event): boxRoot.event_generate("<Tab>") def tabLeft(event): boxRoot.event_generate("<Shift-Tab>") #----------------------------------------------------------------------- # __multfillablebox #----------------------------------------------------------------------- def __multfillablebox(msg="Fill in values for the fields." 
, title=" " , fields=() , values=() , mask = None ): global boxRoot, __multenterboxText, __multenterboxDefaultText, cancelButton, entryWidget, okButton choices = ["OK", "Cancel"] if len(fields) == 0: return None fields = list(fields[:]) # convert possible tuples to a list values = list(values[:]) # convert possible tuples to a list if len(values) == len(fields): pass elif len(values) > len(fields): fields = fields[0:len(values)] else: while len(values) < len(fields): values.append("") boxRoot = Tk() boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose ) boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(rootWindowPosition) boxRoot.bind("<Escape>", __multenterboxCancel) # -------------------- put subframes in the boxRoot -------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) #-------------------- the msg widget ---------------------------- messageWidget = Message(messageFrame, width="4.5i", text=msg) messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m') global entryWidgets entryWidgets = [] lastWidgetIndex = len(fields) - 1 for widgetIndex in range(len(fields)): argFieldName = fields[widgetIndex] argFieldValue = values[widgetIndex] entryFrame = Frame(master=boxRoot) entryFrame.pack(side=TOP, fill=BOTH) # --------- entryWidget ---------------------------------------------- labelWidget = Label(entryFrame, text=argFieldName) labelWidget.pack(side=LEFT) entryWidget = Entry(entryFrame, width=40,highlightthickness=2) entryWidgets.append(entryWidget) entryWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,TEXT_ENTRY_FONT_SIZE)) entryWidget.pack(side=RIGHT, padx="3m") bindArrows(entryWidget) entryWidget.bind("<Return>", __multenterboxGetText) entryWidget.bind("<Escape>", __multenterboxCancel) # for the last entryWidget, if this is a multpasswordbox, # show the contents as just asterisks if widgetIndex == 
lastWidgetIndex: if mask: entryWidgets[widgetIndex].configure(show=mask) # put text into the entryWidget entryWidgets[widgetIndex].insert(0,argFieldValue) widgetIndex += 1 # ------------------ ok button ------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=BOTTOM, fill=BOTH) okButton = Button(buttonsFrame, takefocus=1, text="OK") bindArrows(okButton) okButton.pack(expand=1, side=LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event handler commandButton = okButton handler = __multenterboxGetText for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------ cancel button ------------------------------- cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel") bindArrows(cancelButton) cancelButton.pack(expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event handler commandButton = cancelButton handler = __multenterboxCancel for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------- time for action! ----------------- entryWidgets[0].focus_force() # put the focus on the entryWidget boxRoot.mainloop() # run it! 
# -------- after the run has completed ---------------------------------- boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now return __multenterboxText #----------------------------------------------------------------------- # __multenterboxGetText #----------------------------------------------------------------------- def __multenterboxGetText(event): global __multenterboxText __multenterboxText = [] for entryWidget in entryWidgets: __multenterboxText.append(entryWidget.get()) boxRoot.quit() def __multenterboxCancel(event): global __multenterboxText __multenterboxText = None boxRoot.quit() #------------------------------------------------------------------- # enterbox #------------------------------------------------------------------- def enterbox(msg="Enter something." , title=" " , default="" , strip=True , image=None , root=None ): """ Show a box in which a user can enter some text. You may optionally specify some default text, which will appear in the enterbox when it is displayed. Returns the text that the user entered, or None if he cancels the operation. By default, enterbox strips its result (i.e. removes leading and trailing whitespace). (If you want it not to strip, use keyword argument: strip=False.) This makes it easier to test the results of the call:: reply = enterbox(....) if reply: ... else: ... """ result = __fillablebox(msg, title, default=default, mask=None,image=image,root=root) if result and strip: result = result.strip() return result def passwordbox(msg="Enter your password." , title=" " , default="" , image=None , root=None ): """ Show a box in which a user can enter a password. The text is masked with asterisks, so the password is not displayed. Returns the text that the user entered, or None if he cancels the operation. 
""" return __fillablebox(msg, title, default, mask="*",image=image,root=root) def __fillablebox(msg , title="" , default="" , mask=None , image=None , root=None ): """ Show a box in which a user can enter some text. You may optionally specify some default text, which will appear in the enterbox when it is displayed. Returns the text that the user entered, or None if he cancels the operation. """ global boxRoot, __enterboxText, __enterboxDefaultText global cancelButton, entryWidget, okButton if title == None: title == "" if default == None: default = "" __enterboxDefaultText = default __enterboxText = __enterboxDefaultText if root: root.withdraw() boxRoot = Toplevel(master=root) boxRoot.withdraw() else: boxRoot = Tk() boxRoot.withdraw() boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose ) boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(rootWindowPosition) boxRoot.bind("<Escape>", __enterboxCancel) # ------------- define the messageFrame --------------------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) # ------------- define the imageFrame --------------------------------- tk_Image = None if image: imageFilename = os.path.normpath(image) junk,ext = os.path.splitext(imageFilename) if os.path.exists(imageFilename): if ext.lower() in [".gif", ".pgm", ".ppm"]: tk_Image = PhotoImage(master=boxRoot, file=imageFilename) else: if PILisLoaded: try: pil_Image = PILImage.open(imageFilename) tk_Image = PILImageTk.PhotoImage(pil_Image, master=boxRoot) except: msg += ImageErrorMsg % (imageFilename, "\nThe Python Imaging Library (PIL) could not convert this file to a displayable image." 
"\n\nPIL reports:\n" + exception_format()) else: # PIL is not loaded msg += ImageErrorMsg % (imageFilename, "\nI could not import the Python Imaging Library (PIL) to display the image.\n\n" "You may need to install PIL\n" "(http://www.pythonware.com/products/pil/)\n" "to display " + ext + " image files.") else: msg += ImageErrorMsg % (imageFilename, "\nImage file not found.") if tk_Image: imageFrame = Frame(master=boxRoot) imageFrame.pack(side=TOP, fill=BOTH) label = Label(imageFrame,image=tk_Image) label.image = tk_Image # keep a reference! label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m') # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) # ------------- define the entryFrame --------------------------------- entryFrame = Frame(master=boxRoot) entryFrame.pack(side=TOP, fill=BOTH) # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) #-------------------- the msg widget ---------------------------- messageWidget = Message(messageFrame, width="4.5i", text=msg) messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m') # --------- entryWidget ---------------------------------------------- entryWidget = Entry(entryFrame, width=40) bindArrows(entryWidget) entryWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,TEXT_ENTRY_FONT_SIZE)) if mask: entryWidget.configure(show=mask) entryWidget.pack(side=LEFT, padx="3m") entryWidget.bind("<Return>", __enterboxGetText) entryWidget.bind("<Escape>", __enterboxCancel) # put text into the entryWidget entryWidget.insert(0,__enterboxDefaultText) # ------------------ ok button ------------------------------- okButton = Button(buttonsFrame, takefocus=1, text="OK") bindArrows(okButton) okButton.pack(expand=1, side=LEFT, padx='3m', pady='3m', 
ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event handler commandButton = okButton handler = __enterboxGetText for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------ cancel button ------------------------------- cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel") bindArrows(cancelButton) cancelButton.pack(expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event handler commandButton = cancelButton handler = __enterboxCancel for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------- time for action! ----------------- entryWidget.focus_force() # put the focus on the entryWidget boxRoot.deiconify() boxRoot.mainloop() # run it! # -------- after the run has completed ---------------------------------- if root: root.deiconify() boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now return __enterboxText def __enterboxGetText(event): global __enterboxText __enterboxText = entryWidget.get() boxRoot.quit() def __enterboxRestore(event): global entryWidget entryWidget.delete(0,len(entryWidget.get())) entryWidget.insert(0, __enterboxDefaultText) def __enterboxCancel(event): global __enterboxText __enterboxText = None boxRoot.quit() def denyWindowManagerClose(): """ don't allow WindowManager close """ x = Tk() x.withdraw() x.bell() x.destroy() #------------------------------------------------------------------- # multchoicebox #------------------------------------------------------------------- def multchoicebox(msg="Pick as many items as you like." , title=" " , choices=() , **kwargs ): """ Present the user with a list of choices. allow him to select multiple items and return them in a list. if the user doesn't choose anything from the list, return the empty list. 
return None if he cancelled selection. @arg msg: the msg to be displayed. @arg title: the window title @arg choices: a list or tuple of the choices to be displayed """ if len(choices) == 0: choices = ["Program logic error - no choices were specified."] global __choiceboxMultipleSelect __choiceboxMultipleSelect = 1 return __choicebox(msg, title, choices) #----------------------------------------------------------------------- # choicebox #----------------------------------------------------------------------- def choicebox(msg="Pick something." , title=" " , choices=() ): """ Present the user with a list of choices. return the choice that he selects. return None if he cancels the selection selection. @arg msg: the msg to be displayed. @arg title: the window title @arg choices: a list or tuple of the choices to be displayed """ if len(choices) == 0: choices = ["Program logic error - no choices were specified."] global __choiceboxMultipleSelect __choiceboxMultipleSelect = 0 return __choicebox(msg,title,choices) #----------------------------------------------------------------------- # __choicebox #----------------------------------------------------------------------- def __choicebox(msg , title , choices ): """ internal routine to support choicebox() and multchoicebox() """ global boxRoot, __choiceboxResults, choiceboxWidget, defaultText global choiceboxWidget, choiceboxChoices #------------------------------------------------------------------- # If choices is a tuple, we make it a list so we can sort it. # If choices is already a list, we make a new list, so that when # we sort the choices, we don't affect the list object that we # were given. 
#------------------------------------------------------------------- choices = list(choices[:]) if len(choices) == 0: choices = ["Program logic error - no choices were specified."] defaultButtons = ["OK", "Cancel"] # make sure all choices are strings for index in range(len(choices)): choices[index] = str(choices[index]) lines_to_show = min(len(choices), 20) lines_to_show = 20 if title == None: title = "" # Initialize __choiceboxResults # This is the value that will be returned if the user clicks the close icon __choiceboxResults = None boxRoot = Tk() boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose ) screen_width = boxRoot.winfo_screenwidth() screen_height = boxRoot.winfo_screenheight() root_width = int((screen_width * 0.8)) root_height = int((screen_height * 0.5)) root_xpos = int((screen_width * 0.1)) root_ypos = int((screen_height * 0.05)) boxRoot.title(title) boxRoot.iconname('Dialog') rootWindowPosition = "+0+0" boxRoot.geometry(rootWindowPosition) boxRoot.expand=NO boxRoot.minsize(root_width, root_height) rootWindowPosition = "+" + str(root_xpos) + "+" + str(root_ypos) boxRoot.geometry(rootWindowPosition) # ---------------- put the frames in the window ----------------------------------------- message_and_buttonsFrame = Frame(master=boxRoot) message_and_buttonsFrame.pack(side=TOP, fill=X, expand=NO) messageFrame = Frame(message_and_buttonsFrame) messageFrame.pack(side=LEFT, fill=X, expand=YES) #messageFrame.pack(side=TOP, fill=X, expand=YES) buttonsFrame = Frame(message_and_buttonsFrame) buttonsFrame.pack(side=RIGHT, expand=NO, pady=0) #buttonsFrame.pack(side=TOP, expand=YES, pady=0) choiceboxFrame = Frame(master=boxRoot) choiceboxFrame.pack(side=BOTTOM, fill=BOTH, expand=YES) # -------------------------- put the widgets in the frames ------------------------------ # ---------- put a msg widget in the msg frame------------------- messageWidget = Message(messageFrame, anchor=NW, text=msg, width=int(root_width * 0.9)) 
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=LEFT, expand=YES, fill=BOTH, padx='1m', pady='1m') # -------- put the choiceboxWidget in the choiceboxFrame --------------------------- choiceboxWidget = Listbox(choiceboxFrame , height=lines_to_show , borderwidth="1m" , relief="flat" , bg="white" ) if __choiceboxMultipleSelect: choiceboxWidget.configure(selectmode=MULTIPLE) choiceboxWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) # add a vertical scrollbar to the frame rightScrollbar = Scrollbar(choiceboxFrame, orient=VERTICAL, command=choiceboxWidget.yview) choiceboxWidget.configure(yscrollcommand = rightScrollbar.set) # add a horizontal scrollbar to the frame bottomScrollbar = Scrollbar(choiceboxFrame, orient=HORIZONTAL, command=choiceboxWidget.xview) choiceboxWidget.configure(xscrollcommand = bottomScrollbar.set) # pack the Listbox and the scrollbars. Note that although we must define # the textArea first, we must pack it last, so that the bottomScrollbar will # be located properly. 
bottomScrollbar.pack(side=BOTTOM, fill = X) rightScrollbar.pack(side=RIGHT, fill = Y) choiceboxWidget.pack(side=LEFT, padx="1m", pady="1m", expand=YES, fill=BOTH) #--------------------------------------------------- # sort the choices # eliminate duplicates # put the choices into the choiceboxWidget #--------------------------------------------------- for index in range(len(choices)): choices[index] = str(choices[index]) if runningPython3: choices.sort(key=str.lower) else: choices.sort( lambda x,y: cmp(x.lower(), y.lower())) # case-insensitive sort lastInserted = None choiceboxChoices = [] for choice in choices: if choice == lastInserted: pass else: choiceboxWidget.insert(END, choice) choiceboxChoices.append(choice) lastInserted = choice boxRoot.bind('<Any-Key>', KeyboardListener) # put the buttons in the buttonsFrame if len(choices) > 0: okButton = Button(buttonsFrame, takefocus=YES, text="OK", height=1, width=6) bindArrows(okButton) okButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") # for the commandButton, bind activation events to the activation event handler commandButton = okButton handler = __choiceboxGetChoice for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # now bind the keyboard events choiceboxWidget.bind("<Return>", __choiceboxGetChoice) choiceboxWidget.bind("<Double-Button-1>", __choiceboxGetChoice) else: # now bind the keyboard events choiceboxWidget.bind("<Return>", __choiceboxCancel) choiceboxWidget.bind("<Double-Button-1>", __choiceboxCancel) cancelButton = Button(buttonsFrame, takefocus=YES, text="Cancel", height=1, width=6) bindArrows(cancelButton) cancelButton.pack(expand=NO, side=BOTTOM, padx='2m', pady='1m', ipady="1m", ipadx="2m") # for the commandButton, bind activation events to the activation event handler commandButton = cancelButton handler = __choiceboxCancel for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, 
handler) # add special buttons for multiple select features if len(choices) > 0 and __choiceboxMultipleSelect: selectionButtonsFrame = Frame(messageFrame) selectionButtonsFrame.pack(side=RIGHT, fill=Y, expand=NO) selectAllButton = Button(selectionButtonsFrame, text="Select All", height=1, width=6) bindArrows(selectAllButton) selectAllButton.bind("<Button-1>",__choiceboxSelectAll) selectAllButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") clearAllButton = Button(selectionButtonsFrame, text="Clear All", height=1, width=6) bindArrows(clearAllButton) clearAllButton.bind("<Button-1>",__choiceboxClearAll) clearAllButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") # -------------------- bind some keyboard events ---------------------------- boxRoot.bind("<Escape>", __choiceboxCancel) # --------------------- the action begins ----------------------------------- # put the focus on the choiceboxWidget, and the select highlight on the first item choiceboxWidget.select_set(0) choiceboxWidget.focus_force() # --- run it! 
----- boxRoot.mainloop() boxRoot.destroy() return __choiceboxResults def __choiceboxGetChoice(event): global boxRoot, __choiceboxResults, choiceboxWidget if __choiceboxMultipleSelect: __choiceboxResults = [choiceboxWidget.get(index) for index in choiceboxWidget.curselection()] else: choice_index = choiceboxWidget.curselection() __choiceboxResults = choiceboxWidget.get(choice_index) # writeln("Debugging> mouse-event=", event, " event.type=", event.type) # writeln("Debugging> choice=", choice_index, __choiceboxResults) boxRoot.quit() def __choiceboxSelectAll(event): global choiceboxWidget, choiceboxChoices choiceboxWidget.selection_set(0, len(choiceboxChoices)-1) def __choiceboxClearAll(event): global choiceboxWidget, choiceboxChoices choiceboxWidget.selection_clear(0, len(choiceboxChoices)-1) def __choiceboxCancel(event): global boxRoot, __choiceboxResults __choiceboxResults = None boxRoot.quit() def KeyboardListener(event): global choiceboxChoices, choiceboxWidget key = event.keysym if len(key) <= 1: if key in string.printable: # Find the key in the list. # before we clear the list, remember the selected member try: start_n = int(choiceboxWidget.curselection()[0]) except IndexError: start_n = -1 ## clear the selection. 
choiceboxWidget.selection_clear(0, 'end') ## start from previous selection +1 for n in range(start_n+1, len(choiceboxChoices)): item = choiceboxChoices[n] if item[0].lower() == key.lower(): choiceboxWidget.selection_set(first=n) choiceboxWidget.see(n) return else: # has not found it so loop from top for n in range(len(choiceboxChoices)): item = choiceboxChoices[n] if item[0].lower() == key.lower(): choiceboxWidget.selection_set(first = n) choiceboxWidget.see(n) return # nothing matched -- we'll look for the next logical choice for n in range(len(choiceboxChoices)): item = choiceboxChoices[n] if item[0].lower() > key.lower(): if n > 0: choiceboxWidget.selection_set(first = (n-1)) else: choiceboxWidget.selection_set(first = 0) choiceboxWidget.see(n) return # still no match (nothing was greater than the key) # we set the selection to the first item in the list lastIndex = len(choiceboxChoices)-1 choiceboxWidget.selection_set(first = lastIndex) choiceboxWidget.see(lastIndex) return #----------------------------------------------------------------------- # exception_format #----------------------------------------------------------------------- def exception_format(): """ Convert exception info into a string suitable for display. """ return "".join(traceback.format_exception( sys.exc_info()[0] , sys.exc_info()[1] , sys.exc_info()[2] )) #----------------------------------------------------------------------- # exceptionbox #----------------------------------------------------------------------- def exceptionbox(msg=None, title=None): """ Display a box that gives information about an exception that has just been raised. The caller may optionally pass in a title for the window, or a msg to accompany the error information. Note that you do not need to (and cannot) pass an exception object as an argument. The latest exception will automatically be used. """ if title == None: title = "Error Report" if msg == None: msg = "An error (exception) has occurred in the program." 
codebox(msg, title, exception_format()) #------------------------------------------------------------------- # codebox #------------------------------------------------------------------- def codebox(msg="" , title=" " , text="" ): """ Display some text in a monospaced font, with no line wrapping. This function is suitable for displaying code and text that is formatted using spaces. The text parameter should be a string, or a list or tuple of lines to be displayed in the textbox. """ return textbox(msg, title, text, codebox=1 ) #------------------------------------------------------------------- # textbox #------------------------------------------------------------------- def textbox(msg="" , title=" " , text="" , codebox=0 ): """ Display some text in a proportional font with line wrapping at word breaks. This function is suitable for displaying general written text. The text parameter should be a string, or a list or tuple of lines to be displayed in the textbox. """ if msg == None: msg = "" if title == None: title = "" global boxRoot, __replyButtonText, __widgetTexts, buttonsFrame global rootWindowPosition choices = ["OK"] __replyButtonText = choices[0] boxRoot = Tk() boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose ) screen_width = boxRoot.winfo_screenwidth() screen_height = boxRoot.winfo_screenheight() root_width = int((screen_width * 0.8)) root_height = int((screen_height * 0.5)) root_xpos = int((screen_width * 0.1)) root_ypos = int((screen_height * 0.05)) boxRoot.title(title) boxRoot.iconname('Dialog') rootWindowPosition = "+0+0" boxRoot.geometry(rootWindowPosition) boxRoot.expand=NO boxRoot.minsize(root_width, root_height) rootWindowPosition = "+" + str(root_xpos) + "+" + str(root_ypos) boxRoot.geometry(rootWindowPosition) mainframe = Frame(master=boxRoot) mainframe.pack(side=TOP, fill=BOTH, expand=YES) # ---- put frames in the window ----------------------------------- # we pack the textboxFrame first, so it will expand first textboxFrame = 
Frame(mainframe, borderwidth=3) textboxFrame.pack(side=BOTTOM , fill=BOTH, expand=YES) message_and_buttonsFrame = Frame(mainframe) message_and_buttonsFrame.pack(side=TOP, fill=X, expand=NO) messageFrame = Frame(message_and_buttonsFrame) messageFrame.pack(side=LEFT, fill=X, expand=YES) buttonsFrame = Frame(message_and_buttonsFrame) buttonsFrame.pack(side=RIGHT, expand=NO) # -------------------- put widgets in the frames -------------------- # put a textArea in the top frame if codebox: character_width = int((root_width * 0.6) / MONOSPACE_FONT_SIZE) textArea = Text(textboxFrame,height=25,width=character_width, padx="2m", pady="1m") textArea.configure(wrap=NONE) textArea.configure(font=(MONOSPACE_FONT_FAMILY, MONOSPACE_FONT_SIZE)) else: character_width = int((root_width * 0.6) / MONOSPACE_FONT_SIZE) textArea = Text( textboxFrame , height=25 , width=character_width , padx="2m" , pady="1m" ) textArea.configure(wrap=WORD) textArea.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) # some simple keybindings for scrolling mainframe.bind("<Next>" , textArea.yview_scroll( 1,PAGES)) mainframe.bind("<Prior>", textArea.yview_scroll(-1,PAGES)) mainframe.bind("<Right>", textArea.xview_scroll( 1,PAGES)) mainframe.bind("<Left>" , textArea.xview_scroll(-1,PAGES)) mainframe.bind("<Down>", textArea.yview_scroll( 1,UNITS)) mainframe.bind("<Up>" , textArea.yview_scroll(-1,UNITS)) # add a vertical scrollbar to the frame rightScrollbar = Scrollbar(textboxFrame, orient=VERTICAL, command=textArea.yview) textArea.configure(yscrollcommand = rightScrollbar.set) # add a horizontal scrollbar to the frame bottomScrollbar = Scrollbar(textboxFrame, orient=HORIZONTAL, command=textArea.xview) textArea.configure(xscrollcommand = bottomScrollbar.set) # pack the textArea and the scrollbars. Note that although we must define # the textArea first, we must pack it last, so that the bottomScrollbar will # be located properly. # Note that we need a bottom scrollbar only for code. 
# Text will be displayed with wordwrap, so we don't need to have a horizontal # scroll for it. if codebox: bottomScrollbar.pack(side=BOTTOM, fill=X) rightScrollbar.pack(side=RIGHT, fill=Y) textArea.pack(side=LEFT, fill=BOTH, expand=YES) # ---------- put a msg widget in the msg frame------------------- messageWidget = Message(messageFrame, anchor=NW, text=msg, width=int(root_width * 0.9)) messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=LEFT, expand=YES, fill=BOTH, padx='1m', pady='1m') # put the buttons in the buttonsFrame okButton = Button(buttonsFrame, takefocus=YES, text="OK", height=1, width=6) okButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") # for the commandButton, bind activation events to the activation event handler commandButton = okButton handler = __textboxOK for selectionEvent in ["Return","Button-1","Escape"]: commandButton.bind("<%s>" % selectionEvent, handler) # ----------------- the action begins ---------------------------------------- try: # load the text into the textArea if type(text) == type("abc"): pass else: try: text = "".join(text) # convert a list or a tuple to a string except: msgbox("Exception when trying to convert "+ str(type(text)) + " to text in textArea") sys.exit(16) textArea.insert(END,text, "normal") except: msgbox("Exception when trying to load the textArea.") sys.exit(16) try: okButton.focus_force() except: msgbox("Exception when trying to put focus on okButton.") sys.exit(16) boxRoot.mainloop() # this line MUST go before the line that destroys boxRoot areaText = textArea.get(0.0,END) boxRoot.destroy() return areaText # return __replyButtonText #------------------------------------------------------------------- # __textboxOK #------------------------------------------------------------------- def __textboxOK(event): global boxRoot boxRoot.quit() #------------------------------------------------------------------- # diropenbox 
#-------------------------------------------------------------------
def diropenbox(msg=None
    , title=None
    , default=None
    ):
    """
    A dialog to get a directory name.
    Note that the msg argument, if specified, is ignored.

    Returns the name of a directory, or None if user chose to cancel.

    If the "default" argument specifies a directory name, and that
    directory exists, then the dialog box will start with that directory.
    """
    title = getFileDialogTitle(msg, title)
    localRoot = Tk()
    localRoot.withdraw()
    if not default:
        default = None
    f = tk_FileDialog.askdirectory(
        parent=localRoot
        , title=title
        , initialdir=default
        , initialfile=None
        )
    localRoot.destroy()
    if not f:
        return None
    return os.path.normpath(f)


#-------------------------------------------------------------------
# getFileDialogTitle
#-------------------------------------------------------------------
def getFileDialogTitle(msg
    , title
    ):
    """Combine msg and title into a single window title for a file dialog."""
    if msg and title:
        return "%s - %s" % (title, msg)
    if msg and not title:
        return str(msg)
    if title and not msg:
        return str(title)
    return None  # no message and no title


#-------------------------------------------------------------------
# class FileTypeObject for use with fileopenbox
#-------------------------------------------------------------------
class FileTypeObject:
    def __init__(self, filemask):
        if len(filemask) == 0:
            raise AssertionError('Filetype argument is empty.')

        self.masks = []

        if type(filemask) == type("abc"):  # a string
            self.initializeFromString(filemask)

        elif type(filemask) == type([]):  # a list
            if len(filemask) < 2:
                raise AssertionError('Invalid filemask.\n'
                    + 'List contains less than 2 members: "%s"' % filemask)
            else:
                # last member is the human-readable name; the rest are masks
                self.name = filemask[-1]
                self.masks = list(filemask[:-1])
        else:
            raise AssertionError('Invalid filemask: "%s"' % filemask)

    def __eq__(self, other):
        # two FileTypeObjects are considered equal if their names match
        if self.name == other.name:
            return True
        return False

    def add(self, other):
        """Merge the masks of another FileTypeObject into this one (no duplicates)."""
        for mask in other.masks:
            if mask in self.masks:
                pass
            else:
                self.masks.append(mask)

    def toTuple(self):
        """Return (name, masks) in the form Tk's filedialog expects."""
        return (self.name, tuple(self.masks))
def isAll(self): if self.name == "All files": return True return False def initializeFromString(self, filemask): # remove everything except the extension from the filemask self.ext = os.path.splitext(filemask)[1] if self.ext == "" : self.ext = ".*" if self.ext == ".": self.ext = ".*" self.name = self.getName() self.masks = ["*" + self.ext] def getName(self): e = self.ext if e == ".*" : return "All files" if e == ".txt": return "Text files" if e == ".py" : return "Python files" if e == ".pyc" : return "Python files" if e == ".xls": return "Excel files" if e.startswith("."): return e[1:].upper() + " files" return e.upper() + " files" #------------------------------------------------------------------- # fileopenbox #------------------------------------------------------------------- def fileopenbox(msg=None , title=None , default="*" , filetypes=None ): """ A dialog to get a file name. About the "default" argument ============================ The "default" argument specifies a filepath that (normally) contains one or more wildcards. fileopenbox will display only files that match the default filepath. If omitted, defaults to "*" (all files in the current directory). WINDOWS EXAMPLE:: ...default="c:/myjunk/*.py" will open in directory c:\myjunk\ and show all Python files. WINDOWS EXAMPLE:: ...default="c:/myjunk/test*.py" will open in directory c:\myjunk\ and show all Python files whose names begin with "test". Note that on Windows, fileopenbox automatically changes the path separator to the Windows path separator (backslash). About the "filetypes" argument ============================== If specified, it should contain a list of items, where each item is either:: - a string containing a filemask # e.g. "*.txt" - a list of strings, where all of the strings except the last one are filemasks (each beginning with "*.", such as "*.txt" for text files, "*.py" for Python files, etc.). 
and the last string contains a filetype description EXAMPLE:: filetypes = ["*.css", ["*.htm", "*.html", "HTML files"] ] NOTE THAT ========= If the filetypes list does not contain ("All files","*"), it will be added. If the filetypes list does not contain a filemask that includes the extension of the "default" argument, it will be added. For example, if default="*abc.py" and no filetypes argument was specified, then "*.py" will automatically be added to the filetypes argument. @rtype: string or None @return: the name of a file, or None if user chose to cancel @arg msg: the msg to be displayed. @arg title: the window title @arg default: filepath with wildcards @arg filetypes: filemasks that a user can choose, e.g. "*.txt" """ localRoot = Tk() localRoot.withdraw() initialbase, initialfile, initialdir, filetypes = fileboxSetup(default,filetypes) #------------------------------------------------------------ # if initialfile contains no wildcards; we don't want an # initial file. It won't be used anyway. # Also: if initialbase is simply "*", we don't want an # initialfile; it is not doing any useful work. #------------------------------------------------------------ if (initialfile.find("*") < 0) and (initialfile.find("?") < 0): initialfile = None elif initialbase == "*": initialfile = None f = tk_FileDialog.askopenfilename(parent=localRoot , title=getFileDialogTitle(msg,title) , initialdir=initialdir , initialfile=initialfile , filetypes=filetypes ) localRoot.destroy() if not f: return None return os.path.normpath(f) #------------------------------------------------------------------- # filesavebox #------------------------------------------------------------------- def filesavebox(msg=None , title=None , default="" , filetypes=None ): """ A file to get the name of a file to save. Returns the name of a file, or None if user chose to cancel. The "default" argument should contain a filename (i.e. the current name of the file to be saved). 
It may also be empty, or contain a filemask that includes wildcards. The "filetypes" argument works like the "filetypes" argument to fileopenbox. """ localRoot = Tk() localRoot.withdraw() initialbase, initialfile, initialdir, filetypes = fileboxSetup(default,filetypes) f = tk_FileDialog.asksaveasfilename(parent=localRoot , title=getFileDialogTitle(msg,title) , initialfile=initialfile , initialdir=initialdir , filetypes=filetypes ) localRoot.destroy() if not f: return None return os.path.normpath(f) #------------------------------------------------------------------- # # fileboxSetup # #------------------------------------------------------------------- def fileboxSetup(default,filetypes): if not default: default = os.path.join(".","*") initialdir, initialfile = os.path.split(default) if not initialdir : initialdir = "." if not initialfile: initialfile = "*" initialbase, initialext = os.path.splitext(initialfile) initialFileTypeObject = FileTypeObject(initialfile) allFileTypeObject = FileTypeObject("*") ALL_filetypes_was_specified = False if not filetypes: filetypes= [] filetypeObjects = [] for filemask in filetypes: fto = FileTypeObject(filemask) if fto.isAll(): ALL_filetypes_was_specified = True # remember this if fto == initialFileTypeObject: initialFileTypeObject.add(fto) # add fto to initialFileTypeObject else: filetypeObjects.append(fto) #------------------------------------------------------------------ # make sure that the list of filetypes includes the ALL FILES type. #------------------------------------------------------------------ if ALL_filetypes_was_specified: pass elif allFileTypeObject == initialFileTypeObject: pass else: filetypeObjects.insert(0,allFileTypeObject) #------------------------------------------------------------------ # Make sure that the list includes the initialFileTypeObject # in the position in the list that will make it the default. 
# This changed between Python version 2.5 and 2.6 #------------------------------------------------------------------ if len(filetypeObjects) == 0: filetypeObjects.append(initialFileTypeObject) if initialFileTypeObject in (filetypeObjects[0], filetypeObjects[-1]): pass else: if runningPython26: filetypeObjects.append(initialFileTypeObject) else: filetypeObjects.insert(0,initialFileTypeObject) filetypes = [fto.toTuple() for fto in filetypeObjects] return initialbase, initialfile, initialdir, filetypes #------------------------------------------------------------------- # utility routines #------------------------------------------------------------------- # These routines are used by several other functions in the EasyGui module. def __buttonEvent(event): """ Handle an event that is generated by a person clicking a button. """ global boxRoot, __widgetTexts, __replyButtonText __replyButtonText = __widgetTexts[event.widget] boxRoot.quit() # quit the main loop def __put_buttons_in_buttonframe(choices): """Put the buttons in the buttons frame """ global __widgetTexts, __firstWidget, buttonsFrame __firstWidget = None __widgetTexts = {} i = 0 for buttonText in choices: tempButton = Button(buttonsFrame, takefocus=1, text=buttonText) bindArrows(tempButton) tempButton.pack(expand=YES, side=LEFT, padx='1m', pady='1m', ipadx='2m', ipady='1m') # remember the text associated with this widget __widgetTexts[tempButton] = buttonText # remember the first widget, so we can put the focus there if i == 0: __firstWidget = tempButton i = 1 # for the commandButton, bind activation events to the activation event handler commandButton = tempButton handler = __buttonEvent for selectionEvent in STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) #----------------------------------------------------------------------- # # class EgStore # #----------------------------------------------------------------------- class EgStore: r""" A class to support persistent 
storage. You can use EgStore to support the storage and retrieval of user settings for an EasyGui application. # Example A #----------------------------------------------------------------------- # define a class named Settings as a subclass of EgStore #----------------------------------------------------------------------- class Settings(EgStore): :: def __init__(self, filename): # filename is required #------------------------------------------------- # Specify default/initial values for variables that # this particular application wants to remember. #------------------------------------------------- self.userId = "" self.targetServer = "" #------------------------------------------------- # For subclasses of EgStore, these must be # the last two statements in __init__ #------------------------------------------------- self.filename = filename # this is required self.restore() # restore values from the storage file if possible # Example B #----------------------------------------------------------------------- # create settings, a persistent Settings object #----------------------------------------------------------------------- settingsFile = "myApp_settings.txt" settings = Settings(settingsFile) user = "obama_barak" server = "whitehouse1" settings.userId = user settings.targetServer = server settings.store() # persist the settings # run code that gets a new value for userId, and persist the settings user = "biden_joe" settings.userId = user settings.store() # Example C #----------------------------------------------------------------------- # recover the Settings instance, change an attribute, and store it again. 
#----------------------------------------------------------------------- settings = Settings(settingsFile) settings.userId = "vanrossum_g" settings.store() """ def __init__(self, filename): # obtaining filename is required self.filename = None raise NotImplementedError() def restore(self): """ Set the values of whatever attributes are recoverable from the pickle file. Populate the attributes (the __dict__) of the EgStore object from the attributes (the __dict__) of the pickled object. If the pickled object has attributes that have been initialized in the EgStore object, then those attributes of the EgStore object will be replaced by the values of the corresponding attributes in the pickled object. If the pickled object is missing some attributes that have been initialized in the EgStore object, then those attributes of the EgStore object will retain the values that they were initialized with. If the pickled object has some attributes that were not initialized in the EgStore object, then those attributes will be ignored. IN SUMMARY: After the recover() operation, the EgStore object will have all, and only, the attributes that it had when it was initialized. Where possible, those attributes will have values recovered from the pickled object. """ if not os.path.exists(self.filename): return self if not os.path.isfile(self.filename): return self try: f = open(self.filename,"rb") unpickledObject = pickle.load(f) f.close() for key in list(self.__dict__.keys()): default = self.__dict__[key] self.__dict__[key] = unpickledObject.__dict__.get(key,default) except: pass return self def store(self): """ Save the attributes of the EgStore object to a pickle file. Note that if the directory for the pickle file does not already exist, the store operation will fail. """ f = open(self.filename, "wb") pickle.dump(self, f) f.close() def kill(self): """ Delete my persistent file (i.e. pickle file), if it exists. 
""" if os.path.isfile(self.filename): os.remove(self.filename) return def __str__(self): """ return my contents as a string in an easy-to-read format. """ # find the length of the longest attribute name longest_key_length = 0 keys = [] for key in self.__dict__.keys(): keys.append(key) longest_key_length = max(longest_key_length, len(key)) keys.sort() # sort the attribute names lines = [] for key in keys: value = self.__dict__[key] key = key.ljust(longest_key_length) lines.append("%s : %s\n" % (key,repr(value)) ) return "".join(lines) # return a string showing the attributes #----------------------------------------------------------------------- # # test/demo easygui # #----------------------------------------------------------------------- def egdemo(): """ Run the EasyGui demo. """ # clear the console writeln("\n" * 100) intro_message = ("Pick the kind of box that you wish to demo.\n" + "\n * Python version " + sys.version + "\n * EasyGui version " + egversion + "\n * Tk version " + str(TkVersion) ) #========================================== END DEMONSTRATION DATA while 1: # do forever choices = [ "msgbox", "buttonbox", "buttonbox(image) -- a buttonbox that displays an image", "choicebox", "multchoicebox", "textbox", "ynbox", "ccbox", "enterbox", "enterbox(image) -- an enterbox that displays an image", "exceptionbox", "codebox", "integerbox", "boolbox", "indexbox", "filesavebox", "fileopenbox", "passwordbox", "multenterbox", "multpasswordbox", "diropenbox", "About EasyGui", " Help" ] choice = choicebox(msg=intro_message , title="EasyGui " + egversion , choices=choices) if not choice: return reply = choice.split() if reply[0] == "msgbox": reply = msgbox("short msg", "This is a long title") writeln("Reply was: %s" % repr(reply)) elif reply[0] == "About": reply = abouteasygui() elif reply[0] == "Help": _demo_help() elif reply[0] == "buttonbox": reply = buttonbox() writeln("Reply was: %s" % repr(reply)) title = "Demo of Buttonbox with many, many buttons!" 
msg = "This buttonbox shows what happens when you specify too many buttons." reply = buttonbox(msg=msg, title=title, choices=choices) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "buttonbox(image)": _demo_buttonbox_with_image() elif reply[0] == "boolbox": reply = boolbox() writeln("Reply was: %s" % repr(reply)) elif reply[0] == "enterbox": image = "python_and_check_logo.gif" message = "Enter the name of your best friend."\ "\n(Result will be stripped.)" reply = enterbox(message, "Love!", " Suzy Smith ") writeln("Reply was: %s" % repr(reply)) message = "Enter the name of your best friend."\ "\n(Result will NOT be stripped.)" reply = enterbox(message, "Love!", " Suzy Smith ",strip=False) writeln("Reply was: %s" % repr(reply)) reply = enterbox("Enter the name of your worst enemy:", "Hate!") writeln("Reply was: %s" % repr(reply)) elif reply[0] == "enterbox(image)": image = "python_and_check_logo.gif" message = "What kind of snake is this?" reply = enterbox(message, "Quiz",image=image) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "exceptionbox": try: thisWillCauseADivideByZeroException = 1/0 except: exceptionbox() elif reply[0] == "integerbox": reply = integerbox( "Enter a number between 3 and 333", "Demo: integerbox WITH a default value", 222, 3, 333) writeln("Reply was: %s" % repr(reply)) reply = integerbox( "Enter a number between 0 and 99", "Demo: integerbox WITHOUT a default value" ) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "diropenbox" : _demo_diropenbox() elif reply[0] == "fileopenbox": _demo_fileopenbox() elif reply[0] == "filesavebox": _demo_filesavebox() elif reply[0] == "indexbox": title = reply[0] msg = "Demo of " + reply[0] choices = ["Choice1", "Choice2", "Choice3", "Choice4"] reply = indexbox(msg, title, choices) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "passwordbox": reply = passwordbox("Demo of password box WITHOUT default" + "\n\nEnter your secret password", "Member Logon") writeln("Reply was: %s" % 
str(reply)) reply = passwordbox("Demo of password box WITH default" + "\n\nEnter your secret password", "Member Logon", "alfie") writeln("Reply was: %s" % str(reply)) elif reply[0] == "multenterbox": msg = "Enter your personal information" title = "Credit Card Application" fieldNames = ["Name","Street Address","City","State","ZipCode"] fieldValues = [] # we start with blanks for the values fieldValues = multenterbox(msg,title, fieldNames) # make sure that none of the fields was left blank while 1: if fieldValues == None: break errmsg = "" for i in range(len(fieldNames)): if fieldValues[i].strip() == "": errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i]) if errmsg == "": break # no problems found fieldValues = multenterbox(errmsg, title, fieldNames, fieldValues) writeln("Reply was: %s" % str(fieldValues)) elif reply[0] == "multpasswordbox": msg = "Enter logon information" title = "Demo of multpasswordbox" fieldNames = ["Server ID", "User ID", "Password"] fieldValues = [] # we start with blanks for the values fieldValues = multpasswordbox(msg,title, fieldNames) # make sure that none of the fields was left blank while 1: if fieldValues == None: break errmsg = "" for i in range(len(fieldNames)): if fieldValues[i].strip() == "": errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i]) if errmsg == "": break # no problems found fieldValues = multpasswordbox(errmsg, title, fieldNames, fieldValues) writeln("Reply was: %s" % str(fieldValues)) elif reply[0] == "ynbox": title = "Demo of ynbox" msg = "Were you expecting the Spanish Inquisition?" 
reply = ynbox(msg, title) writeln("Reply was: %s" % repr(reply)) if reply: msgbox("NOBODY expects the Spanish Inquisition!", "Wrong!") elif reply[0] == "ccbox": title = "Demo of ccbox" reply = ccbox(msg,title) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "choicebox": title = "Demo of choicebox" longchoice = "This is an example of a very long option which you may or may not wish to choose."*2 listChoices = ["nnn", "ddd", "eee", "fff", "aaa", longchoice , "aaa", "bbb", "ccc", "ggg", "hhh", "iii", "jjj", "kkk", "LLL", "mmm" , "nnn", "ooo", "ppp", "qqq", "rrr", "sss", "ttt", "uuu", "vvv"] msg = "Pick something. " + ("A wrapable sentence of text ?! "*30) + "\nA separate line of text."*6 reply = choicebox(msg=msg, choices=listChoices) writeln("Reply was: %s" % repr(reply)) msg = "Pick something. " reply = choicebox(msg=msg, title=title, choices=listChoices) writeln("Reply was: %s" % repr(reply)) msg = "Pick something. " reply = choicebox(msg="The list of choices is empty!", choices=[]) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "multchoicebox": listChoices = ["aaa", "bbb", "ccc", "ggg", "hhh", "iii", "jjj", "kkk" , "LLL", "mmm" , "nnn", "ooo", "ppp", "qqq" , "rrr", "sss", "ttt", "uuu", "vvv"] msg = "Pick as many choices as you wish." reply = multchoicebox(msg,"Demo of multchoicebox", listChoices) writeln("Reply was: %s" % repr(reply)) elif reply[0] == "textbox": _demo_textbox(reply[0]) elif reply[0] == "codebox": _demo_codebox(reply[0]) else: msgbox("Choice\n\n" + choice + "\n\nis not recognized", "Program Logic Error") return def _demo_textbox(reply): text_snippet = ((\ """It was the best of times, and it was the worst of times. The rich ate cake, and the poor had cake recommended to them, but wished only for enough cash to buy bread. The time was ripe for revolution! """ \ *5)+"\n\n")*10 title = "Demo of textbox" msg = "Here is some sample text. 
" * 16 reply = textbox(msg, title, text_snippet) writeln("Reply was: %s" % str(reply)) def _demo_codebox(reply): code_snippet = ("dafsdfa dasflkj pp[oadsij asdfp;ij asdfpjkop asdfpok asdfpok asdfpok"*3) +"\n"+\ """# here is some dummy Python code for someItem in myListOfStuff: do something(someItem) do something() do something() if somethingElse(someItem): doSomethingEvenMoreInteresting() """*16 msg = "Here is some sample code. " * 16 reply = codebox(msg, "Code Sample", code_snippet) writeln("Reply was: %s" % repr(reply)) def _demo_buttonbox_with_image(): msg = "Do you like this picture?\nIt is " choices = ["Yes","No","No opinion"] for image in [ "python_and_check_logo.gif" ,"python_and_check_logo.jpg" ,"python_and_check_logo.png" ,"zzzzz.gif"]: reply=buttonbox(msg + image,image=image,choices=choices) writeln("Reply was: %s" % repr(reply)) def _demo_help(): savedStdout = sys.stdout # save the sys.stdout file object sys.stdout = capturedOutput = StringIO() help("easygui") sys.stdout = savedStdout # restore the sys.stdout file object codebox("EasyGui Help",text=capturedOutput.getvalue()) def _demo_filesavebox(): filename = "myNewFile.txt" title = "File SaveAs" msg ="Save file as:" f = filesavebox(msg,title,default=filename) writeln("You chose to save file: %s" % f) def _demo_diropenbox(): title = "Demo of diropenbox" msg = "Pick the directory that you wish to open." 
d = diropenbox(msg, title) writeln("You chose directory...: %s" % d) d = diropenbox(msg, title,default="./") writeln("You chose directory...: %s" % d) d = diropenbox(msg, title,default="c:/") writeln("You chose directory...: %s" % d) def _demo_fileopenbox(): msg = "Python files" title = "Open files" default="*.py" f = fileopenbox(msg,title,default=default) writeln("You chose to open file: %s" % f) default="./*.gif" filetypes = ["*.jpg",["*.zip","*.tgs","*.gz", "Archive files"],["*.htm", "*.html","HTML files"]] f = fileopenbox(msg,title,default=default,filetypes=filetypes) writeln("You chose to open file: %s" % f) """#deadcode -- testing ---------------------------------------- f = fileopenbox(None,None,default=default) writeln("You chose to open file: %s" % f) f = fileopenbox(None,title,default=default) writeln("You chose to open file: %s" % f) f = fileopenbox(msg,None,default=default) writeln("You chose to open file: %s" % f) f = fileopenbox(default=default) writeln("You chose to open file: %s" % f) f = fileopenbox(default=None) writeln("You chose to open file: %s" % f) #----------------------------------------------------deadcode """ def _dummy(): pass EASYGUI_ABOUT_INFORMATION = ''' ======================================================================== 0.96(2010-08-29) ======================================================================== This version fixes some problems with version independence. BUG FIXES ------------------------------------------------------ * A statement with Python 2.x-style exception-handling syntax raised a syntax error when running under Python 3.x. Thanks to David Williams for reporting this problem. * Under some circumstances, PIL was unable to display non-gif images that it should have been able to display. The cause appears to be non-version-independent import syntax. PIL modules are now imported with a version-independent syntax. Thanks to Horst Jens for reporting this problem. 
LICENSE CHANGE ------------------------------------------------------ Starting with this version, EasyGui is licensed under what is generally known as the "modified BSD license" (aka "revised BSD", "new BSD", "3-clause BSD"). This license is GPL-compatible but less restrictive than GPL. Earlier versions were licensed under the Creative Commons Attribution License 2.0. ======================================================================== 0.95(2010-06-12) ======================================================================== ENHANCEMENTS ------------------------------------------------------ * Previous versions of EasyGui could display only .gif image files using the msgbox "image" argument. This version can now display all image-file formats supported by PIL the Python Imaging Library) if PIL is installed. If msgbox is asked to open a non-gif image file, it attempts to import PIL and to use PIL to convert the image file to a displayable format. If PIL cannot be imported (probably because PIL is not installed) EasyGui displays an error message saying that PIL must be installed in order to display the image file. Note that http://www.pythonware.com/products/pil/ says that PIL doesn't yet support Python 3.x. ======================================================================== 0.94(2010-06-06) ======================================================================== ENHANCEMENTS ------------------------------------------------------ * The codebox and textbox functions now return the contents of the box, rather than simply the name of the button ("Yes"). This makes it possible to use codebox and textbox as data-entry widgets. A big "thank you!" to Dominic Comtois for requesting this feature, patiently explaining his requirement, and helping to discover the tkinter techniques to implement it. NOTE THAT in theory this change breaks backward compatibility. 
But because (in previous versions of EasyGui) the value returned by codebox and textbox was meaningless, no application should have been checking it. So in actual practice, this change should not break backward compatibility. * Added support for SPACEBAR to command buttons. Now, when keyboard focus is on a command button, a press of the SPACEBAR will act like a press of the ENTER key; it will activate the command button. * Added support for keyboard navigation with the arrow keys (up,down,left,right) to the fields and buttons in enterbox, multenterbox and multpasswordbox, and to the buttons in choicebox and all buttonboxes. * added highlightthickness=2 to entry fields in multenterbox and multpasswordbox. Now it is easier to tell which entry field has keyboard focus. BUG FIXES ------------------------------------------------------ * In EgStore, the pickle file is now opened with "rb" and "wb" rather than with "r" and "w". This change is necessary for compatibility with Python 3+. Thanks to Marshall Mattingly for reporting this problem and providing the fix. * In integerbox, the actual argument names did not match the names described in the docstring. Thanks to Daniel Zingaro of at University of Toronto for reporting this problem. * In integerbox, the "argLowerBound" and "argUpperBound" arguments have been renamed to "lowerbound" and "upperbound" and the docstring has been corrected. NOTE THAT THIS CHANGE TO THE ARGUMENT-NAMES BREAKS BACKWARD COMPATIBILITY. If argLowerBound or argUpperBound are used, an AssertionError with an explanatory error message is raised. * In choicebox, the signature to choicebox incorrectly showed choicebox as accepting a "buttons" argument. The signature has been fixed. 
======================================================================== 0.93(2009-07-07) ======================================================================== ENHANCEMENTS ------------------------------------------------------ * Added exceptionbox to display stack trace of exceptions * modified names of some font-related constants to make it easier to customize them ======================================================================== 0.92(2009-06-22) ======================================================================== ENHANCEMENTS ------------------------------------------------------ * Added EgStore class to to provide basic easy-to-use persistence. BUG FIXES ------------------------------------------------------ * Fixed a bug that was preventing Linux users from copying text out of a textbox and a codebox. This was not a problem for Windows users. ''' def abouteasygui(): """ shows the easygui revision history """ codebox("About EasyGui\n"+egversion,"EasyGui",EASYGUI_ABOUT_INFORMATION) return None if __name__ == '__main__': if True: egdemo() else: # test the new root feature root = Tk() msg = """This is a test of a main Tk() window in which we will place an easygui msgbox. It will be an interesting experiment.\n\n""" messageWidget = Message(root, text=msg, width=1000) messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m') messageWidget = Message(root, text=msg, width=1000) messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m') msgbox("this is a test of passing in boxRoot", root=root) msgbox("this is a second test of passing in boxRoot", root=root) reply = enterbox("Enter something", root=root) writeln("You wrote:", reply) reply = enterbox("Enter something else", root=root) writeln("You wrote:", reply) root.destroy()
agpl-3.0
deeplook/bokeh
sphinx/source/docs/user_guide/source_examples/interaction_callbacks_for_selections.py
27
1057
from random import random from bokeh.models import CustomJS, ColumnDataSource from bokeh.plotting import hplot, figure, output_file, show output_file("callback.html") x = [random() for x in range(500)] y = [random() for y in range(500)] s1 = ColumnDataSource(data=dict(x=x, y=y)) p1 = figure(plot_width=400, plot_height=400, tools="lasso_select", title="Select Here") p1.circle('x', 'y', source=s1, alpha=0.6) s2 = ColumnDataSource(data=dict(x=[], y=[])) p2 = figure(plot_width=400, plot_height=400, x_range=(0, 1), y_range=(0, 1), tools="", title="Watch Here") p2.circle('x', 'y', source=s2, alpha=0.6) s1.callback = CustomJS(args=dict(s2=s2), code=""" var inds = cb_obj.get('selected')['1d'].indices; var d1 = cb_obj.get('data'); var d2 = s2.get('data'); d2['x'] = [] d2['y'] = [] for (i = 0; i < inds.length; i++) { d2['x'].push(d1['x'][inds[i]]) d2['y'].push(d1['y'][inds[i]]) } s2.trigger('change'); """) layout = hplot(p1, p2) show(layout)
bsd-3-clause
bdh1011/wau
venv/lib/python2.7/site-packages/twisted/internet/_threadedselect.py
9
12319
# -*- test-case-name: twisted.test.test_internet -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Threaded select reactor The threadedselectreactor is a specialized reactor for integrating with arbitrary foreign event loop, such as those you find in GUI toolkits. There are three things you'll need to do to use this reactor. Install the reactor at the beginning of your program, before importing the rest of Twisted:: | from twisted.internet import _threadedselect | _threadedselect.install() Interleave this reactor with your foreign event loop, at some point after your event loop is initialized:: | from twisted.internet import reactor | reactor.interleave(foreignEventLoopWakerFunction) | self.addSystemEventTrigger('after', 'shutdown', foreignEventLoopStop) Instead of shutting down the foreign event loop directly, shut down the reactor:: | from twisted.internet import reactor | reactor.stop() In order for Twisted to do its work in the main thread (the thread that interleave is called from), a waker function is necessary. The waker function will be called from a "background" thread with one argument: func. The waker function's purpose is to call func() from the main thread. Many GUI toolkits ship with appropriate waker functions. Some examples of this are wxPython's wx.callAfter (may be wxCallAfter in older versions of wxPython) or PyObjC's PyObjCTools.AppHelper.callAfter. These would be used in place of "foreignEventLoopWakerFunction" in the above example. The other integration point at which the foreign event loop and this reactor must integrate is shutdown. In order to ensure clean shutdown of Twisted, you must allow for Twisted to come to a complete stop before quitting the application. Typically, you will do this by setting up an after shutdown trigger to stop your foreign event loop, and call reactor.stop() where you would normally have initiated the shutdown procedure for the foreign event loop. 
Shutdown functions that could be used in place of "foreignEventloopStop" would be the ExitMainLoop method of the wxApp instance with wxPython, or the PyObjCTools.AppHelper.stopEventLoop function. """ from threading import Thread from Queue import Queue, Empty import sys from zope.interface import implements from twisted.internet.interfaces import IReactorFDSet from twisted.internet import posixbase from twisted.internet.posixbase import _NO_FILENO, _NO_FILEDESC from twisted.python import log, failure, threadable import select from errno import EINTR, EBADF from twisted.internet.selectreactor import _select def dictRemove(dct, value): try: del dct[value] except KeyError: pass def raiseException(e): raise e class ThreadedSelectReactor(posixbase.PosixReactorBase): """A threaded select() based reactor - runs on all POSIX platforms and on Win32. """ implements(IReactorFDSet) def __init__(self): threadable.init(1) self.reads = {} self.writes = {} self.toThreadQueue = Queue() self.toMainThread = Queue() self.workerThread = None self.mainWaker = None posixbase.PosixReactorBase.__init__(self) self.addSystemEventTrigger('after', 'shutdown', self._mainLoopShutdown) def wakeUp(self): # we want to wake up from any thread self.waker.wakeUp() def callLater(self, *args, **kw): tple = posixbase.PosixReactorBase.callLater(self, *args, **kw) self.wakeUp() return tple def _sendToMain(self, msg, *args): #print >>sys.stderr, 'sendToMain', msg, args self.toMainThread.put((msg, args)) if self.mainWaker is not None: self.mainWaker() def _sendToThread(self, fn, *args): #print >>sys.stderr, 'sendToThread', fn, args self.toThreadQueue.put((fn, args)) def _preenDescriptorsInThread(self): log.msg("Malformed file descriptor found. 
Preening lists.") readers = self.reads.keys() writers = self.writes.keys() self.reads.clear() self.writes.clear() for selDict, selList in ((self.reads, readers), (self.writes, writers)): for selectable in selList: try: select.select([selectable], [selectable], [selectable], 0) except: log.msg("bad descriptor %s" % selectable) else: selDict[selectable] = 1 def _workerInThread(self): try: while 1: fn, args = self.toThreadQueue.get() #print >>sys.stderr, "worker got", fn, args fn(*args) except SystemExit: pass # exception indicates this thread should exit except: f = failure.Failure() self._sendToMain('Failure', f) #print >>sys.stderr, "worker finished" def _doSelectInThread(self, timeout): """Run one iteration of the I/O monitor loop. This will run all selectables who had input or output readiness waiting for them. """ reads = self.reads writes = self.writes while 1: try: r, w, ignored = _select(reads.keys(), writes.keys(), [], timeout) break except ValueError: # Possibly a file descriptor has gone negative? log.err() self._preenDescriptorsInThread() except TypeError: # Something *totally* invalid (object w/o fileno, non-integral # result) was passed log.err() self._preenDescriptorsInThread() except (select.error, IOError), se: # select(2) encountered an error if se.args[0] in (0, 2): # windows does this if it got an empty list if (not reads) and (not writes): return else: raise elif se.args[0] == EINTR: return elif se.args[0] == EBADF: self._preenDescriptorsInThread() else: # OK, I really don't know what's going on. Blow up. raise self._sendToMain('Notify', r, w) def _process_Notify(self, r, w): #print >>sys.stderr, "_process_Notify" reads = self.reads writes = self.writes _drdw = self._doReadOrWrite _logrun = log.callWithLogger for selectables, method, dct in ((r, "doRead", reads), (w, "doWrite", writes)): for selectable in selectables: # if this was disconnected in another thread, kill it. 
if selectable not in dct: continue # This for pausing input when we're not ready for more. _logrun(selectable, _drdw, selectable, method, dct) #print >>sys.stderr, "done _process_Notify" def _process_Failure(self, f): f.raiseException() _doIterationInThread = _doSelectInThread def ensureWorkerThread(self): if self.workerThread is None or not self.workerThread.isAlive(): self.workerThread = Thread(target=self._workerInThread) self.workerThread.start() def doThreadIteration(self, timeout): self._sendToThread(self._doIterationInThread, timeout) self.ensureWorkerThread() #print >>sys.stderr, 'getting...' msg, args = self.toMainThread.get() #print >>sys.stderr, 'got', msg, args getattr(self, '_process_' + msg)(*args) doIteration = doThreadIteration def _interleave(self): while self.running: #print >>sys.stderr, "runUntilCurrent" self.runUntilCurrent() t2 = self.timeout() t = self.running and t2 self._sendToThread(self._doIterationInThread, t) #print >>sys.stderr, "yielding" yield None #print >>sys.stderr, "fetching" msg, args = self.toMainThread.get_nowait() getattr(self, '_process_' + msg)(*args) def interleave(self, waker, *args, **kw): """ interleave(waker) interleaves this reactor with the current application by moving the blocking parts of the reactor (select() in this case) to a separate thread. This is typically useful for integration with GUI applications which have their own event loop already running. See the module docstring for more information. """ self.startRunning(*args, **kw) loop = self._interleave() def mainWaker(waker=waker, loop=loop): #print >>sys.stderr, "mainWaker()" waker(loop.next) self.mainWaker = mainWaker loop.next() self.ensureWorkerThread() def _mainLoopShutdown(self): self.mainWaker = None if self.workerThread is not None: #print >>sys.stderr, 'getting...' 
self._sendToThread(raiseException, SystemExit) self.wakeUp() try: while 1: msg, args = self.toMainThread.get_nowait() #print >>sys.stderr, "ignored:", (msg, args) except Empty: pass self.workerThread.join() self.workerThread = None try: while 1: fn, args = self.toThreadQueue.get_nowait() if fn is self._doIterationInThread: log.msg('Iteration is still in the thread queue!') elif fn is raiseException and args[0] is SystemExit: pass else: fn(*args) except Empty: pass def _doReadOrWrite(self, selectable, method, dict): try: why = getattr(selectable, method)() handfn = getattr(selectable, 'fileno', None) if not handfn: why = _NO_FILENO elif handfn() == -1: why = _NO_FILEDESC except: why = sys.exc_info()[1] log.err() if why: self._disconnectSelectable(selectable, why, method == "doRead") def addReader(self, reader): """Add a FileDescriptor for notification of data available to read. """ self._sendToThread(self.reads.__setitem__, reader, 1) self.wakeUp() def addWriter(self, writer): """Add a FileDescriptor for notification of data available to write. """ self._sendToThread(self.writes.__setitem__, writer, 1) self.wakeUp() def removeReader(self, reader): """Remove a Selectable for notification of data available to read. """ self._sendToThread(dictRemove, self.reads, reader) def removeWriter(self, writer): """Remove a Selectable for notification of data available to write. """ self._sendToThread(dictRemove, self.writes, writer) def removeAll(self): return self._removeAll(self.reads, self.writes) def getReaders(self): return self.reads.keys() def getWriters(self): return self.writes.keys() def stop(self): """ Extend the base stop implementation to also wake up the select thread so that C{runUntilCurrent} notices the reactor should stop. 
""" posixbase.PosixReactorBase.stop(self) self.wakeUp() def run(self, installSignalHandlers=1): self.startRunning(installSignalHandlers=installSignalHandlers) self.mainLoop() def mainLoop(self): q = Queue() self.interleave(q.put) while self.running: try: q.get()() except StopIteration: break def install(): """Configure the twisted mainloop to be run using the select() reactor. """ reactor = ThreadedSelectReactor() from twisted.internet.main import installReactor installReactor(reactor) return reactor __all__ = ['install']
mit
amiguez/youtube-dl
youtube_dl/extractor/srf.py
102
4254
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( determine_ext, parse_iso8601, xpath_text, ) class SrfIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.srf\.ch/play(?:er)?/tv/[^/]+/video/(?P<display_id>[^?]+)\?id=|tp\.srgssr\.ch/p/flash\?urn=urn:srf:ais:video:)(?P<id>[0-9a-f\-]{36})' _TESTS = [{ 'url': 'http://www.srf.ch/play/tv/10vor10/video/snowden-beantragt-asyl-in-russland?id=28e1a57d-5b76-4399-8ab3-9097f071e6c5', 'md5': '4cd93523723beff51bb4bee974ee238d', 'info_dict': { 'id': '28e1a57d-5b76-4399-8ab3-9097f071e6c5', 'display_id': 'snowden-beantragt-asyl-in-russland', 'ext': 'm4v', 'upload_date': '20130701', 'title': 'Snowden beantragt Asyl in Russland', 'timestamp': 1372713995, } }, { # No Speichern (Save) button 'url': 'http://www.srf.ch/play/tv/top-gear/video/jaguar-xk120-shadow-und-tornado-dampflokomotive?id=677f5829-e473-4823-ac83-a1087fe97faa', 'md5': 'd97e236e80d1d24729e5d0953d276a4f', 'info_dict': { 'id': '677f5829-e473-4823-ac83-a1087fe97faa', 'display_id': 'jaguar-xk120-shadow-und-tornado-dampflokomotive', 'ext': 'flv', 'upload_date': '20130710', 'title': 'Jaguar XK120, Shadow und Tornado-Dampflokomotive', 'timestamp': 1373493600, }, }, { 'url': 'http://www.srf.ch/player/tv/10vor10/video/snowden-beantragt-asyl-in-russland?id=28e1a57d-5b76-4399-8ab3-9097f071e6c5', 'only_matching': True, }, { 'url': 'https://tp.srgssr.ch/p/flash?urn=urn:srf:ais:video:28e1a57d-5b76-4399-8ab3-9097f071e6c5', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url) display_id = re.match(self._VALID_URL, url).group('display_id') or video_id video_data = self._download_xml( 'http://il.srgssr.ch/integrationlayer/1.0/ue/srf/video/play/%s.xml' % video_id, display_id) title = xpath_text( video_data, './AssetMetadatas/AssetMetadata/title', fatal=True) thumbnails = [{ 'url': s.text } for s in video_data.findall('.//ImageRepresentation/url')] timestamp = 
parse_iso8601(xpath_text(video_data, './createdDate')) # The <duration> field in XML is different from the exact duration, skipping formats = [] for item in video_data.findall('./Playlists/Playlist') + video_data.findall('./Downloads/Download'): for url_node in item.findall('url'): quality = url_node.attrib['quality'] full_url = url_node.text original_ext = determine_ext(full_url) format_id = '%s-%s' % (quality, item.attrib['protocol']) if original_ext == 'f4m': formats.extend(self._extract_f4m_formats( full_url + '?hdcore=3.4.0', display_id, f4m_id=format_id)) elif original_ext == 'm3u8': formats.extend(self._extract_m3u8_formats( full_url, display_id, 'mp4', m3u8_id=format_id)) else: formats.append({ 'url': full_url, 'ext': original_ext, 'format_id': format_id, 'quality': 0 if 'HD' in quality else -1, 'preference': 1, }) self._sort_formats(formats) subtitles = {} subtitles_data = video_data.find('Subtitles') if subtitles_data is not None: subtitles_list = [{ 'url': sub.text, 'ext': determine_ext(sub.text), } for sub in subtitles_data] if subtitles_list: subtitles['de'] = subtitles_list return { 'id': video_id, 'display_id': display_id, 'formats': formats, 'title': title, 'thumbnails': thumbnails, 'timestamp': timestamp, 'subtitles': subtitles, }
unlicense
abhitopia/tensorflow
tensorflow/contrib/learn/python/learn/estimators/__init__.py
17
12228
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """An estimator is a rule for calculating an estimate of a given quantity. # Estimators * **Estimators** are used to train and evaluate TensorFlow models. They support regression and classification problems. * **Classifiers** are functions that have discrete outcomes. * **Regressors** are functions that predict continuous values. ## Choosing the correct estimator * For **Regression** problems use one of the following: * `LinearRegressor`: Uses linear model. * `DNNRegressor`: Uses DNN. * `DNNLinearCombinedRegressor`: Uses Wide & Deep. * `TensorForestEstimator`: Uses RandomForest. See tf.contrib.tensor_forest.client.random_forest.TensorForestEstimator. * `Estimator`: Use when you need a custom model. * For **Classification** problems use one of the following: * `LinearClassifier`: Multiclass classifier using Linear model. * `DNNClassifier`: Multiclass classifier using DNN. * `DNNLinearCombinedClassifier`: Multiclass classifier using Wide & Deep. * `TensorForestEstimator`: Uses RandomForest. See tf.contrib.tensor_forest.client.random_forest.TensorForestEstimator. * `SVM`: Binary classifier using linear SVMs. * `LogisticRegressor`: Use when you need custom model for binary classification. * `Estimator`: Use when you need custom model for N class classification. 
## Pre-canned Estimators Pre-canned estimators are machine learning estimators premade for general purpose problems. If you need more customization, you can always write your own custom estimator as described in the section below. Pre-canned estimators are tested and optimized for speed and quality. ### Define the feature columns Here are some possible types of feature columns used as inputs to a pre-canned estimator. Feature columns may vary based on the estimator used. So you can see which feature columns are fed to each estimator in the below section. ```python sparse_feature_a = sparse_column_with_keys( column_name="sparse_feature_a", keys=["AB", "CD", ...]) embedding_feature_a = embedding_column( sparse_id_column=sparse_feature_a, dimension=3, combiner="sum") sparse_feature_b = sparse_column_with_hash_bucket( column_name="sparse_feature_b", hash_bucket_size=1000) embedding_feature_b = embedding_column( sparse_id_column=sparse_feature_b, dimension=16, combiner="sum") crossed_feature_a_x_b = crossed_column( columns=[sparse_feature_a, sparse_feature_b], hash_bucket_size=10000) real_feature = real_valued_column("real_feature") real_feature_buckets = bucketized_column( source_column=real_feature, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65]) ``` ### Create the pre-canned estimator DNNClassifier, DNNRegressor, and DNNLinearCombinedClassifier are all pretty similar to each other in how you use them. You can easily plug in an optimizer and/or regularization to those estimators. #### DNNClassifier A classifier for TensorFlow DNN models. ```python my_features = [embedding_feature_a, embedding_feature_b] estimator = DNNClassifier( feature_columns=my_features, hidden_units=[1024, 512, 256], optimizer=tf.train.ProximalAdagradOptimizer( learning_rate=0.1, l1_regularization_strength=0.001 )) ``` #### DNNRegressor A regressor for TensorFlow DNN models. 
```python my_features = [embedding_feature_a, embedding_feature_b] estimator = DNNRegressor( feature_columns=my_features, hidden_units=[1024, 512, 256]) # Or estimator using the ProximalAdagradOptimizer optimizer with # regularization. estimator = DNNRegressor( feature_columns=my_features, hidden_units=[1024, 512, 256], optimizer=tf.train.ProximalAdagradOptimizer( learning_rate=0.1, l1_regularization_strength=0.001 )) ``` #### DNNLinearCombinedClassifier A classifier for TensorFlow Linear and DNN joined training models. * Wide and deep model * Multi class (2 by default) ```python my_linear_features = [crossed_feature_a_x_b] my_deep_features = [embedding_feature_a, embedding_feature_b] estimator = DNNLinearCombinedClassifier( # Common settings n_classes=n_classes, weight_column_name=weight_column_name, # Wide settings linear_feature_columns=my_linear_features, linear_optimizer=tf.train.FtrlOptimizer(...), # Deep settings dnn_feature_columns=my_deep_features, dnn_hidden_units=[1000, 500, 100], dnn_optimizer=tf.train.AdagradOptimizer(...)) ``` #### LinearClassifier Train a linear model to classify instances into one of multiple possible classes. When number of possible classes is 2, this is binary classification. ```python my_features = [sparse_feature_b, crossed_feature_a_x_b] estimator = LinearClassifier( feature_columns=my_features, optimizer=tf.train.FtrlOptimizer( learning_rate=0.1, l1_regularization_strength=0.001 )) ``` #### LinearRegressor Train a linear regression model to predict a label value given observation of feature values. ```python my_features = [sparse_feature_b, crossed_feature_a_x_b] estimator = LinearRegressor( feature_columns=my_features) ``` ### LogisticRegressor Logistic regression estimator for binary classification. ```python # See tf.contrib.learn.Estimator(...) 
for details on model_fn structure def my_model_fn(...): pass estimator = LogisticRegressor(model_fn=my_model_fn) # Input builders def input_fn_train: pass estimator.fit(input_fn=input_fn_train) estimator.predict(x=x) ``` #### SVM - Support Vector Machine Support Vector Machine (SVM) model for binary classification. Currently only linear SVMs are supported. ```python my_features = [real_feature, sparse_feature_a] estimator = SVM( example_id_column='example_id', feature_columns=my_features, l2_regularization=10.0) ``` #### DynamicRnnEstimator An `Estimator` that uses a recurrent neural network with dynamic unrolling. ```python problem_type = ProblemType.CLASSIFICATION # or REGRESSION prediction_type = PredictionType.SINGLE_VALUE # or MULTIPLE_VALUE estimator = DynamicRnnEstimator(problem_type, prediction_type, my_feature_columns) ``` ### Use the estimator There are two main functions for using estimators, one of which is for training, and one of which is for evaluation. You can specify different data sources for each one in order to use different datasets for train and eval. ```python # Input builders def input_fn_train: # returns x, Y ... estimator.fit(input_fn=input_fn_train) def input_fn_eval: # returns x, Y ... estimator.evaluate(input_fn=input_fn_eval) estimator.predict(x=x) ``` ## Creating Custom Estimator To create a custom `Estimator`, provide a function to `Estimator`'s constructor that builds your model (`model_fn`, below): ```python estimator = tf.contrib.learn.Estimator( model_fn=model_fn, model_dir=model_dir) # Where the model's data (e.g., checkpoints) # are saved. ``` Here is a skeleton of this function, with descriptions of its arguments and return values in the accompanying tables: ```python def model_fn(features, targets, mode, params): # Logic to do the following: # 1. Configure the model via TensorFlow operations # 2. Define the loss function for training/evaluation # 3. Define the training operation/optimizer # 4. 
Generate predictions return predictions, loss, train_op ``` You may use `mode` and check against `tf.contrib.learn.ModeKeys.{TRAIN, EVAL, INFER}` to parameterize `model_fn`. In the Further Reading section below, there is an end-to-end TensorFlow tutorial for building a custom estimator. ## Additional Estimators There is an additional estimators under `tensorflow.contrib.factorization.python.ops`: * Gaussian mixture model (GMM) clustering ## Further reading For further reading, there are several tutorials with relevant topics, including: * [Overview of linear models](../../../tutorials/linear/overview.md) * [Linear model tutorial](../../../tutorials/wide/index.md) * [Wide and deep learning tutorial](../../../tutorials/wide_and_deep/index.md) * [Custom estimator tutorial](../../../tutorials/estimators/index.md) * [Building input functions](../../../tutorials/input_fn/index.md) """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.learn.python.learn.estimators._sklearn import NotFittedError from tensorflow.contrib.learn.python.learn.estimators.constants import ProblemType from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNClassifier from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNEstimator from tensorflow.contrib.learn.python.learn.estimators.dnn import DNNRegressor from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedClassifier from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedEstimator from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined import DNNLinearCombinedRegressor from tensorflow.contrib.learn.python.learn.estimators.dynamic_rnn_estimator import DynamicRnnEstimator from tensorflow.contrib.learn.python.learn.estimators.estimator import BaseEstimator from tensorflow.contrib.learn.python.learn.estimators.estimator import Estimator 
from tensorflow.contrib.learn.python.learn.estimators.estimator import infer_real_valued_columns_from_input from tensorflow.contrib.learn.python.learn.estimators.estimator import infer_real_valued_columns_from_input_fn from tensorflow.contrib.learn.python.learn.estimators.estimator import SKCompat from tensorflow.contrib.learn.python.learn.estimators.head import binary_svm_head from tensorflow.contrib.learn.python.learn.estimators.head import Head from tensorflow.contrib.learn.python.learn.estimators.head import multi_class_head from tensorflow.contrib.learn.python.learn.estimators.head import multi_head from tensorflow.contrib.learn.python.learn.estimators.head import multi_label_head from tensorflow.contrib.learn.python.learn.estimators.head import no_op_train_fn from tensorflow.contrib.learn.python.learn.estimators.head import poisson_regression_head from tensorflow.contrib.learn.python.learn.estimators.head import regression_head from tensorflow.contrib.learn.python.learn.estimators.kmeans import KMeansClustering from tensorflow.contrib.learn.python.learn.estimators.linear import LinearClassifier from tensorflow.contrib.learn.python.learn.estimators.linear import LinearEstimator from tensorflow.contrib.learn.python.learn.estimators.linear import LinearRegressor from tensorflow.contrib.learn.python.learn.estimators.logistic_regressor import LogisticRegressor from tensorflow.contrib.learn.python.learn.estimators.metric_key import MetricKey from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModeKeys from tensorflow.contrib.learn.python.learn.estimators.model_fn import ModelFnOps from tensorflow.contrib.learn.python.learn.estimators.prediction_key import PredictionKey from tensorflow.contrib.learn.python.learn.estimators.run_config import ClusterConfig from tensorflow.contrib.learn.python.learn.estimators.run_config import Environment from tensorflow.contrib.learn.python.learn.estimators.run_config import RunConfig from 
tensorflow.contrib.learn.python.learn.estimators.run_config import TaskType from tensorflow.contrib.learn.python.learn.estimators.svm import SVM
apache-2.0
hachreak/invenio-demosite
invenio_demosite/testsuite/regression/test_websearchadmin.py
7
3109
# -*- coding: utf-8 -*- # # This file is part of Invenio Demosite. # Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013 CERN. # # Invenio Demosite is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio Demosite is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """WebSearch Admin Regression Test Suite.""" __revision__ = "$Id$" from invenio.base.globals import cfg from invenio.testsuite import make_test_suite, run_test_suite, \ test_web_page_content, merge_error_messages, \ InvenioTestCase class WebSearchAdminWebPagesAvailabilityTest(InvenioTestCase): """Check WebSearch Admin web pages whether they are up or not.""" def test_websearch_admin_interface_pages_availability(self): """websearchadmin - availability of WebSearch Admin interface pages""" baseurl = cfg['CFG_SITE_URL'] + '/admin/websearch/websearchadmin.py' _exports = ['', '?mtype=perform_showall', '?mtype=perform_addcollection', '?mtype=perform_addcollectiontotree', '?mtype=perform_modifycollectiontree', '?mtype=perform_checkwebcollstatus', '?mtype=perform_checkcollectionstatus',] error_messages = [] for url in [baseurl + page for page in _exports]: # first try as guest: error_messages.extend(test_web_page_content(url, username='guest', expected_text= 'Authorization failure')) # then try as admin: error_messages.extend(test_web_page_content(url, username='admin')) if error_messages: self.fail(merge_error_messages(error_messages)) return def 
test_websearch_admin_guide_availability(self): """websearchadmin - availability of WebSearch Admin guide pages""" url = cfg['CFG_SITE_URL'] + '/help/admin/websearch-admin-guide' error_messages = test_web_page_content(url, expected_text="WebSearch Admin Guide") if error_messages: self.fail(merge_error_messages(error_messages)) return TEST_SUITE = make_test_suite(WebSearchAdminWebPagesAvailabilityTest) if __name__ == "__main__": run_test_suite(TEST_SUITE, warn_user=True)
gpl-2.0
theanalyst/cinder
cinder/api/v1/snapshot_metadata.py
8
6230
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob from cinder.api import common from cinder.api.openstack import wsgi from cinder import exception from cinder import volume from webob import exc class Controller(wsgi.Controller): """The snapshot metadata API controller for the OpenStack API.""" def __init__(self): self.volume_api = volume.API() super(Controller, self).__init__() def _get_metadata(self, context, snapshot_id): try: snapshot = self.volume_api.get_snapshot(context, snapshot_id) meta = self.volume_api.get_snapshot_metadata(context, snapshot) except exception.SnapshotNotFound: msg = _('snapshot does not exist') raise exc.HTTPNotFound(explanation=msg) return meta @wsgi.serializers(xml=common.MetadataTemplate) def index(self, req, snapshot_id): """Returns the list of metadata for a given snapshot.""" context = req.environ['cinder.context'] return {'metadata': self._get_metadata(context, snapshot_id)} @wsgi.serializers(xml=common.MetadataTemplate) @wsgi.deserializers(xml=common.MetadataDeserializer) def create(self, req, snapshot_id, body): try: metadata = body['metadata'] except (KeyError, TypeError): msg = _("Malformed request body") raise exc.HTTPBadRequest(explanation=msg) context = req.environ['cinder.context'] new_metadata = self._update_snapshot_metadata(context, snapshot_id, metadata, delete=False) return {'metadata': new_metadata} @wsgi.serializers(xml=common.MetaItemTemplate) 
@wsgi.deserializers(xml=common.MetaItemDeserializer) def update(self, req, snapshot_id, id, body): try: meta_item = body['meta'] except (TypeError, KeyError): expl = _('Malformed request body') raise exc.HTTPBadRequest(explanation=expl) if id not in meta_item: expl = _('Request body and URI mismatch') raise exc.HTTPBadRequest(explanation=expl) if len(meta_item) > 1: expl = _('Request body contains too many items') raise exc.HTTPBadRequest(explanation=expl) context = req.environ['cinder.context'] self._update_snapshot_metadata(context, snapshot_id, meta_item, delete=False) return {'meta': meta_item} @wsgi.serializers(xml=common.MetadataTemplate) @wsgi.deserializers(xml=common.MetadataDeserializer) def update_all(self, req, snapshot_id, body): try: metadata = body['metadata'] except (TypeError, KeyError): expl = _('Malformed request body') raise exc.HTTPBadRequest(explanation=expl) context = req.environ['cinder.context'] new_metadata = self._update_snapshot_metadata(context, snapshot_id, metadata, delete=True) return {'metadata': new_metadata} def _update_snapshot_metadata(self, context, snapshot_id, metadata, delete=False): try: snapshot = self.volume_api.get_snapshot(context, snapshot_id) return self.volume_api.update_snapshot_metadata(context, snapshot, metadata, delete) except exception.SnapshotNotFound: msg = _('snapshot does not exist') raise exc.HTTPNotFound(explanation=msg) except (ValueError, AttributeError): msg = _("Malformed request body") raise exc.HTTPBadRequest(explanation=msg) except exception.InvalidVolumeMetadata as error: raise exc.HTTPBadRequest(explanation=error.msg) except exception.InvalidVolumeMetadataSize as error: raise exc.HTTPRequestEntityTooLarge(explanation=error.msg) @wsgi.serializers(xml=common.MetaItemTemplate) def show(self, req, snapshot_id, id): """Return a single metadata item.""" context = req.environ['cinder.context'] data = self._get_metadata(context, snapshot_id) try: return {'meta': {id: data[id]}} except KeyError: msg = 
_("Metadata item was not found") raise exc.HTTPNotFound(explanation=msg) def delete(self, req, snapshot_id, id): """Deletes an existing metadata.""" context = req.environ['cinder.context'] metadata = self._get_metadata(context, snapshot_id) if id not in metadata: msg = _("Metadata item was not found") raise exc.HTTPNotFound(explanation=msg) try: snapshot = self.volume_api.get_snapshot(context, snapshot_id) self.volume_api.delete_snapshot_metadata(context, snapshot, id) except exception.SnapshotNotFound: msg = _('snapshot does not exist') raise exc.HTTPNotFound(explanation=msg) return webob.Response(status_int=200) def create_resource(): return wsgi.Resource(Controller())
apache-2.0
gunchleoc/django
django/db/backends/sqlite3/features.py
194
2656
from __future__ import unicode_literals from django.db import utils from django.db.backends.base.features import BaseDatabaseFeatures from django.utils import six from django.utils.functional import cached_property from .base import Database try: import pytz except ImportError: pytz = None class DatabaseFeatures(BaseDatabaseFeatures): # SQLite cannot handle us only partially reading from a cursor's result set # and then writing the same rows to the database in another cursor. This # setting ensures we always read result sets fully into memory all in one # go. can_use_chunked_reads = False test_db_allows_multiple_connections = False supports_unspecified_pk = True supports_timezones = False supports_1000_query_parameters = False supports_mixed_date_datetime_comparisons = False has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False supports_foreign_keys = False supports_column_check_constraints = False autocommits_when_autocommit_is_off = True can_introspect_decimal_field = False can_introspect_positive_integer_field = True can_introspect_small_integer_field = True supports_transactions = True atomic_transactions = False can_rollback_ddl = True supports_paramstyle_pyformat = False supports_sequence_reset = False can_clone_databases = True @cached_property def uses_savepoints(self): return Database.sqlite_version_info >= (3, 6, 8) @cached_property def can_release_savepoints(self): return self.uses_savepoints @cached_property def can_share_in_memory_db(self): return ( six.PY3 and Database.__name__ == 'sqlite3.dbapi2' and Database.sqlite_version_info >= (3, 7, 13) ) @cached_property def supports_stddev(self): """Confirm support for STDDEV and related stats functions SQLite supports STDDEV as an extension package; so connection.ops.check_expression_support() can't unilaterally rule out support for STDDEV. We need to manually check whether the call works. 
""" with self.connection.cursor() as cursor: cursor.execute('CREATE TABLE STDDEV_TEST (X INT)') try: cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST') has_support = True except utils.DatabaseError: has_support = False cursor.execute('DROP TABLE STDDEV_TEST') return has_support @cached_property def has_zoneinfo_database(self): return pytz is not None
bsd-3-clause
neopoly/rubyfox-server
lib/rubyfox/server/data/lib/Lib/weakref.py
8
7161
"""Weak reference support for Python. This module is an implementation of PEP 205: http://python.sourceforge.net/peps/pep-0205.html """ # Naming convention: Variables named "wr" are weak reference objects; # they are called this instead of "ref" to avoid name collisions with # the module-global ref() function imported from _weakref. import UserDict from _weakref import \ getweakrefcount, \ getweakrefs, \ ref, \ proxy, \ CallableProxyType, \ ProxyType, \ ReferenceType from exceptions import ReferenceError ProxyTypes = (ProxyType, CallableProxyType) __all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs", "WeakKeyDictionary", "ReferenceType", "ProxyType", "CallableProxyType", "ProxyTypes", "WeakValueDictionary"] class WeakValueDictionary(UserDict.UserDict): """Mapping class that references values weakly. Entries in the dictionary will be discarded when no strong reference to the value exists anymore """ # We inherit the constructor without worrying about the input # dictionary; since it uses our .update() method, we get the right # checks (if the other dictionary is a WeakValueDictionary, # objects are unwrapped on the way out, and we always wrap on the # way in). 
def __getitem__(self, key): o = self.data[key]() if o is None: raise KeyError, key else: return o def __repr__(self): return "<WeakValueDictionary at %s>" % id(self) def __setitem__(self, key, value): self.data[key] = ref(value, self.__makeremove(key)) def copy(self): new = WeakValueDictionary() for key, wr in self.data.items(): o = wr() if o is not None: new[key] = o return new def get(self, key, default=None): try: wr = self.data[key] except KeyError: return default else: o = wr() if o is None: # This should only happen return default else: return o def items(self): L = [] for key, wr in self.data.items(): o = wr() if o is not None: L.append((key, o)) return L def iteritems(self): return WeakValuedItemIterator(self) def iterkeys(self): return self.data.iterkeys() __iter__ = iterkeys def itervalues(self): return WeakValuedValueIterator(self) def popitem(self): while 1: key, wr = self.data.popitem() o = wr() if o is not None: return key, o def setdefault(self, key, default): try: wr = self.data[key] except KeyError: self.data[key] = ref(default, self.__makeremove(key)) return default else: return wr() def update(self, dict): d = self.data for key, o in dict.items(): d[key] = ref(o, self.__makeremove(key)) def values(self): L = [] for wr in self.data.values(): o = wr() if o is not None: L.append(o) return L def __makeremove(self, key): def remove(o, selfref=ref(self), key=key): self = selfref() if self is not None: del self.data[key] return remove class WeakKeyDictionary(UserDict.UserDict): """ Mapping class that references keys weakly. Entries in the dictionary will be discarded when there is no longer a strong reference to the key. This can be used to associate additional data with an object owned by other parts of an application without adding attributes to those objects. This can be especially useful with objects that override attribute accesses. 
""" def __init__(self, dict=None): self.data = {} def remove(k, selfref=ref(self)): self = selfref() if self is not None: del self.data[k] self._remove = remove if dict is not None: self.update(dict) def __delitem__(self, key): del self.data[ref(key)] def __getitem__(self, key): return self.data[ref(key)] def __repr__(self): return "<WeakKeyDictionary at %s>" % id(self) def __setitem__(self, key, value): self.data[ref(key, self._remove)] = value def copy(self): new = WeakKeyDictionary() for key, value in self.data.items(): o = key() if o is not None: new[o] = value return new def get(self, key, default=None): return self.data.get(ref(key),default) def has_key(self, key): try: wr = ref(key) except TypeError: return 0 return self.data.has_key(wr) def items(self): L = [] for key, value in self.data.items(): o = key() if o is not None: L.append((o, value)) return L def iteritems(self): return WeakKeyedItemIterator(self) def iterkeys(self): return WeakKeyedKeyIterator(self) __iter__ = iterkeys def itervalues(self): return self.data.itervalues() def keys(self): L = [] for wr in self.data.keys(): o = wr() if o is not None: L.append(o) return L def popitem(self): while 1: key, value = self.data.popitem() o = key() if o is not None: return o, value def setdefault(self, key, default): return self.data.setdefault(ref(key, self._remove),default) def update(self, dict): d = self.data for key, value in dict.items(): d[ref(key, self._remove)] = value class BaseIter: def __iter__(self): return self class WeakKeyedKeyIterator(BaseIter): def __init__(self, weakdict): self._next = weakdict.data.iterkeys().next def next(self): while 1: wr = self._next() obj = wr() if obj is not None: return obj class WeakKeyedItemIterator(BaseIter): def __init__(self, weakdict): self._next = weakdict.data.iteritems().next def next(self): while 1: wr, value = self._next() key = wr() if key is not None: return key, value class WeakValuedValueIterator(BaseIter): def __init__(self, weakdict): self._next = 
weakdict.data.itervalues().next def next(self): while 1: wr = self._next() obj = wr() if obj is not None: return obj class WeakValuedItemIterator(BaseIter): def __init__(self, weakdict): self._next = weakdict.data.iteritems().next def next(self): while 1: key, wr = self._next() value = wr() if value is not None: return key, value # no longer needed del UserDict
mit
liuliu/fast-rcnn
lib/roi_data_layer/roidb.py
43
5176
# -------------------------------------------------------- # Fast R-CNN # Copyright (c) 2015 Microsoft # Licensed under The MIT License [see LICENSE for details] # Written by Ross Girshick # -------------------------------------------------------- """Transform a roidb into a trainable roidb by adding a bunch of metadata.""" import numpy as np from fast_rcnn.config import cfg import utils.cython_bbox def prepare_roidb(imdb): """Enrich the imdb's roidb by adding some derived quantities that are useful for training. This function precomputes the maximum overlap, taken over ground-truth boxes, between each ROI and each ground-truth box. The class with maximum overlap is also recorded. """ roidb = imdb.roidb for i in xrange(len(imdb.image_index)): roidb[i]['image'] = imdb.image_path_at(i) # need gt_overlaps as a dense array for argmax gt_overlaps = roidb[i]['gt_overlaps'].toarray() # max overlap with gt over classes (columns) max_overlaps = gt_overlaps.max(axis=1) # gt class that had the max overlap max_classes = gt_overlaps.argmax(axis=1) roidb[i]['max_classes'] = max_classes roidb[i]['max_overlaps'] = max_overlaps # sanity checks # max overlap of 0 => class should be zero (background) zero_inds = np.where(max_overlaps == 0)[0] assert all(max_classes[zero_inds] == 0) # max overlap > 0 => class should not be zero (must be a fg class) nonzero_inds = np.where(max_overlaps > 0)[0] assert all(max_classes[nonzero_inds] != 0) def add_bbox_regression_targets(roidb): """Add information needed to train bounding-box regressors.""" assert len(roidb) > 0 assert 'max_classes' in roidb[0], 'Did you call prepare_roidb first?' 
num_images = len(roidb) # Infer number of classes from the number of columns in gt_overlaps num_classes = roidb[0]['gt_overlaps'].shape[1] for im_i in xrange(num_images): rois = roidb[im_i]['boxes'] max_overlaps = roidb[im_i]['max_overlaps'] max_classes = roidb[im_i]['max_classes'] roidb[im_i]['bbox_targets'] = \ _compute_targets(rois, max_overlaps, max_classes) # Compute values needed for means and stds # var(x) = E(x^2) - E(x)^2 class_counts = np.zeros((num_classes, 1)) + cfg.EPS sums = np.zeros((num_classes, 4)) squared_sums = np.zeros((num_classes, 4)) for im_i in xrange(num_images): targets = roidb[im_i]['bbox_targets'] for cls in xrange(1, num_classes): cls_inds = np.where(targets[:, 0] == cls)[0] if cls_inds.size > 0: class_counts[cls] += cls_inds.size sums[cls, :] += targets[cls_inds, 1:].sum(axis=0) squared_sums[cls, :] += (targets[cls_inds, 1:] ** 2).sum(axis=0) means = sums / class_counts stds = np.sqrt(squared_sums / class_counts - means ** 2) # Normalize targets for im_i in xrange(num_images): targets = roidb[im_i]['bbox_targets'] for cls in xrange(1, num_classes): cls_inds = np.where(targets[:, 0] == cls)[0] roidb[im_i]['bbox_targets'][cls_inds, 1:] -= means[cls, :] roidb[im_i]['bbox_targets'][cls_inds, 1:] /= stds[cls, :] # These values will be needed for making predictions # (the predicts will need to be unnormalized and uncentered) return means.ravel(), stds.ravel() def _compute_targets(rois, overlaps, labels): """Compute bounding-box regression targets for an image.""" # Ensure ROIs are floats rois = rois.astype(np.float, copy=False) # Indices of ground-truth ROIs gt_inds = np.where(overlaps == 1)[0] # Indices of examples for which we try to make predictions ex_inds = np.where(overlaps >= cfg.TRAIN.BBOX_THRESH)[0] # Get IoU overlap between each ex ROI and gt ROI ex_gt_overlaps = utils.cython_bbox.bbox_overlaps(rois[ex_inds, :], rois[gt_inds, :]) # Find which gt ROI each ex ROI has max overlap with: # this will be the ex ROI's gt target 
gt_assignment = ex_gt_overlaps.argmax(axis=1) gt_rois = rois[gt_inds[gt_assignment], :] ex_rois = rois[ex_inds, :] ex_widths = ex_rois[:, 2] - ex_rois[:, 0] + cfg.EPS ex_heights = ex_rois[:, 3] - ex_rois[:, 1] + cfg.EPS ex_ctr_x = ex_rois[:, 0] + 0.5 * ex_widths ex_ctr_y = ex_rois[:, 1] + 0.5 * ex_heights gt_widths = gt_rois[:, 2] - gt_rois[:, 0] + cfg.EPS gt_heights = gt_rois[:, 3] - gt_rois[:, 1] + cfg.EPS gt_ctr_x = gt_rois[:, 0] + 0.5 * gt_widths gt_ctr_y = gt_rois[:, 1] + 0.5 * gt_heights targets_dx = (gt_ctr_x - ex_ctr_x) / ex_widths targets_dy = (gt_ctr_y - ex_ctr_y) / ex_heights targets_dw = np.log(gt_widths / ex_widths) targets_dh = np.log(gt_heights / ex_heights) targets = np.zeros((rois.shape[0], 5), dtype=np.float32) targets[ex_inds, 0] = labels[ex_inds] targets[ex_inds, 1] = targets_dx targets[ex_inds, 2] = targets_dy targets[ex_inds, 3] = targets_dw targets[ex_inds, 4] = targets_dh return targets
mit
zhimin711/nova
nova/api/openstack/compute/legacy_v2/consoles.py
79
3328
# Copyright 2010 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob from webob import exc from nova.api.openstack import wsgi from nova.console import api as console_api from nova import exception def _translate_keys(cons): """Coerces a console instance into proper dictionary format.""" pool = cons['pool'] info = {'id': cons['id'], 'console_type': pool['console_type']} return dict(console=info) def _translate_detail_keys(cons): """Coerces a console instance into proper dictionary format with correctly mapped attributes. 
""" pool = cons['pool'] info = {'id': cons['id'], 'console_type': pool['console_type'], 'password': cons['password'], 'instance_name': cons['instance_name'], 'port': cons['port'], 'host': pool['public_hostname']} return dict(console=info) class Controller(object): """The Consoles controller for the OpenStack API.""" def __init__(self): self.console_api = console_api.API() def index(self, req, server_id): """Returns a list of consoles for this instance.""" consoles = self.console_api.get_consoles( req.environ['nova.context'], server_id) return dict(consoles=[_translate_keys(console) for console in consoles]) def create(self, req, server_id, body): """Creates a new console.""" try: self.console_api.create_console( req.environ['nova.context'], server_id) except exception.InstanceNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) def show(self, req, server_id, id): """Shows in-depth information on a specific console.""" try: console = self.console_api.get_console( req.environ['nova.context'], server_id, int(id)) except exception.NotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return _translate_detail_keys(console) def delete(self, req, server_id, id): """Deletes a console.""" try: self.console_api.delete_console(req.environ['nova.context'], server_id, int(id)) except exception.NotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return webob.Response(status_int=202) def create_resource(): return wsgi.Resource(Controller())
apache-2.0
rpp0/peapwn
mods/hostap/tests/hwsim/test_ap_acs.py
2
2170
#!/usr/bin/python # # Test cases for automatic channel selection with hostapd # Copyright (c) 2013, Jouni Malinen <j@w1.fi> # # This software may be distributed under the terms of the BSD license. # See README for more details. import logging logger = logging.getLogger() import hostapd def wait_acs(hapd): ev = hapd.wait_event(["ACS-STARTED", "ACS-COMPLETED", "ACS-FAILED", "AP-ENABLED"], timeout=5) if not ev: raise Exception("ACS start timed out") if "ACS-STARTED" not in ev: raise Exception("Unexpected ACS event") state = hapd.get_status_field("state") if state != "ACS": raise Exception("Unexpected interface state") ev = hapd.wait_event(["ACS-COMPLETED", "ACS-FAILED", "AP-ENABLED"], timeout=20) if not ev: raise Exception("ACS timed out") if "ACS-COMPLETED" not in ev: raise Exception("Unexpected ACS event") ev = hapd.wait_event(["AP-ENABLED"], timeout=5) if not ev: raise Exception("AP setup timed out") state = hapd.get_status_field("state") if state != "ENABLED": raise Exception("Unexpected interface state") def test_ap_acs(dev, apdev): """Automatic channel selection""" params = hostapd.wpa2_params(ssid="test-acs", passphrase="12345678") params['channel'] = '0' hapd = hostapd.add_ap(apdev[0]['ifname'], params) wait_acs(hapd) freq = hapd.get_status_field("freq") if int(freq) < 2400: raise Exception("Unexpected frequency") dev[0].connect("test-acs", psk="12345678", scan_freq=freq) def test_ap_multi_bss_acs(dev, apdev): """hostapd start with a multi-BSS configuration file using ACS""" ifname = apdev[0]['ifname'] hostapd.add_iface(ifname, 'multi-bss-acs.conf') hapd = hostapd.Hostapd(ifname) hapd.enable() wait_acs(hapd) freq = hapd.get_status_field("freq") if int(freq) < 2400: raise Exception("Unexpected frequency") dev[0].connect("bss-1", key_mgmt="NONE", scan_freq=freq) dev[1].connect("bss-2", psk="12345678", scan_freq=freq) dev[2].connect("bss-3", psk="qwertyuiop", scan_freq=freq)
gpl-2.0
waynesun09/tp-libvirt
libvirt/tests/src/libvirt_bench/libvirt_bench_vcpu_hotplug.py
4
6936
import logging import re from autotest.client.shared import error from virttest import libvirt_xml from virttest import libvirt_vm from virttest import utils_test from virttest.utils_test import libvirt def run(test, params, env): """ Test: vcpu hotplug. The command can change the number of virtual CPUs for VM. 1.Prepare test environment,destroy or suspend a VM. 2.Perform virsh setvcpus operation. 3.Recover test environment. 4.Confirm the test result. """ vm_name = params.get("main_vm") min_count = int(params.get("setvcpus_min_count", "1")) max_count = int(params.get("setvcpus_max_count", "2")) test_times = int(params.get("setvcpus_test_times", "1")) stress_type = params.get("stress_type", "") stress_param = params.get("stress_param", "") add_by_virsh = ("yes" == params.get("add_by_virsh")) del_by_virsh = ("yes" == params.get("del_by_virsh")) test_set_max = max_count * 2 # Save original configuration orig_config_xml = libvirt_xml.VMXML.new_from_inactive_dumpxml(vm_name) # Set min/max of vcpu libvirt_xml.VMXML.set_vm_vcpus(vm_name, test_set_max, min_count) # prepare VM instance vm = libvirt_vm.VM(vm_name, params, test.bindir, env.get("address_cache")) # prepare guest-agent service vm.prepare_guest_agent() # Increase the workload load_vms = [] if stress_type in ['cpu', 'memory', 'io']: params["stress_args"] = stress_param load_vms.append(vm) if stress_type in ['cpu', 'memory']: utils_test.load_stress("stress_in_vms", load_vms, params) else: utils_test.load_stress("iozone_in_vms", load_vms, params) session = vm.wait_for_login() try: # Clear dmesg before set vcpu session.cmd("dmesg -c") for i in range(test_times): # 1. 
Add vcpu add_result = libvirt.hotplug_domain_vcpu(vm_name, max_count, add_by_virsh) add_status = add_result.exit_status # 1.1 check add status if add_status: if add_result.stderr.count("support"): raise error.TestNAError("No need to test any more:\n %s" % add_result.stderr.strip()) raise error.TestFail("Test failed for:\n %s" % add_result.stderr.strip()) # 1.2 check dmesg domain_add_dmesg = session.cmd_output("dmesg -c") dmesg1 = "CPU%d has been hot-added" % (max_count - 1) dmesg2 = "CPU %d got hotplugged" % (max_count - 1) if (not domain_add_dmesg.count(dmesg1) and not domain_add_dmesg.count(dmesg2)): raise error.TestFail("Cannot find hotplug info in dmesg: %s" % domain_add_dmesg) # 1.3 check cpu related file online_cmd = "cat /sys/devices/system/cpu/cpu%d/online" \ % (max_count - 1) st, ot = session.cmd_status_output(online_cmd) if st: raise error.TestFail("Cannot find CPU%d after hotplug" % (max_count - 1)) # 1.4 check online if not ot.strip().count("1"): raise error.TestFail("CPU%d is not online after hotplug: %s" % ((max_count - 1), ot)) # 1.5 check online interrupts info inter_on_output = session.cmd_output("cat /proc/interrupts") if not inter_on_output.count("CPU%d" % (int(max_count) - 1)): raise error.TestFail("CPU%d can not be found in " "/proc/interrupts when it's online:%s" % ((int(max_count) - 1), inter_on_output)) # 1.6 offline vcpu off_st = session.cmd_status("echo 0 > " "/sys/devices/system/cpu/cpu%d/online" % (max_count - 1)) if off_st: raise error.TestFail("Set cpu%d offline failed!" % (max_count - 1)) # 1.7 check offline interrupts info inter_off_output = session.cmd_output("cat /proc/interrupts") if inter_off_output.count("CPU%d" % (int(max_count) - 1)): raise error.TestFail("CPU%d can be found in /proc/interrupts" " when it's offline" % (int(max_count) - 1)) # 2. 
Del vcpu del_result = libvirt.hotplug_domain_vcpu(vm_name, min_count, del_by_virsh, hotplug=False) del_status = del_result.exit_status if del_status: logging.info("del_result: %s" % del_result.stderr.strip()) # A qemu older than 1.5 or an unplug for 1.6 will result in # the following failure. # TODO: when CPU-hotplug feature becomes stable and strong, # remove these codes used to handle kinds of exceptions if re.search("The command cpu-del has not been found", del_result.stderr): raise error.TestNAError("unhotplug failed") if re.search("cannot change vcpu count", del_result.stderr): raise error.TestNAError("unhotplug failed") if re.search("got wrong number of vCPU pids from QEMU monitor", del_result.stderr): raise error.TestNAError("unhotplug failed") # process all tips that contains keyword 'support' # for example, "unsupported"/"hasn't been support" and so on if re.search("support", del_result.stderr): raise error.TestNAError("unhotplug failed") # besides above, regard it failed raise error.TestFail("Test fail for:\n %s" % del_result.stderr.strip()) domain_del_dmesg = session.cmd_output("dmesg -c") if not domain_del_dmesg.count("CPU %d is now offline" % (max_count - 1)): raise error.TestFail("Cannot find hot-unplug info in dmesg: %s" % domain_del_dmesg) except error.TestNAError: # So far, QEMU doesn't support unplug vcpu, # unplug operation will encounter kind of errors. pass finally: session.close() # Cleanup orig_config_xml.sync()
gpl-2.0
podio/podio-py
pypodio2/encode.py
1
15016
"""multipart/form-data encoding module This module provides functions that faciliate encoding name/value pairs as multipart/form-data suitable for a HTTP POST or PUT request. multipart/form-data is the standard way to upload files over HTTP""" import mimetypes import os import re import urllib from email.header import Header __all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam', 'encode_string', 'encode_file_header', 'get_body_size', 'get_headers', 'multipart_encode'] try: from io import UnsupportedOperation except ImportError: UnsupportedOperation = None try: import uuid def gen_boundary(): """Returns a random string to use as the boundary for a message""" return uuid.uuid4().hex except ImportError: import random import sha def gen_boundary(): """Returns a random string to use as the boundary for a message""" bits = random.getrandbits(160) return sha.new(str(bits)).hexdigest() def encode_and_quote(data): """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8")) otherwise return urllib.quote_plus(data)""" if data is None: return None if isinstance(data, unicode): data = data.encode("utf-8") return urllib.quote_plus(data) def _strify(s): """If s is a unicode string, encode it to UTF-8 and return the results, otherwise return str(s), or None if s is None""" if s is None: return None if isinstance(s, unicode): return s.encode("utf-8") return str(s) class MultipartParam(object): """Represents a single parameter in a multipart/form-data request ``name`` is the name of this parameter. If ``value`` is set, it must be a string or unicode object to use as the data for this parameter. If ``filename`` is set, it is what to say that this parameter's filename is. Note that this does not have to be the actual filename any local file. If ``filetype`` is set, it is used as the Content-Type for this parameter. 
If unset it defaults to "text/plain; charset=utf8" If ``filesize`` is set, it specifies the length of the file ``fileobj`` If ``fileobj`` is set, it must be a file-like object that supports .read(). Both ``value`` and ``fileobj`` must not be set, doing so will raise a ValueError assertion. If ``fileobj`` is set, and ``filesize`` is not specified, then the file's size will be determined first by stat'ing ``fileobj``'s file descriptor, and if that fails, by seeking to the end of the file, recording the current position as the size, and then by seeking back to the beginning of the file. ``cb`` is a callable which will be called from iter_encode with (self, current, total), representing the current parameter, current amount transferred, and the total size. """ def __init__(self, name, value=None, filename=None, filetype=None, filesize=None, fileobj=None, cb=None): self.name = Header(name).encode() self.value = _strify(value) if filename is None: self.filename = None else: if isinstance(filename, unicode): # Encode with XML entities self.filename = filename.encode("ascii", "xmlcharrefreplace") else: self.filename = str(filename) self.filename = self.filename.encode("string_escape"). 
\ replace('"', '\\"') self.filetype = _strify(filetype) self.filesize = filesize self.fileobj = fileobj self.cb = cb if self.value is not None and self.fileobj is not None: raise ValueError("Only one of value or fileobj may be specified") if fileobj is not None and filesize is None: # Try and determine the file size try: self.filesize = os.fstat(fileobj.fileno()).st_size except (OSError, AttributeError, UnsupportedOperation): try: fileobj.seek(0, 2) self.filesize = fileobj.tell() fileobj.seek(0) except: raise ValueError("Could not determine filesize") def __cmp__(self, other): attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj'] myattrs = [getattr(self, a) for a in attrs] oattrs = [getattr(other, a) for a in attrs] return cmp(myattrs, oattrs) def reset(self): if self.fileobj is not None: self.fileobj.seek(0) elif self.value is None: raise ValueError("Don't know how to reset this parameter") @classmethod def from_file(cls, paramname, filename): """Returns a new MultipartParam object constructed from the local file at ``filename``. ``filesize`` is determined by os.path.getsize(``filename``) ``filetype`` is determined by mimetypes.guess_type(``filename``)[0] ``filename`` is set to os.path.basename(``filename``) """ return cls(paramname, filename=os.path.basename(filename), filetype=mimetypes.guess_type(filename)[0], filesize=os.path.getsize(filename), fileobj=open(filename, "rb")) @classmethod def from_params(cls, params): """Returns a list of MultipartParam objects from a sequence of name, value pairs, MultipartParam instances, or from a mapping of names to values The values may be strings or file objects, or MultipartParam objects. 
MultipartParam object names must match the given names in the name,value pairs or mapping, if applicable.""" if hasattr(params, 'items'): params = params.items() retval = [] for item in params: if isinstance(item, cls): retval.append(item) continue name, value = item if isinstance(value, cls): assert value.name == name retval.append(value) continue if hasattr(value, 'read'): # Looks like a file object filename = getattr(value, 'name', None) if filename is not None: filetype = mimetypes.guess_type(filename)[0] else: filetype = None retval.append(cls(name=name, filename=filename, filetype=filetype, fileobj=value)) else: retval.append(cls(name, value)) return retval def encode_hdr(self, boundary): """Returns the header of the encoding of this parameter""" boundary = encode_and_quote(boundary) headers = ["--%s" % boundary] if self.filename: disposition = 'form-data; name="%s"; filename="%s"' % (self.name, self.filename) else: disposition = 'form-data; name="%s"' % self.name headers.append("Content-Disposition: %s" % disposition) if self.filetype: filetype = self.filetype else: filetype = "text/plain; charset=utf-8" headers.append("Content-Type: %s" % filetype) headers.append("") headers.append("") return "\r\n".join(headers) def encode(self, boundary): """Returns the string encoding of this parameter""" if self.value is None: value = self.fileobj.read() else: value = self.value if re.search("^--%s$" % re.escape(boundary), value, re.M): raise ValueError("boundary found in encoded string") return "%s%s\r\n" % (self.encode_hdr(boundary), value) def iter_encode(self, boundary, blocksize=4096): """Yields the encoding of this parameter If self.fileobj is set, then blocks of ``blocksize`` bytes are read and yielded.""" total = self.get_size(boundary) current = 0 if self.value is not None: block = self.encode(boundary) current += len(block) yield block if self.cb: self.cb(self, current, total) else: block = self.encode_hdr(boundary) current += len(block) yield block if 
self.cb: self.cb(self, current, total) last_block = "" encoded_boundary = "--%s" % encode_and_quote(boundary) boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary), re.M) while True: block = self.fileobj.read(blocksize) if not block: current += 2 yield "\r\n" if self.cb: self.cb(self, current, total) break last_block += block if boundary_exp.search(last_block): raise ValueError("boundary found in file data") last_block = last_block[-len(encoded_boundary) - 2:] current += len(block) yield block if self.cb: self.cb(self, current, total) def get_size(self, boundary): """Returns the size in bytes that this param will be when encoded with the given boundary.""" if self.filesize is not None: valuesize = self.filesize else: valuesize = len(self.value) return len(self.encode_hdr(boundary)) + 2 + valuesize def encode_string(boundary, name, value): """Returns ``name`` and ``value`` encoded as a multipart/form-data variable. ``boundary`` is the boundary string used throughout a single request to separate variables.""" return MultipartParam(name, value).encode(boundary) def encode_file_header(boundary, paramname, filesize, filename=None, filetype=None): """Returns the leading data for a multipart/form-data field that contains file data. ``boundary`` is the boundary string used throughout a single request to separate variables. ``paramname`` is the name of the variable in this request. ``filesize`` is the size of the file data. ``filename`` if specified is the filename to give to this field. This field is only useful to the server for determining the original filename. ``filetype`` if specified is the MIME type of this file. The actual file data should be sent after this header has been sent. 
""" return MultipartParam(paramname, filesize=filesize, filename=filename, filetype=filetype).encode_hdr(boundary) def get_body_size(params, boundary): """Returns the number of bytes that the multipart/form-data encoding of ``params`` will be.""" size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params)) return size + len(boundary) + 6 def get_headers(params, boundary): """Returns a dictionary with Content-Type and Content-Length headers for the multipart/form-data encoding of ``params``.""" headers = {} boundary = urllib.quote_plus(boundary) headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary headers['Content-Length'] = str(get_body_size(params, boundary)) return headers class MultipartYielder: def __init__(self, params, boundary, cb): self.params = params self.boundary = boundary self.cb = cb self.i = 0 self.p = None self.param_iter = None self.current = 0 self.total = get_body_size(params, boundary) def __iter__(self): return self def next(self): """generator function to yield multipart/form-data representation of parameters""" if self.param_iter is not None: try: block = self.param_iter.next() self.current += len(block) if self.cb: self.cb(self.p, self.current, self.total) return block except StopIteration: self.p = None self.param_iter = None if self.i is None: raise StopIteration elif self.i >= len(self.params): self.param_iter = None self.p = None self.i = None block = "--%s--\r\n" % self.boundary self.current += len(block) if self.cb: self.cb(self.p, self.current, self.total) return block self.p = self.params[self.i] self.param_iter = self.p.iter_encode(self.boundary) self.i += 1 return self.next() def reset(self): self.i = 0 self.current = 0 for param in self.params: param.reset() def multipart_encode(params, boundary=None, cb=None): """Encode ``params`` as multipart/form-data. ``params`` should be a sequence of (name, value) pairs or MultipartParam objects, or a mapping of names to values. 
Values are either strings parameter values, or file-like objects to use as the parameter value. The file-like objects must support .read() and either .fileno() or both .seek() and .tell(). If ``boundary`` is set, then it as used as the MIME boundary. Otherwise a randomly generated boundary will be used. In either case, if the boundary string appears in the parameter values a ValueError will be raised. If ``cb`` is set, it should be a callback which will get called as blocks of data are encoded. It will be called with (param, current, total), indicating the current parameter being encoded, the current amount encoded, and the total amount to encode. Returns a tuple of `datagen`, `headers`, where `datagen` is a generator that will yield blocks of data that make up the encoded parameters, and `headers` is a dictionary with the assoicated Content-Type and Content-Length headers. Examples: >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] ) >>> s = "".join(datagen) >>> assert "value2" in s and "value1" in s >>> p = MultipartParam("key", "value2") >>> datagen, headers = multipart_encode( [("key", "value1"), p] ) >>> s = "".join(datagen) >>> assert "value2" in s and "value1" in s >>> datagen, headers = multipart_encode( {"key": "value1"} ) >>> s = "".join(datagen) >>> assert "value2" not in s and "value1" in s """ if boundary is None: boundary = gen_boundary() else: boundary = urllib.quote_plus(boundary) headers = get_headers(params, boundary) params = MultipartParam.from_params(params) return MultipartYielder(params, boundary, cb), headers
mit
yencarnacion/jaikuengine
.google_appengine/lib/django-1.3/django/core/files/uploadedfile.py
402
4225
""" Classes representing uploaded files. """ import os try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from django.conf import settings from django.core.files.base import File from django.core.files import temp as tempfile from django.utils.encoding import smart_str __all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile', 'SimpleUploadedFile') class UploadedFile(File): """ A abstract uploaded file (``TemporaryUploadedFile`` and ``InMemoryUploadedFile`` are the built-in concrete subclasses). An ``UploadedFile`` object behaves somewhat like a file object and represents some file data that the user submitted with a form. """ DEFAULT_CHUNK_SIZE = 64 * 2**10 def __init__(self, file=None, name=None, content_type=None, size=None, charset=None): super(UploadedFile, self).__init__(file, name) self.size = size self.content_type = content_type self.charset = charset def __repr__(self): return "<%s: %s (%s)>" % ( self.__class__.__name__, smart_str(self.name), self.content_type) def _get_name(self): return self._name def _set_name(self, name): # Sanitize the file name so that it can't be dangerous. if name is not None: # Just use the basename of the file -- anything else is dangerous. name = os.path.basename(name) # File names longer than 255 characters can cause problems on older OSes. if len(name) > 255: name, ext = os.path.splitext(name) name = name[:255 - len(ext)] + ext self._name = name name = property(_get_name, _set_name) class TemporaryUploadedFile(UploadedFile): """ A file uploaded to a temporary location (i.e. stream-to-disk). 
""" def __init__(self, name, content_type, size, charset): if settings.FILE_UPLOAD_TEMP_DIR: file = tempfile.NamedTemporaryFile(suffix='.upload', dir=settings.FILE_UPLOAD_TEMP_DIR) else: file = tempfile.NamedTemporaryFile(suffix='.upload') super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset) def temporary_file_path(self): """ Returns the full path of this file. """ return self.file.name def close(self): try: return self.file.close() except OSError, e: if e.errno != 2: # Means the file was moved or deleted before the tempfile # could unlink it. Still sets self.file.close_called and # calls self.file.file.close() before the exception raise class InMemoryUploadedFile(UploadedFile): """ A file uploaded into memory (i.e. stream-to-memory). """ def __init__(self, file, field_name, name, content_type, size, charset): super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset) self.field_name = field_name def open(self, mode=None): self.file.seek(0) def close(self): pass def chunks(self, chunk_size=None): self.file.seek(0) yield self.read() def multiple_chunks(self, chunk_size=None): # Since it's in memory, we'll never have multiple chunks. return False class SimpleUploadedFile(InMemoryUploadedFile): """ A simple representation of a file, which just has content, size, and a name. """ def __init__(self, name, content, content_type='text/plain'): content = content or '' super(SimpleUploadedFile, self).__init__(StringIO(content), None, name, content_type, len(content), None) def from_dict(cls, file_dict): """ Creates a SimpleUploadedFile object from a dictionary object with the following keys: - filename - content-type - content """ return cls(file_dict['filename'], file_dict['content'], file_dict.get('content-type', 'text/plain')) from_dict = classmethod(from_dict)
apache-2.0
osrf/opensplice
src/api/dcps/python/test/test_listeners.py
2
22244
# # Vortex OpenSplice # # This software and documentation are Copyright 2006 to TO_YEAR ADLINK # Technology Limited, its affiliated companies and licensors. All rights # reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ''' Created on Dec 27, 2017 @author: prismtech ''' import unittest from dds import Listener, DomainParticipant, Qos, DurabilityQosPolicy, DDSDurabilityKind, DDSException,\ DeadlineQosPolicy, DDSDuration, LivelinessQosPolicy, DDSLivelinessKind,\ OwnershipQosPolicy, DDSOwnershipKind, ResourceLimitsQosPolicy,\ DestinationOrderQosPolicy, DDSDestinationOrderKind, DDSTime,\ PublicationMatchedStatus, SubscriptionMatchedStatus,\ OfferedDeadlineMissedStatus, OfferedIncompatibleQosStatus, QosPolicyId,\ LivelinessLostStatus, LivelinessChangedStatus, RequestedDeadlineMissedStatus,\ RequestedIncompatibleQosStatus, SampleRejectedStatus,\ DDSSampleRejectedStatusKind, SampleLostStatus import ddsutil import os import threading import time from symbol import nonlocal_stmt import collections Info = collections.namedtuple('Info', ['name', 'type']) class TestListeners(unittest.TestCase): idl_path = os.path.join('idl', 'Shapes.idl') shape_type_name = 'ShapeType' time_out = 10.0 def _check_status(self, status, type, field_info): self.assertIsInstance(status, type, 'status is not {}'.format(type)) self.assertEqual(len(field_info), len(type._fields), 'incorrect number of field_info entries') for n, t in field_info: self.assertTrue(hasattr(status,n), 'status does not have attr 
{}'.format(n)) self.assertIsInstance(getattr(status,n), t, 'status.{} is not a {}'.format(n,t)) def test_on_data_available(self): topic_name = 'ST_on_data_available' event = threading.Event() dp1 = DomainParticipant() gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') t1 = gci.register_topic(dp1, topic_name) wr1 = dp1.create_datawriter(t1) class L(Listener): def on_data_available(self,_): event.set() dp2 = DomainParticipant() t2 = gci.register_topic(dp2, topic_name) rd2 = dp2.create_datareader(t2,listener=L()) data = ShapeType(color='RED',x=1,y=2,z=3,t=Inner(foo=4)) wr1.write(data) self.assertTrue(event.wait(self.time_out),'Did not receive on_data_available') def test_on_inconsistent_topic(self): ''' from: osplo/testsuite/dbt/api/dcps/c99/utest/listener/code/listener_utests.c It's not that easy for OpenSplice to generate inconsistent_topic events. However, it is build on top of SAC and it works on that language binding. We can assume that this test succeeds when the other listener test pass as well... 
So, we will just check that the listener's actually got installed ''' topic_name = 'ST_on_inconsistent_topic' event = threading.Event() gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) # gci2 = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name + '2') class L(Listener): def on_inconsistent_topic(self, topic, status): print('on_inconsistent_topic triggered: topic name = {}, total_count = {}, total_change_count = {}' .format(topic.get_name(), status.total_coutn, status.total_change_count)) event.set() dp1 = DomainParticipant(listener=L()) self.assertIsNotNone(dp1.listener, "DomainParticipant Listener was not set") t1 = gci.register_topic(dp1, topic_name, listener=L()) self.assertIsNotNone(t1.listener, "Topic Listener was not set") # t2qos = Qos([DurabilityQosPolicy(DDSDurabilityKind.PERSISTENT)]) # try: # t2 = gci2.register_topic(dp2, topic_name, qos=None) # self.fail("expected this topic registeration to fail") # except DDSException as e: # pass # # try: # self.assertTrue(self.event.wait(self.time_out),'Did not receive on_inconsistent_topic') # finally: # pass def test_data_available_listeners(self): dp_on_data_available_event = threading.Event() dp_on_publication_matched_event = threading.Event() dp_on_subscription_matched_event = threading.Event() p_on_publication_matched_event = threading.Event() s_on_data_available_event = threading.Event() s_on_subscription_matched_event = threading.Event() wr_on_publication_matched_event = threading.Event() rd_on_data_available_event = threading.Event() rd_on_subscription_matched_event = threading.Event() opm_event = threading.Event() osm_event = threading.Event() oda_event = threading.Event() pub_match_status = None sub_match_status = None class DPL(Listener): def on_data_available(self,reader): dp_on_data_available_event.set() oda_event.set() def on_publication_matched(self,writer,status): dp_on_publication_matched_event.set() opm_event.set() def 
on_subscription_matched(self,reader,status): dp_on_subscription_matched_event.set() osm_event.set() class PL(Listener): def on_publication_matched(self,writer, status): p_on_publication_matched_event.set() class SL(Listener): def on_data_available(self,reader): s_on_data_available_event.set() oda_event.set() def on_subscription_matched(self,reader, status): s_on_subscription_matched_event.set() osm_event.set() class WL(Listener): def on_publication_matched(self,writer, status): nonlocal pub_match_status pub_match_status = status wr_on_publication_matched_event.set() opm_event.set() class RL(Listener): def on_data_available(self,reader): rd_on_data_available_event.set() oda_event.set() def on_subscription_matched(self,reader, status): nonlocal sub_match_status sub_match_status = status rd_on_subscription_matched_event.set() osm_event.set() dp = DomainParticipant(listener=DPL()) self.assertIsInstance(dp.listener, DPL, 'listener is not a DPL') topic_name = 'ST_data_available_listeners' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) pub = dp.create_publisher(listener=PL()) self.assertIsInstance(pub.listener, PL, 'listener is not a PL') sub = dp.create_subscriber(listener=SL()) self.assertIsInstance(sub.listener, SL, 'listener is not a SL') wr = pub.create_datawriter(t, listener=WL()) self.assertIsInstance(wr.listener, WL, 'listener is not a WL') rd = sub.create_datareader(t, listener=RL()) self.assertIsInstance(rd.listener, RL, 'listener is not a RL') ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) # time.sleep(1.0) wr.write(data) TriggerState = collections.namedtuple('TriggerState',[ 'opm', 'osm', 'oda', ]) actual_trigger_state = TriggerState( opm_event.wait(self.time_out), osm_event.wait(self.time_out), oda_event.wait(self.time_out)) print(actual_trigger_state) self.assertEqual( actual_trigger_state, 
TriggerState(True, True, True) , 'Not all events triggered') EventState = collections.namedtuple('EventState',[ 'dp_opm', 'p_opm', 'wr_opm', 'dp_osm', 's_osm', 'rd_osm', 'dp_oda', 's_oda', 'rd_oda', ]) actual_event_state = EventState( dp_on_publication_matched_event.is_set(), p_on_publication_matched_event.is_set(), wr_on_publication_matched_event.is_set(), dp_on_subscription_matched_event.is_set(), s_on_subscription_matched_event.is_set(), rd_on_subscription_matched_event.is_set(), dp_on_data_available_event.is_set(), s_on_data_available_event.is_set(), rd_on_data_available_event.is_set(), ) expected_event_state = EventState( False, False, True, False, False, True, False, False, True, ) print(actual_event_state) self.assertEqual(actual_event_state, expected_event_state, 'Incorrect listeners triggered') # time.sleep(1.0) # self.assertTrue(wr_on_publication_matched_event.wait(self.time_out), 'wr_on_publication_matched_event') # self.assertTrue(rd_on_subscription_matched_event.wait(self.time_out), 'rd_on_subscription_matched_event') self._check_status(pub_match_status, PublicationMatchedStatus, [ Info('total_count', int), Info('total_count_change', int), Info('current_count', int), Info('current_count_change', int), Info('last_subscription_handle', int), ]) self._check_status(sub_match_status, SubscriptionMatchedStatus, [ Info('total_count', int), Info('total_count_change', int), Info('current_count', int), Info('current_count_change', int), Info('last_publication_handle', int), ]) def test_on_offered_deadline_missed(self): handlerTriggered = threading.Event() write_time = 0.0 delay = 0.0 saved_status = None class L(Listener): def on_offered_deadline_missed(self, writer, status): nonlocal delay nonlocal saved_status handlerTriggered.set() saved_status = status delay = time.time() - write_time dp = DomainParticipant() topic_name = 'ST_on_offered_deadline_missed' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, 
topic_name) wqos = Qos(policies=[ DeadlineQosPolicy(DDSDuration(1,0)) ]) wr = dp.create_datawriter(t, wqos, L()) rd = dp.create_datareader(t) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) wr.write(data) write_time = time.time() self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self.assertGreaterEqual(delay, 1.0 - 0.05, 'Delay not >= 1.0s') self._check_status(saved_status, OfferedDeadlineMissedStatus, [ Info('total_count', int), Info('total_count_change', int), Info('last_instance_handle', int), ]) def test_on_offered_incompatible_qos(self): handlerTriggered = threading.Event() saved_status = None class L(Listener): def on_offered_incompatible_qos(self, writer, status): nonlocal saved_status saved_status = status handlerTriggered.set() dp = DomainParticipant() topic_name = 'ST_on_offered_incompatible_qos' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) wqos = Qos(policies=[ DurabilityQosPolicy(DDSDurabilityKind.VOLATILE) ]) rqos = Qos(policies=[ DurabilityQosPolicy(DDSDurabilityKind.TRANSIENT) ]) wr = dp.create_datawriter(t, wqos, L()) rd = dp.create_datareader(t,rqos) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) wr.write(data) self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self._check_status(saved_status, OfferedIncompatibleQosStatus, [ Info('total_count', int), Info('total_count_change', int), Info('last_policy_id', QosPolicyId), ]) def test_liveliness(self): handlerTriggered = threading.Event() aliveTriggered = threading.Event() notaliveTriggered = threading.Event() write_time = 0.0 delay = 0.0 saved_lost_status = None saved_changed_status = None class L(Listener): def on_liveliness_lost(self, writer, status): nonlocal delay nonlocal saved_lost_status 
saved_lost_status = status handlerTriggered.set() delay = time.time() - write_time class RL(Listener): def on_liveliness_changed(self, reader, status): nonlocal saved_changed_status saved_changed_status = status if status.alive_count == 1: aliveTriggered.set() else: notaliveTriggered.set() qos = Qos(policies=[ LivelinessQosPolicy(DDSLivelinessKind.MANUAL_BY_TOPIC, DDSDuration(1,0)), OwnershipQosPolicy(DDSOwnershipKind.EXCLUSIVE) ]) dp = DomainParticipant() topic_name = 'ST_liveliness' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name, qos) wr = dp.create_datawriter(t, qos=qos, listener=L()) rd = dp.create_datareader(t, qos=qos, listener=RL()) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) wr.write(data) write_time = time.time() self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self.assertGreaterEqual(delay, 1.0 - 0.05, 'Delay not >= 1.0s') self.assertTrue(aliveTriggered.wait(self.time_out), 'Alive not signaled to reader') self.assertTrue(notaliveTriggered.wait(self.time_out), 'Not Alive not signaled to reader') self._check_status(saved_lost_status, LivelinessLostStatus, [ Info('total_count', int), Info('total_count_change', int), ]) self._check_status(saved_changed_status, LivelinessChangedStatus, [ Info('alive_count', int), Info('not_alive_count', int), Info('alive_count_change', int), Info('not_alive_count_change', int), Info('last_publication_handle', int), ]) def test_on_requested_deadline_missed(self): handlerTriggered = threading.Event() write_time = 0.0 delay = 0.0 saved_status = None class L(Listener): def on_requested_deadline_missed(self, reader, status): nonlocal delay nonlocal saved_status saved_status = status handlerTriggered.set() delay = time.time() - write_time dp = DomainParticipant() topic_name = 'ST_on_requested_deadline_missed' gci = 
ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) qos = Qos(policies=[ DeadlineQosPolicy(DDSDuration(1,0)) ]) wr = dp.create_datawriter(t, qos) rd = dp.create_datareader(t, qos, L()) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) wr.write(data) write_time = time.time() self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self.assertGreaterEqual(delay, 1.0 - 0.05, 'Delay not >= 1.0s') self._check_status(saved_status, RequestedDeadlineMissedStatus, [ Info('total_count', int), Info('total_count_change', int), Info('last_instance_handle', int), ]) def test_on_requested_incompatible_qos(self): handlerTriggered = threading.Event() saved_status = None class L(Listener): def on_requested_incompatible_qos(self, reader, status): nonlocal saved_status saved_status = status handlerTriggered.set() dp = DomainParticipant() topic_name = 'ST_test_on_requested_incompatible_qos' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) wqos = Qos(policies=[ DurabilityQosPolicy(DDSDurabilityKind.VOLATILE) ]) rqos = Qos(policies=[ DurabilityQosPolicy(DDSDurabilityKind.TRANSIENT) ]) wr = dp.create_datawriter(t, wqos) rd = dp.create_datareader(t,rqos, L()) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) wr.write(data) self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self._check_status(saved_status, RequestedIncompatibleQosStatus, [ Info('total_count', int), Info('total_count_change', int), Info('last_policy_id', QosPolicyId), ]) def test_on_sample_rejected(self): handlerTriggered = threading.Event() saved_status = None class L(Listener): def on_sample_rejected(self, reader, status): nonlocal saved_status saved_status = status 
handlerTriggered.set() dp = DomainParticipant() topic_name = 'ST_on_sample_rejected' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) qos = Qos(policies=[ ResourceLimitsQosPolicy(max_samples=1) ]) wr = dp.create_datawriter(t) rd = dp.create_datareader(t, qos, L()) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data1 = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) data2 = ShapeType(color='BLUE', x=222, y=233, z=244, t=Inner(foo=255)) wr.write(data1) self.assertFalse(handlerTriggered.is_set(), 'Event already triggered') wr.write(data2) self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self._check_status(saved_status, SampleRejectedStatus, [ Info('total_count', int), Info('total_count_change', int), Info('last_reason', DDSSampleRejectedStatusKind), Info('last_instance_handle', int), ]) def test_on_sample_lost(self): handlerTriggered = threading.Event() saved_status = None class L(Listener): def on_sample_lost(self, reader, status): nonlocal saved_status saved_status = status handlerTriggered.set() qos = Qos(policies=[ DestinationOrderQosPolicy(DDSDestinationOrderKind.BY_SOURCE_TIMESTAMP) ]) dp = DomainParticipant() topic_name = 'ST_on_sample_lost' gci = ddsutil.get_dds_classes_from_idl(self.idl_path, self.shape_type_name) t = gci.register_topic(dp, topic_name) wr = dp.create_datawriter(t, qos) rd = dp.create_datareader(t, qos, L()) ShapeType = gci.get_class('ShapeType') Inner = gci.get_class('Inner') data1 = ShapeType(color='GREEN', x=22, y=33, z=44, t=Inner(foo=55)) t1 = DDSTime(1000,0) t2 = DDSTime(1001,0) # write out-of-order samples wr.write_ts(data1, t2) rd.take() wr.write_ts(data1, t1) self.assertTrue(handlerTriggered.wait(self.time_out), 'Event not triggered') self._check_status(saved_status, SampleLostStatus, [ Info('total_count', int), Info('total_count_change', int), ]) if __name__ == "__main__": #import sys;sys.argv = ['', 
'Test.testName'] unittest.main()
apache-2.0
DarkmatterVale/HaikuPorts-Cleaner
Options.py
1
1124
# -*- coding: utf-8 -*- # # Copyright 2016 Vale Tolpegin # Distributed under the terms of the MIT License. # -- Modules ------------------------------------------------------------------ from optparse import OptionParser # -- global options ----------------------------------------------------------- global __Options__ # -- getOption ===------------------------------------------------------------- def getOption(string): """ Fetches an option by name """ return getattr(__Options__, string) # -- splitCommaSeparatedList -------------------------------------------------- def setCommaSeparatedList(option, opt, value, parser): setattr(parser.values, option.dest, value.split(',')) # -- parseOptions ------------------------------------------------------------- def parseOptions(): """ Does command line argument parsing """ parser = OptionParser(usage='usage: %prog [options] portname[-portversion]', version='0.0.1') parser.add_option('-d', '--directory', dest='directory', help="haikuports directory") global __Options__ (__Options__, args) = parser.parse_args() return (__Options__, args)
mit
jelugbo/hebs_master
pavelib/utils/envs.py
6
5485
""" Helper functions for loading environment settings. """ from __future__ import print_function import os import sys import json from lazy import lazy from path import path import memcache class Env(object): """ Load information about the execution environment. """ # Root of the git repository (edx-platform) REPO_ROOT = path(__file__).abspath().parent.parent.parent # Reports Directory REPORT_DIR = REPO_ROOT / 'reports' # Bok_choy dirs BOK_CHOY_DIR = REPO_ROOT / "common" / "test" / "acceptance" BOK_CHOY_LOG_DIR = REPO_ROOT / "test_root" / "log" BOK_CHOY_REPORT_DIR = REPORT_DIR / "bok_choy" BOK_CHOY_COVERAGERC = BOK_CHOY_DIR / ".coveragerc" # For the time being, stubs are used by both the bok-choy and lettuce acceptance tests # For this reason, the stubs package is currently located in the Django app called "terrain" # where other lettuce configuration is stored. BOK_CHOY_STUB_DIR = REPO_ROOT / "common" / "djangoapps" / "terrain" # Directory that videos are served from VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video" BOK_CHOY_SERVERS = { 'lms': { 'port': 8003, 'log': BOK_CHOY_LOG_DIR / "bok_choy_lms.log" }, 'cms': { 'port': 8031, 'log': BOK_CHOY_LOG_DIR / "bok_choy_studio.log" } } BOK_CHOY_STUBS = { 'xqueue': { 'port': 8040, 'log': BOK_CHOY_LOG_DIR / "bok_choy_xqueue.log", 'config': 'register_submission_url=http://0.0.0.0:8041/test/register_submission', }, 'ora': { 'port': 8041, 'log': BOK_CHOY_LOG_DIR / "bok_choy_ora.log", 'config': '', }, 'comments': { 'port': 4567, 'log': BOK_CHOY_LOG_DIR / "bok_choy_comments.log", }, 'video': { 'port': 8777, 'log': BOK_CHOY_LOG_DIR / "bok_choy_video_sources.log", 'config': "root_dir={}".format(VIDEO_SOURCE_DIR), }, 'youtube': { 'port': 9080, 'log': BOK_CHOY_LOG_DIR / "bok_choy_youtube.log", } } # Mongo databases that will be dropped before/after the tests run BOK_CHOY_MONGO_DATABASE = "test" BOK_CHOY_CACHE = memcache.Client(['0.0.0.0:11211'], debug=0) # Test Ids Directory TEST_DIR = REPO_ROOT / ".testids" # Files 
used to run each of the js test suites # TODO: Store this as a dict. Order seems to matter for some # reason. See issue TE-415. JS_TEST_ID_FILES = [ REPO_ROOT / 'lms/static/js_test.yml', REPO_ROOT / 'lms/static/js_test_coffee.yml', REPO_ROOT / 'cms/static/js_test.yml', REPO_ROOT / 'cms/static/js_test_squire.yml', REPO_ROOT / 'common/lib/xmodule/xmodule/js/js_test.yml', REPO_ROOT / 'common/static/js_test.yml', ] JS_TEST_ID_KEYS = [ 'lms', 'lms-coffee', 'cms', 'cms-squire', 'xmodule', 'common', ] JS_REPORT_DIR = REPORT_DIR / 'javascript' # Directories used for common/lib/ tests LIB_TEST_DIRS = [] for item in (REPO_ROOT / "common/lib").listdir(): if (REPO_ROOT / 'common/lib' / item).isdir(): LIB_TEST_DIRS.append(path("common/lib") / item.basename()) LIB_TEST_DIRS.append(path("pavelib/paver_tests")) # Directory for i18n test reports I18N_REPORT_DIR = REPORT_DIR / 'i18n' # Service variant (lms, cms, etc.) configured with an environment variable # We use this to determine which envs.json file to load. SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None) # If service variant not configured in env, then pass the correct # environment for lms / cms if not SERVICE_VARIANT: # this will intentionally catch ""; if any(i in sys.argv[1:] for i in ('cms', 'studio')): SERVICE_VARIANT = 'cms' else: SERVICE_VARIANT = 'lms' @lazy def env_tokens(self): """ Return a dict of environment settings. If we couldn't find the JSON file, issue a warning and return an empty dict. 
""" # Find the env JSON file if self.SERVICE_VARIANT: env_path = self.REPO_ROOT.parent / "{service}.env.json".format(service=self.SERVICE_VARIANT) else: env_path = path("env.json").abspath() # If the file does not exist, here or one level up, # issue a warning and return an empty dict if not env_path.isfile(): env_path = env_path.parent.parent / env_path.basename() if not env_path.isfile(): print( "Warning: could not find environment JSON file " "at '{path}'".format(path=env_path), file=sys.stderr, ) return dict() # Otherwise, load the file as JSON and return the resulting dict try: with open(env_path) as env_file: return json.load(env_file) except ValueError: print( "Error: Could not parse JSON " "in {path}".format(path=env_path), file=sys.stderr, ) sys.exit(1) @lazy def feature_flags(self): """ Return a dictionary of feature flags configured by the environment. """ return self.env_tokens.get('FEATURES', dict())
agpl-3.0
JaviMerino/workload-automation
wlauto/core/entry_point.py
2
2969
#    Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


import sys
import argparse
import logging

from wlauto.core.bootstrap import settings
from wlauto.core.extension_loader import ExtensionLoader
from wlauto.exceptions import WAError
from wlauto.utils.misc import get_traceback
from wlauto.utils.log import init_logging
from wlauto.utils.cli import init_argument_parser
from wlauto.utils.doc import format_body

import warnings
# zope (pulled in transitively) emits noisy UserWarnings; silence them.
warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')


logger = logging.getLogger('command_line')


def load_commands(subparsers):
    """Discover WA command extensions and register each one's argument
    sub-parser, recording the command instances in ``settings.commands``."""
    ext_loader = ExtensionLoader(paths=settings.extension_paths)
    for command in ext_loader.list_commands():
        settings.commands[command.name] = ext_loader.get_command(command.name, subparsers=subparsers)


def main():
    """Entry point for the ``wa`` command-line tool.

    Parses global arguments, dispatches to the selected sub-command, and
    maps failures onto process exit codes:

        0 (or the command's own code) -- success
        1 -- known WA error
        2 -- syntax error in a user file, or unexpected exception
        3 -- interrupted with CTRL-C
    """
    try:
        description = ("Execute automated workloads on a remote device and process "
                       "the resulting output.\n\nUse \"wa <subcommand> -h\" to see "
                       "help for individual subcommands.")
        parser = argparse.ArgumentParser(description=format_body(description, 80),
                                         prog='wa',
                                         formatter_class=argparse.RawDescriptionHelpFormatter,
                                         )
        init_argument_parser(parser)
        load_commands(parser.add_subparsers(dest='command'))  # each command will add its own subparser
        args = parser.parse_args()
        settings.verbosity = args.verbose
        settings.debug = args.debug
        if args.config:
            settings.update(args.config)
        init_logging(settings.verbosity)

        command = settings.commands[args.command]
        sys.exit(command.execute(args))

    except KeyboardInterrupt:
        logging.info('Got CTRL-C. Aborting.')
        sys.exit(3)
    except WAError as e:
        # NOTE(fix): was the Python 2-only "except WAError, e:" form;
        # "as" is equivalent (2.6+), consistent with the SyntaxError
        # handler below, and keeps the module parseable by Python 3 tools.
        logging.critical(e)
        sys.exit(1)
    except SyntaxError as e:
        # A syntax error in a user-supplied agenda/config/extension file.
        message = 'Syntax Error in {}, line {}, offset {}:'
        logging.critical(message.format(e.filename, e.lineno, e.offset))
        logging.critical('\t{}'.format(e.msg))
        tb = get_traceback()
        logging.critical(tb)
        sys.exit(2)
    except Exception as e:  # pylint: disable=broad-except
        tb = get_traceback()
        logging.critical(tb)
        logging.critical('{}({})'.format(e.__class__.__name__, e))
        sys.exit(2)
apache-2.0
mcgachey/edx-platform
lms/djangoapps/certificates/management/commands/fix_ungraded_certs.py
29
1708
""" Management command which fixes ungraded certificates for students """ from certificates.models import GeneratedCertificate from courseware import grades, courses from django.test.client import RequestFactory from django.core.management.base import BaseCommand from optparse import make_option class Command(BaseCommand): """ Management command to find and grade all students that need to be graded. """ help = """ Find all students that need to be graded and grade them. """ option_list = BaseCommand.option_list + ( make_option( '-n', '--noop', action='store_true', dest='noop', default=False, help="Print but do not update the GeneratedCertificate table" ), make_option( '-c', '--course', metavar='COURSE_ID', dest='course', default=False, help='Grade ungraded users for this course' ), ) def handle(self, *args, **options): course_id = options['course'] print "Fetching ungraded students for {0}".format(course_id) ungraded = GeneratedCertificate.objects.filter( course_id__exact=course_id).filter(grade__exact='') course = courses.get_course_by_id(course_id) factory = RequestFactory() request = factory.get('/') for cert in ungraded: # grade the student grade = grades.grade(cert.user, request, course) print "grading {0} - {1}".format(cert.user, grade['percent']) cert.grade = grade['percent'] if not options['noop']: cert.save()
agpl-3.0
pku9104038/edx-platform
lms/envs/devgroups/courses.py
68
1305
# We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=W0401, W0614 from ..dev import * CLASSES_TO_DBS = { 'BerkeleyX/CS169.1x/2012_Fall': "cs169.db", 'BerkeleyX/CS188.1x/2012_Fall': "cs188_1.db", 'HarvardX/CS50x/2012': "cs50.db", 'HarvardX/PH207x/2012_Fall': "ph207.db", 'MITx/3.091x/2012_Fall': "3091.db", 'MITx/6.002x/2012_Fall': "6002.db", 'MITx/6.00x/2012_Fall': "600.db", } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': '127.0.0.1:11211', 'KEY_FUNCTION': 'util.memcache.safe_key', }, 'general': { 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', 'LOCATION': '127.0.0.1:11211', 'KEY_PREFIX': 'general', 'VERSION': 5, 'KEY_FUNCTION': 'util.memcache.safe_key', } } SESSION_ENGINE = 'django.contrib.sessions.backends.cache' def path_for_db(db_name): return ENV_ROOT / "db" / db_name def course_db_for(course_id): db_name = CLASSES_TO_DBS[course_id] return { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': path_for_db(db_name) } }
agpl-3.0
ankazhao/python-sparselda
training/topic_words_stat_test.py
1
1541
#!/usr/bin/env python #coding=utf-8 # Copyright(c) 2013 python-sparselda project. # Author: Lifeng Wang (ofandywang@gmail.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import sys sys.path.append('..') from common.model import Model from common.vocabulary import Vocabulary from topic_words_stat import TopicWordsStat class TopicWordsStatTest(unittest.TestCase): def setUp(self): self.model = Model(20) self.model.load('../testdata/lda_model') self.vocabulary = Vocabulary() self.vocabulary.load('../testdata/vocabulary.dat') self.topic_words_stat = TopicWordsStat(self.model, self.vocabulary) def test_save(self): print self.topic_words_stat.save('../testdata/topic_top_words.dat', 0.8) def test_get_topic_top_words(self): print self.topic_words_stat.get_topic_top_words(0.8) def test_compute_topic_word_distribution(self): print self.topic_words_stat.compute_topic_word_distribution() if __name__ == '__main__': unittest.main()
apache-2.0
xedin/swift
utils/swift_build_sdk_interfaces.py
6
17227
#!/usr/bin/env python from __future__ import print_function import argparse import errno import itertools import json import multiprocessing import os import shutil import subprocess import sys import traceback BARE_INTERFACE_SEARCH_PATHS = [ "usr/lib/swift", "System/iOSSupport/usr/lib/swift" ] DEFAULT_FRAMEWORK_INTERFACE_SEARCH_PATHS = [ "System/Library/Frameworks", "System/iOSSupport/System/Library/Frameworks" ] STDLIB_NAME = 'Swift' MONOTONIC_VERSION = 1 def create_parser(): parser = argparse.ArgumentParser( description="Builds an SDK's swiftinterfaces into swiftmodules. " "Always searches usr/lib/swift in addition to whichever " "framework directories are passed on the command line.", prog=os.path.basename(__file__), usage='%(prog)s -o output/ [INTERFACE_SEARCH_DIRS]', epilog='Environment variables: SDKROOT, SWIFT_EXEC, ' 'SWIFT_FORCE_MODULE_LOADING') parser.add_argument('interface_framework_dirs', nargs='*', metavar='INTERFACE_SEARCH_DIRS', help='Relative paths to search for frameworks with ' 'interfaces (default: System/Library/Frameworks)') parser.add_argument('-o', dest='output_dir', help='Directory to which the output will be emitted ' '(required)') parser.add_argument('-j', dest='jobs', type=int, help='The number of parallel jobs to execute ' '(default: # of cores)') parser.add_argument('-v', dest='verbose', action='store_true', help='Print command invocations and progress info') parser.add_argument('-n', dest='dry_run', action='store_true', help='Dry run: don\'t actually run anything') parser.add_argument('-sdk', default=os.getenv('SDKROOT'), help='SDK to find frameworks and interfaces in ' '(default: $SDKROOT)') parser.add_argument('-F', dest='framework_dirs', metavar='DIR', action='append', default=[], help='Add additional framework search paths') parser.add_argument('-Fsystem', '-iframework', dest='system_framework_dirs', metavar='DIR', action='append', default=[], help='Add additional system framework search paths') 
parser.add_argument('-Fsystem-iosmac', dest='iosmac_system_framework_dirs', metavar='DIR', action='append', default=[], help='Add system framework search paths ' 'for iOSMac only') parser.add_argument('-I', dest='include_dirs', metavar='DIR', action='append', default=[], help='Add additional header/module search paths') parser.add_argument('-module-cache-path', help='Temporary directory to store intermediate info') parser.add_argument('-log-path', help='Directory to write stdout/stderr output to') parser.add_argument('-skip-stdlib', action='store_true', help='Don\'t build the standard library interface') parser.add_argument('-disable-modules-validate-system-headers', action='store_true', help='Disable modules verification for system headers') parser.add_argument('-xfails', metavar='PATH', help='JSON file containing an array of the modules ' 'expected to fail') parser.add_argument('-check-only', action='store_true', help='Assume the resulting modules will be thrown ' 'away (may be faster)') parser.add_argument('-ignore-non-stdlib-failures', action='store_true', help='Treat all modules but the stdlib as XFAILed') parser.add_argument('-debug-crash-compiler', action='store_true', help='Have the compiler crash (for testing purposes)') parser.add_argument('-machine-parseable-monotonic-version', action='store_true', help='For comparing versions of this tool') return parser def fatal(msg): print(msg, file=sys.stderr) sys.exit(1) def run_command(args, dry_run): if dry_run: return (0, "", "") proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) try: out, err = proc.communicate() exitcode = proc.returncode return (exitcode, out, err) except KeyboardInterrupt: proc.terminate() raise def make_dirs_if_needed(path, dry_run): if dry_run: return try: os.makedirs(path) except OSError as e: if e.errno != errno.EEXIST: raise class NegatedSet: def __init__(self, contents): self._contents = frozenset(contents) def __contains__(self, item): return item not in 
self._contents class ModuleFile: def __init__(self, name, path, is_expected_to_fail): self.name = name self.path = path self.is_expected_to_fail = is_expected_to_fail def collect_slices(xfails, swiftmodule_dir): if not os.path.isdir(swiftmodule_dir): return module_name, extension = \ os.path.splitext(os.path.basename(swiftmodule_dir)) assert extension == ".swiftmodule" is_xfail = module_name in xfails for entry in os.listdir(swiftmodule_dir): _, extension = os.path.splitext(entry) if extension == ".swiftinterface": yield ModuleFile(module_name, os.path.join(swiftmodule_dir, entry), is_xfail) def collect_framework_modules(sdk, xfails, sdk_relative_framework_dirs): for sdk_relative_framework_dir in sdk_relative_framework_dirs: framework_dir = os.path.join(sdk, sdk_relative_framework_dir) if not os.access(framework_dir, os.R_OK): continue for entry in os.listdir(framework_dir): path_without_extension, extension = os.path.splitext(entry) if extension != ".framework": continue module_name = os.path.basename(path_without_extension) swiftmodule = os.path.join(framework_dir, entry, "Modules", module_name + ".swiftmodule") if os.access(swiftmodule, os.R_OK): for x in collect_slices(xfails, swiftmodule): yield x def collect_non_framework_modules(sdk, xfails, sdk_relative_search_dirs): for sdk_relative_search_dir in sdk_relative_search_dirs: search_dir = os.path.join(sdk, sdk_relative_search_dir) for dir_path, _, file_names in os.walk(search_dir, followlinks=True): if os.path.splitext(dir_path)[1] == ".swiftmodule": for x in collect_slices(xfails, dir_path): yield x else: for interface in file_names: module_name, extension = os.path.splitext(interface) if extension == ".swiftinterface": is_xfail = module_name in xfails yield ModuleFile(module_name, os.path.join(dir_path, interface), is_xfail) def should_retry_compilation(stderr): if "has been modified since the module file" in stderr: return True if "mismatched umbrella headers in submodule" in stderr: return True if "is out 
of date and needs to be rebuilt: signature mismatch" in stderr: return True if "current parser token 'include'" in stderr: return True if "current parser token 'import'" in stderr: return True return False def run_with_module_cache_retry(command_args, module_cache_path, dry_run): """Hack: runs a command several times, clearing the module cache if we get an error about header files being modified during the run. This shouldn't be necessary (the cached PCM files should automatically be regenerated) but there seems to still be a bug in Clang that we haven't tracked down yet. """ RETRIES = 3 attempts_stderr = "" for r in range(RETRIES): status, stdout, stderr = run_command(command_args, dry_run) if status == 0: break if not should_retry_compilation(stderr): break if module_cache_path: shutil.rmtree(module_cache_path, ignore_errors=True) # If all retries fail, output information for each instance. attempts_stderr += ( "\n*** Compilation attempt {}/{} failed with modules bugs. " "Error output:\n".format(r + 1, RETRIES)) attempts_stderr += stderr stderr = attempts_stderr return (status, stdout, stderr) def log_output_to_file(content, module_name, interface_base, label, log_path): if not log_path: return if not content: return make_dirs_if_needed(log_path, dry_run=False) log_name = module_name + "-" + interface_base + "-" + label + ".txt" with open(os.path.join(log_path, log_name), "w") as output_file: output_file.write(content) def looks_like_iosmac(interface_base): return 'ios-macabi' in interface_base def rename_interface_for_iosmac_if_needed(interface_base, module_path): """Hack: Both macOS and iOSMac use 'x86_64' as the short name for a module interface file, and while we want to move away from this it's something we need to handle in the short term. Manually rename these to the full form of the target-specific module when we're obviously on macOS or iOSMac. 
""" if interface_base != 'x86_64': return interface_base if '/iOSSupport/' in module_path: return 'x86_64-apple-ios-macabi' if '/MacOS' in module_path: return 'x86_64-apple-macos' return interface_base def process_module(module_file): global args, shared_output_lock try: interface_base, _ = \ os.path.splitext(os.path.basename(module_file.path)) interface_base = \ rename_interface_for_iosmac_if_needed(interface_base, module_file.path) swiftc = os.getenv('SWIFT_EXEC', os.path.join(os.path.dirname(__file__), 'swiftc')) command_args = [ swiftc, '-frontend', '-build-module-from-parseable-interface', '-sdk', args.sdk, '-prebuilt-module-cache-path', args.output_dir, '-track-system-dependencies' ] module_cache_path = "" if args.module_cache_path: module_cache_path = os.path.join(args.module_cache_path, str(os.getpid())) command_args += ('-module-cache-path', module_cache_path) if args.debug_crash_compiler: command_args += ('-debug-crash-immediately',) if not args.check_only: command_args += ( '-serialize-parseable-module-interface-dependency-hashes',) if args.disable_modules_validate_system_headers: command_args += ( '-disable-modules-validate-system-headers',) # FIXME: This shouldn't be necessary, but the module name is checked # before the frontend action is. 
if module_file.name == STDLIB_NAME: command_args += ('-parse-stdlib',) if looks_like_iosmac(interface_base): for system_framework_path in args.iosmac_system_framework_dirs: command_args += ('-Fsystem', system_framework_path) command_args += ('-Fsystem', os.path.join(args.sdk, "System", "iOSSupport", "System", "Library", "Frameworks")) for include_path in args.include_dirs: command_args += ('-I', include_path) for system_framework_path in args.system_framework_dirs: command_args += ('-Fsystem', system_framework_path) for framework_path in args.framework_dirs: command_args += ('-F', framework_path) command_args += ('-module-name', module_file.name, module_file.path) output_path = os.path.join(args.output_dir, module_file.name + ".swiftmodule") if interface_base != module_file.name: make_dirs_if_needed(output_path, args.dry_run) output_path = os.path.join(output_path, interface_base + ".swiftmodule") command_args += ('-o', output_path) if args.verbose: with shared_output_lock: print("# Starting " + module_file.path) print(' '.join(command_args)) sys.stdout.flush() status, stdout, stderr = run_with_module_cache_retry( command_args, module_cache_path=module_cache_path, dry_run=args.dry_run) log_output_to_file(stdout, module_file.name, interface_base, "out", log_path=args.log_path) log_output_to_file(stderr, module_file.name, interface_base, "err", log_path=args.log_path) return (module_file, status, stdout, stderr) except BaseException: # We're catching everything here because we don't want to take down the # other jobs. 
return (module_file, 1, "", "".join(traceback.format_exception(*sys.exc_info()))) def set_up_child(parent_args, lock): global args, shared_output_lock args = parent_args shared_output_lock = lock def process_module_files(pool, module_files): results = pool.imap_unordered(process_module, module_files) overall_exit_status = 0 for (module_file, exit_status, stdout, stderr) in results: with shared_output_lock: if exit_status != 0: print("# ", end="") if module_file.is_expected_to_fail: print("(XFAIL) ", end="") else: print("(FAIL) ", end="") print(module_file.path) if (not module_file.is_expected_to_fail) or args.verbose: print(stdout, end="") print(stderr, end="", file=sys.stderr) elif module_file.is_expected_to_fail: print("# (UPASS) " + module_file.path) elif args.verbose: print("# (PASS) " + module_file.path) sys.stdout.flush() if overall_exit_status == 0 and \ not module_file.is_expected_to_fail: overall_exit_status = exit_status return overall_exit_status def main(): global args, shared_output_lock parser = create_parser() args = parser.parse_args() if args.machine_parseable_monotonic_version: print(MONOTONIC_VERSION) sys.exit(0) if 'SWIFT_FORCE_MODULE_LOADING' not in os.environ: os.environ['SWIFT_FORCE_MODULE_LOADING'] = 'prefer-serialized' if not args.output_dir: fatal("argument -o is required") if not args.sdk: fatal("SDKROOT must be set in the environment") if not os.path.isdir(args.sdk): fatal("invalid SDK: " + args.sdk) xfails = () if args.ignore_non_stdlib_failures: if args.xfails: print("warning: ignoring -xfails because " "-ignore-non-stdlib-failures was provided", file=sys.stderr) xfails = NegatedSet((STDLIB_NAME,)) elif args.xfails: with open(args.xfails) as xfails_file: xfails = json.load(xfails_file) make_dirs_if_needed(args.output_dir, args.dry_run) shared_output_lock = multiprocessing.Lock() pool = multiprocessing.Pool(args.jobs, set_up_child, (args, shared_output_lock)) interface_framework_dirs = (args.interface_framework_dirs or 
DEFAULT_FRAMEWORK_INTERFACE_SEARCH_PATHS) module_files = list(itertools.chain( collect_non_framework_modules(args.sdk, xfails, BARE_INTERFACE_SEARCH_PATHS), collect_framework_modules(args.sdk, xfails, interface_framework_dirs))) if not args.skip_stdlib: # Always do the stdlib first, so that we can use it in later steps stdlib_module_files = ( x for x in module_files if x.name == STDLIB_NAME) status = process_module_files(pool, stdlib_module_files) if status != 0: sys.exit(status) non_stdlib_module_files = ( x for x in module_files if x.name != STDLIB_NAME) status = process_module_files(pool, non_stdlib_module_files) sys.exit(status) if __name__ == '__main__': main()
apache-2.0
ct-23/home-assistant
tests/components/sensor/test_statistics.py
6
4036
"""The test for the statistics sensor platform.""" import unittest import statistics from homeassistant.setup import setup_component from homeassistant.const import (ATTR_UNIT_OF_MEASUREMENT, TEMP_CELSIUS) from tests.common import get_test_home_assistant class TestStatisticsSensor(unittest.TestCase): """Test the Statistics sensor.""" def setup_method(self, method): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant() self.values = [17, 20, 15.2, 5, 3.8, 9.2, 6.7, 14, 6] self.count = len(self.values) self.min = min(self.values) self.max = max(self.values) self.total = sum(self.values) self.mean = round(sum(self.values) / len(self.values), 2) self.median = round(statistics.median(self.values), 2) self.deviation = round(statistics.stdev(self.values), 2) self.variance = round(statistics.variance(self.values), 2) self.change = self.values[-1] - self.values[0] self.average_change = self.change / (len(self.values) - 1) def teardown_method(self, method): """Stop everything that was started.""" self.hass.stop() def test_binary_sensor_source(self): """Test if source is a sensor.""" values = [1, 0, 1, 0, 1, 0, 1] assert setup_component(self.hass, 'sensor', { 'sensor': { 'platform': 'statistics', 'name': 'test', 'entity_id': 'binary_sensor.test_monitored', } }) for value in values: self.hass.states.set('binary_sensor.test_monitored', value) self.hass.block_till_done() state = self.hass.states.get('sensor.test_count') self.assertEqual(str(len(values)), state.state) def test_sensor_source(self): """Test if source is a sensor.""" assert setup_component(self.hass, 'sensor', { 'sensor': { 'platform': 'statistics', 'name': 'test', 'entity_id': 'sensor.test_monitored', } }) for value in self.values: self.hass.states.set('sensor.test_monitored', value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}) self.hass.block_till_done() state = self.hass.states.get('sensor.test_mean') self.assertEqual(str(self.mean), state.state) self.assertEqual(self.min, 
state.attributes.get('min_value')) self.assertEqual(self.max, state.attributes.get('max_value')) self.assertEqual(self.variance, state.attributes.get('variance')) self.assertEqual(self.median, state.attributes.get('median')) self.assertEqual(self.deviation, state.attributes.get('standard_deviation')) self.assertEqual(self.mean, state.attributes.get('mean')) self.assertEqual(self.count, state.attributes.get('count')) self.assertEqual(self.total, state.attributes.get('total')) self.assertEqual('°C', state.attributes.get('unit_of_measurement')) self.assertEqual(self.change, state.attributes.get('change')) self.assertEqual(self.average_change, state.attributes.get('average_change')) def test_sampling_size(self): """Test rotation.""" assert setup_component(self.hass, 'sensor', { 'sensor': { 'platform': 'statistics', 'name': 'test', 'entity_id': 'sensor.test_monitored', 'sampling_size': 5, } }) for value in self.values: self.hass.states.set('sensor.test_monitored', value, {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}) self.hass.block_till_done() state = self.hass.states.get('sensor.test_mean') self.assertEqual(3.8, state.attributes.get('min_value')) self.assertEqual(14, state.attributes.get('max_value'))
apache-2.0
DMRookie/RoomAI
roomai/kuhn/KuhnPokerEnv.py
1
7063
#!/bin/python import random import math import copy import roomai.kuhn.KuhnPokerUtils import roomai.common logger = roomai.get_logger() class KuhnPokerEnv(roomai.common.AbstractEnv): ''' The KuhnPoker game environment ''' #@override def init(self, params=dict()): ''' Initialize the KuhnPoker game environment :param params: the initialization params :return: infos, public_state, person_states, private_state ''' self.__params__ = dict() if "record_history" in params: self.__params__["record_history"] = params["record_history"] else: self.__params__["record_history"] = False if "start_turn" in params: self.__params__["start_turn"] = params["start_turn"] else: self.__params__["start_turn"] = int(random.random() * 2) self.__params__["num_players"] = 2 if "num_players" in params: logger.warning("KuhnPoker is a game of two players and the number of players always be 2. Ingores the \"num_players\" option") self.available_action = dict() self.available_action[roomai.kuhn.KuhnPokerUtils.KuhnPokerAction("check").key] = roomai.kuhn.KuhnPokerAction.lookup("check") self.available_action[roomai.kuhn.KuhnPokerUtils.KuhnPokerAction("bet").key] = roomai.kuhn.KuhnPokerAction.lookup("bet") self.private_state = roomai.kuhn.KuhnPokerUtils.KuhnPokerPrivateState() self.public_state = roomai.kuhn.KuhnPokerUtils.KuhnPokerPublicState() self.person_states = [roomai.kuhn.KuhnPokerUtils.KuhnPokerPersonState() for i in range(2)] card0 = math.floor(random.random() * 3) card1 = math.floor(random.random() * 3) while card0 == card1: card0 = math.floor(random.random() * 3) self.public_state.__turn__ = self.__params__["start_turn"] self.public_state.__first__ = self.public_state.turn self.public_state.__epoch__ = 0 self.public_state.__action_list__ = [] self.public_state.__is_terminal__ = False self.public_state.__scores__ = None self.person_states[0].__id__ = 0 self.person_states[0].__number__ = card0 self.person_states[1].__id__ = 1 self.person_states[1].__number__ = card1 
self.person_states[self.public_state.turn].__available_actions__ = self.available_action self.__gen_history__() infos = self.__gen_infos__() return infos, self.public_state, self.person_states, self.private_state #@override def forward(self, action): """ The KuhnPoker game environment steps with the action taken by the current player :param action :returns:infos, public_state, person_states, private_state """ self.person_states[self.public_state.turn].__available_actions__ = dict() self.public_state.__epoch__ += 1 self.public_state.__turn__ = (self.public_state.turn+1)%2 self.public_state.__action_list__.append(action.key) if self.public_state.epoch == 1: self.public_state.__is_terminal__ = False self.public_state.__scores__ = [] self.person_states[self.public_state.turn].__available_actions__ = self.available_action self.__gen_history__() infos = self.__gen_infos__() return infos, self.public_state, self.person_states, self.private_state elif self.public_state.epoch == 2: scores = self.__evalute_two_round__() if scores is not None: self.public_state.__is_terminal__ = True self.public_state.__scores__ = scores self.__gen_history__() infos = self.__gen_infos__() return infos,self.public_state, self.person_states, self.private_state else: self.public_state.is_terminal = False self.public_state.scores = [] self.person_states[self.public_state.turn].available_actions = self.available_action self.__gen_history__() infos = self.__gen_infos__() return infos,self.public_state, self.person_states, self.private_state elif self.public_state.epoch == 3: self.public_state.is_terminal = True self.public_state.scores = self.__evalute_three_round__() self.__gen_history__() infos = self.__gen_infos__() return infos,self.public_state, self.person_states, self.private_state else: raise Exception("KuhnPoker has 3 turns at most") #@Overide @classmethod def compete(cls, env, players): ''' Use the game environment to hold a compete for the players :param env: The game environment :param 
players: The players :return: scores for the players ''' infos, public_state, person_state, private_state = env.init() for i in range(len(players)): players[i].receive_info(infos[i]) while public_state.is_terminal == False: turn = infos[-1].public_state.turn action = players[turn].take_action() infos,public_state, person_state, private_state = env.forward(action) for i in range(len(players)): players[i].receive_info(infos[i]) return public_state.scores def __higher_number_player__(self): if self.person_states[0].number > self.person_states[1].number: return 0 else: return 1 def __evalute_two_round__(self): win = self.__higher_number_player__() first = self.public_state.first scores = [0, 0]; actions = self.public_state.action_list if actions[0] == "check" and \ actions[1] == "bet": return None if actions[0] == actions[1] and \ actions[0] == "check": scores[win] = 1; scores[1-win] = -1 return scores; if actions[0] == "bet" and \ actions[1] == "check": scores[first] = 1; scores[1-first] = -1 return scores; if actions[0] == actions[1] and \ actions[0] == "bet": scores[win] = 2 scores[1-win] = -2 return scores; def __evalute_three_round__(self): first = self.public_state.first win = self.__higher_number_player__() scores = [0, 0] if self.public_state.action_list[2] == "check": scores[1 - first] = 1; scores[first] = -1 else: scores[win] = 2; scores[1-win] = -2 return scores;
mit
eglxiang/Med
test_transformationOutput.py
1
8775
# -*- coding: utf-8 -*- """ Created on Thu Apr 13 12:10:33 2017 @author: apezeshk """ from matplotlib import pyplot as plt import numpy as np #Use this to display a single slice of the original/transformed patch in one figure, #and every slice of the original and the transformed copies in another figure. This can be used #to verify the images look right both in terms of the transformations as well as the cropping #s.t. the nodule is centered in the patch. #The last few lines can also be used to verify that the simple rotation/flipped augmentations #look alright, and that they are all different with one another. #nodOrig = np.load('/diskStation/LIDC/36368/pos_36368_test/p0451_20000101_s3000315_0.npy') nodOld = np.load('/diskStation/LIDC/36368/pos_36368/p1012_20000101_s32231_0.npy').astype('int16') nodTrans1 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m00.npy') nodTrans2 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m01.npy') nodTrans3 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m02.npy') nodTrans4 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m03.npy') nodTrans5 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m04.npy') nodTrans6 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m05.npy') nodTrans7 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m06.npy') nodTrans8 = np.load('/diskStation/LIDC/36368/pos_aug_aux_36368/p1012_20000101_s32231_0_m07.npy') i = 4; #slice number to be shown in the first figure plt.figure(); plt.subplot(1,5,1); plt.imshow(nodOld[:,:,i], cmap = 'gray'); plt.subplot(1,5,2); plt.imshow(nodTrans1[:,:,i], cmap = 'gray'); plt.subplot(1,5,3); plt.imshow(nodTrans2[:,:,i], cmap = 'gray'); plt.subplot(1,5,4); plt.imshow(nodTrans3[:,:,i], cmap = 'gray'); plt.subplot(1,5,5); plt.imshow(nodTrans4[:,:,i], cmap = 'gray'); plt.figure() 
plt.subplot(8,5,1); plt.imshow(nodOld[:,:,0], cmap = 'gray'); plt.subplot(8,5,6); plt.imshow(nodOld[:,:,1], cmap = 'gray'); plt.subplot(8,5,11); plt.imshow(nodOld[:,:,2], cmap = 'gray'); plt.subplot(8,5,16); plt.imshow(nodOld[:,:,3], cmap = 'gray'); plt.subplot(8,5,21); plt.imshow(nodOld[:,:,4], cmap = 'gray'); plt.subplot(8,5,26); plt.imshow(nodOld[:,:,5], cmap = 'gray'); plt.subplot(8,5,31); plt.imshow(nodOld[:,:,6], cmap = 'gray'); plt.subplot(8,5,36); plt.imshow(nodOld[:,:,7], cmap = 'gray'); plt.subplot(8,5,2); plt.imshow(nodTrans1[:,:,0], cmap = 'gray'); plt.subplot(8,5,7); plt.imshow(nodTrans1[:,:,1], cmap = 'gray'); plt.subplot(8,5,12); plt.imshow(nodTrans1[:,:,2], cmap = 'gray'); plt.subplot(8,5,17); plt.imshow(nodTrans1[:,:,3], cmap = 'gray'); plt.subplot(8,5,22); plt.imshow(nodTrans1[:,:,4], cmap = 'gray'); plt.subplot(8,5,27); plt.imshow(nodTrans1[:,:,5], cmap = 'gray'); plt.subplot(8,5,32); plt.imshow(nodTrans1[:,:,6], cmap = 'gray'); plt.subplot(8,5,37); plt.imshow(nodTrans1[:,:,7], cmap = 'gray'); plt.subplot(8,5,3); plt.imshow(nodTrans2[:,:,0], cmap = 'gray'); plt.subplot(8,5,8); plt.imshow(nodTrans2[:,:,1], cmap = 'gray'); plt.subplot(8,5,13); plt.imshow(nodTrans2[:,:,2], cmap = 'gray'); plt.subplot(8,5,18); plt.imshow(nodTrans2[:,:,3], cmap = 'gray'); plt.subplot(8,5,23); plt.imshow(nodTrans2[:,:,4], cmap = 'gray'); plt.subplot(8,5,28); plt.imshow(nodTrans2[:,:,5], cmap = 'gray'); plt.subplot(8,5,33); plt.imshow(nodTrans2[:,:,6], cmap = 'gray'); plt.subplot(8,5,38); plt.imshow(nodTrans2[:,:,7], cmap = 'gray'); plt.subplot(8,5,4); plt.imshow(nodTrans3[:,:,0], cmap = 'gray'); plt.subplot(8,5,9); plt.imshow(nodTrans3[:,:,1], cmap = 'gray'); plt.subplot(8,5,14); plt.imshow(nodTrans3[:,:,2], cmap = 'gray'); plt.subplot(8,5,19); plt.imshow(nodTrans3[:,:,3], cmap = 'gray'); plt.subplot(8,5,24); plt.imshow(nodTrans3[:,:,4], cmap = 'gray'); plt.subplot(8,5,29); plt.imshow(nodTrans3[:,:,5], cmap = 'gray'); plt.subplot(8,5,34); plt.imshow(nodTrans3[:,:,6], 
cmap = 'gray'); plt.subplot(8,5,39); plt.imshow(nodTrans3[:,:,7], cmap = 'gray'); plt.subplot(8,5,5); plt.imshow(nodTrans4[:,:,0], cmap = 'gray'); plt.subplot(8,5,10); plt.imshow(nodTrans4[:,:,1], cmap = 'gray'); plt.subplot(8,5,15); plt.imshow(nodTrans4[:,:,2], cmap = 'gray'); plt.subplot(8,5,20); plt.imshow(nodTrans4[:,:,3], cmap = 'gray'); plt.subplot(8,5,25); plt.imshow(nodTrans4[:,:,4], cmap = 'gray'); plt.subplot(8,5,30); plt.imshow(nodTrans4[:,:,5], cmap = 'gray'); plt.subplot(8,5,35); plt.imshow(nodTrans4[:,:,6], cmap = 'gray'); plt.subplot(8,5,40); plt.imshow(nodTrans4[:,:,7], cmap = 'gray'); plt.figure() plt.subplot(8,5,1); plt.imshow(nodOld[:,:,0], cmap = 'gray'); plt.subplot(8,5,6); plt.imshow(nodOld[:,:,1], cmap = 'gray'); plt.subplot(8,5,11); plt.imshow(nodOld[:,:,2], cmap = 'gray'); plt.subplot(8,5,16); plt.imshow(nodOld[:,:,3], cmap = 'gray'); plt.subplot(8,5,21); plt.imshow(nodOld[:,:,4], cmap = 'gray'); plt.subplot(8,5,26); plt.imshow(nodOld[:,:,5], cmap = 'gray'); plt.subplot(8,5,31); plt.imshow(nodOld[:,:,6], cmap = 'gray'); plt.subplot(8,5,36); plt.imshow(nodOld[:,:,7], cmap = 'gray'); plt.subplot(8,5,2); plt.imshow(nodTrans5[:,:,0], cmap = 'gray'); plt.subplot(8,5,7); plt.imshow(nodTrans5[:,:,1], cmap = 'gray'); plt.subplot(8,5,12); plt.imshow(nodTrans5[:,:,2], cmap = 'gray'); plt.subplot(8,5,17); plt.imshow(nodTrans5[:,:,3], cmap = 'gray'); plt.subplot(8,5,22); plt.imshow(nodTrans5[:,:,4], cmap = 'gray'); plt.subplot(8,5,27); plt.imshow(nodTrans5[:,:,5], cmap = 'gray'); plt.subplot(8,5,32); plt.imshow(nodTrans5[:,:,6], cmap = 'gray'); plt.subplot(8,5,37); plt.imshow(nodTrans5[:,:,7], cmap = 'gray'); plt.subplot(8,5,3); plt.imshow(nodTrans6[:,:,0], cmap = 'gray'); plt.subplot(8,5,8); plt.imshow(nodTrans6[:,:,1], cmap = 'gray'); plt.subplot(8,5,13); plt.imshow(nodTrans6[:,:,2], cmap = 'gray'); plt.subplot(8,5,18); plt.imshow(nodTrans6[:,:,3], cmap = 'gray'); plt.subplot(8,5,23); plt.imshow(nodTrans6[:,:,4], cmap = 'gray'); plt.subplot(8,5,28); 
plt.imshow(nodTrans6[:,:,5], cmap = 'gray'); plt.subplot(8,5,33); plt.imshow(nodTrans6[:,:,6], cmap = 'gray'); plt.subplot(8,5,38); plt.imshow(nodTrans6[:,:,7], cmap = 'gray'); plt.subplot(8,5,4); plt.imshow(nodTrans7[:,:,0], cmap = 'gray'); plt.subplot(8,5,9); plt.imshow(nodTrans7[:,:,1], cmap = 'gray'); plt.subplot(8,5,14); plt.imshow(nodTrans7[:,:,2], cmap = 'gray'); plt.subplot(8,5,19); plt.imshow(nodTrans7[:,:,3], cmap = 'gray'); plt.subplot(8,5,24); plt.imshow(nodTrans7[:,:,4], cmap = 'gray'); plt.subplot(8,5,29); plt.imshow(nodTrans7[:,:,5], cmap = 'gray'); plt.subplot(8,5,34); plt.imshow(nodTrans7[:,:,6], cmap = 'gray'); plt.subplot(8,5,39); plt.imshow(nodTrans7[:,:,7], cmap = 'gray'); plt.subplot(8,5,5); plt.imshow(nodTrans8[:,:,0], cmap = 'gray'); plt.subplot(8,5,10); plt.imshow(nodTrans8[:,:,1], cmap = 'gray'); plt.subplot(8,5,15); plt.imshow(nodTrans8[:,:,2], cmap = 'gray'); plt.subplot(8,5,20); plt.imshow(nodTrans8[:,:,3], cmap = 'gray'); plt.subplot(8,5,25); plt.imshow(nodTrans8[:,:,4], cmap = 'gray'); plt.subplot(8,5,30); plt.imshow(nodTrans8[:,:,5], cmap = 'gray'); plt.subplot(8,5,35); plt.imshow(nodTrans8[:,:,6], cmap = 'gray'); plt.subplot(8,5,40); plt.imshow(nodTrans8[:,:,7], cmap = 'gray'); ################################################################################ # This part is to display the regular augmentations (simple flip/rotations) ################################################################################ plt.figure; nodSimple1 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_r11.npy') nodSimple2 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_r21.npy') nodSimple3 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_r31.npy') nodSimple4 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_f11.npy') nodSimple5 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_f12.npy') nodSimple6 = 
np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_b11.npy') nodSimple7 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_b12.npy') nodSimple8 = np.load('/diskStation/LIDC/36368/pos_aug_0_36368/p1012_20000101_s32231_0_b21.npy') plt.subplot(2,4,1); plt.imshow(nodSimple1[:,:,i], cmap = 'gray'); plt.subplot(2,4,2); plt.imshow(nodSimple2[:,:,i], cmap = 'gray'); plt.subplot(2,4,3); plt.imshow(nodSimple3[:,:,i], cmap = 'gray'); plt.subplot(2,4,4); plt.imshow(nodSimple4[:,:,i], cmap = 'gray'); plt.subplot(2,4,5); plt.imshow(nodSimple5[:,:,i], cmap = 'gray'); plt.subplot(2,4,6); plt.imshow(nodSimple6[:,:,i], cmap = 'gray'); plt.subplot(2,4,7); plt.imshow(nodSimple7[:,:,i], cmap = 'gray'); plt.subplot(2,4,8); plt.imshow(nodSimple8[:,:,i], cmap = 'gray');
bsd-2-clause
danielricks/autoplay
agents/agentWord2Vec.py
1
13779
import random as rand import scholar.scholar as sch import os, random, copy, re import cPickle as pickle import nltk class AgentWord2Vec: # Sets all initial learning variables of behavior parameters def __init__(self, initial_epsilon, training_cycles): # GLOBAL VARIABLES # Used for making Word2Vec queries self.s = sch.Scholar() # Used for saving noun:verb lists in memory to limit the number of Word2Vec queries self.verb_dict = {} # Used for continuing to work within a game_text when the last command(s) didn't give you good output self.last_good_game_text = '' # Refreshes the debug log file to be empty at the beginning of a game run self.debug = True if self.debug: open('debugAgentWord2Vec.txt', 'w').close() open('bad_commands.txt', 'w').close() # open('good_commands.txt', 'w').close() # Used to guarantee that the same command won't be run twice in a row self.last_command = '' # Used to keep track of the commands that can be run in a given area (Resets when a unique output is encountered) self.possible_commands = [] # Used for guaranteeing that commands that have failed in the past will not be executed again if os.path.exists('word_stale_dict.w2v') and os.path.exists('word_used_commands.w2v'): self.stale_output, self.used_commands = self.load_memories() else: # Used for keeping track of unique outputs and removing the possibility of re-running commands that lead to non-unique outputs self.stale_output = {} self.used_commands = {} # PARAMETERS # Used to set the number of characters in the game_text that are unique # So ( I don't know the word "shove".) turns to ( I don't know the wo) self.OUTPUT_CHARACTER_COUNT = 20 # Used to set the number of commands that lead to an output before it's not considered 'unique' self.ARBITRARY_COMMAND_CONTROL_COUNT = 30 # Used as verbs for every noun in command generation self.STANDARD_VERBS = ['open', 'take'] # Used to set the number of commands returned when Word2Vec is queried. 
# (Make sure that this number is smaller than self.ARBITRARY_COMMAND_CONTROL_COUNT) # Also keep in mind that the first x verbs are the standard verbs, and are included in this number self.COMMANDS_RETURNED_COUNT = 15 # Prepares the agent for a new round of training (i.e. prepares to restart the game, but does not erase any learned knowledge) def refresh(self): self.total_points_earned = 0 # This is a not-so-stupid, now very complicated agent. def take_action(self, game_text, evaluation_flag = False): # Take note of output given command game_text_clip = game_text[:self.OUTPUT_CHARACTER_COUNT] self.track_output(game_text_clip) # If the output is bad... (if the game is just beginning or if the output to command ratio is 1:20 ish) if self.last_good_game_text != '' and len(self.stale_output[game_text_clip]) >= self.ARBITRARY_COMMAND_CONTROL_COUNT: # We used to just append to a list, now we take advantage of Python's dictionary's use of hashing try: self.used_commands[self.last_command] += 1 except: self.used_commands[self.last_command] = 0 if self.debug: with open('bad_commands.txt', 'a') as f: f.write(self.last_command + '\n') # If there are still commands to be run... if len(self.possible_commands) > 0: # Take the first command in the list... current_command = self.possible_commands[0] # Remove it from the list del self.possible_commands[0] # Save the command self.last_command = current_command self.write_to_file('Running command: ' + current_command + ' out of ' + str(len(self.possible_commands) + 1) + '\n') # Return the command return current_command else: # with open('good_commands.txt', 'a') as f: # f.write(self.last_command + '\n') # If the output is good and we need commands to run... 
self.write_to_file(':::Relearning commands:::' + '\n') # Part-of-speech tag the game_text tagged_game_text = self.get_tagged_text(game_text) # Get commands from the tagged_game_text (nouns and noun phrases) local_commands = self.get_commands(tagged_game_text) # Shuffle the list of commands to add pseudo-randomness to the process list_commands = list(local_commands) random.shuffle(list_commands) self.possible_commands = list_commands # If there are now commands to run... if len(self.possible_commands) > 0: # Take the first command in the list... current_command = self.possible_commands[0] del self.possible_commands[0] # If that exact command was not run last time... if current_command != self.last_command: # Save the command and game_text self.last_command = current_command self.last_good_game_text = game_text self.write_to_file('Running command: ' + current_command + ' out of ' + str(len(self.possible_commands) + 1) + '\n') # Return the command return current_command else: #(else break and run a random movement instead) self.write_to_file(current_command + ' is the same as ' + self.last_command + '\n') # This should run when (the game_text is bad and there are no possible commands to be run) and when # (the game_text is good but it can't find any valid commands to run) return self.get_random_movement_command() # Returns the list of commands given a list of identified nouns or noun phrases def get_commands(self, tagged_game_text): self.write_to_file("Tagged_game_text: " + str(tagged_game_text) + '\n') single_tagged_nouns = re.findall(r'[A-Za-z]+_NN[S]*', tagged_game_text) compound_tagged_nouns = re.findall(r'[A-Za-z]+_[J|N]+ [A-Za-z]+_NN[S]*', tagged_game_text) all_tagged_nouns = single_tagged_nouns + compound_tagged_nouns # Take out duplicates (remove 'door_NN' when 'trap_NN door_NN' is avilable) # This is no longer necessary ('take large egg' doesn't work, while 'take egg' does) if False: # For every noun phrase... 
for compound_tagged_noun in compound_tagged_nouns: second_word_and_tag = compound_tagged_noun.split()[1] # For every noun... for single_tagged_noun in single_tagged_nouns: # If the second word in the noun phrase (guaranteed to be a noun) and the noun are the same... if single_tagged_noun.lower() == second_word_and_tag.lower(): # Remove the single noun from the list if single_tagged_noun in all_tagged_nouns: all_tagged_nouns.remove(single_tagged_noun) score = [] tagged_words_in_game_text = tagged_game_text.split() for word_index in xrange(len(tagged_words_in_game_text)): current_tagged_word = tagged_words_in_game_text[word_index] other_tagged_words = [] for inner_word_index in xrange(len(tagged_words_in_game_text)): if inner_word_index != word_index: other_tagged_words.append(tagged_words_in_game_text[inner_word_index]) current_score = 0 score.append(current_score) # If there are nouns in the list... if len(all_tagged_nouns) > 0: local_commands = set() # For every tagged noun (or noun phrase) for tagged_noun in all_tagged_nouns: # If it's actually a noun phrase (ex. 'wooden_JJ table_NN' or 'trap_NN door_NN') if ' ' in tagged_noun: first_word_and_tag = tagged_noun.split()[0].split('_') second_word_and_tag = tagged_noun.split()[1].split('_') # If the tag of the second word in the noun phrase is actually 'NN' (and not 'NNS', 'NNP') if second_word_and_tag[1] == 'NN' or second_word_and_tag[1] == 'NNS': # Ensure that every word is lowercase lower_tagged_noun_list = [first_word_and_tag[0].lower() + '_' + first_word_and_tag[1], second_word_and_tag[0].lower() + '_' + second_word_and_tag[1]] # Get commands for the phrase noun_commands = self.get_commands_for_noun(lower_tagged_noun_list) # For every command, add it to the list for noun_command in noun_commands: local_commands.add(noun_command) else: # For every noun (not noun phrases) (ex. 
'door_NN') if tagged_noun.split('_')[1] == 'NN': # Ensure that every word is lowercase lower_tagged_noun_list = [tagged_noun.split('_')[0].lower() + '_NN'] # Get commands for the noun noun_commands = self.get_commands_for_noun(lower_tagged_noun_list) # For every command, add it to the list for noun_command in noun_commands: local_commands.add(noun_command) return local_commands return [] # Handles whether output should be considered 'bad' or not by keeping track of commands to output def track_output(self, game_text_clip): # The {output:command} is saved in a dictionary, so that we can track how many inputs return which outputs. # If there are tons of commands that return the same output, those particular commands are not worth executing. # We track them here. # Side note: We only track the first 20 chararacters in the output so that we can catch all # the "I don't know the word [word]" phrases as single outputs. try: if len(self.stale_output[game_text_clip]) < self.ARBITRARY_COMMAND_CONTROL_COUNT: self.stale_output[game_text_clip].add(self.last_command) except: self.stale_output[game_text_clip] = set() if len(self.stale_output[game_text_clip]) < self.ARBITRARY_COMMAND_CONTROL_COUNT: self.stale_output[game_text_clip].add(self.last_command) # Write the stale output to commands dictionary to the file self.write_to_file('\t' + str(self.stale_output) + '\n') # Writes to the debug file def write_to_file(self, text): if self.debug: with open('debugAgentWord2Vec.txt', 'a') as f: f.write(text) # Return a random movement command (or 'look') def get_random_movement_command(self): direction = rand.random() if direction < 0.10: return 'n' elif direction < 0.20: return 's' elif direction < 0.30: return 'e' elif direction < 0.40: return 'w' elif direction < 0.50: return 'ne' elif direction < 0.60: return 'nw' elif direction < 0.70: return 'se' elif direction < 0.80: return 'sw' elif direction < 0.85: return 'u' elif direction < 0.90: return 'd' elif direction < 0.95: return 
'look' else: return 'i' # Return a list of commands for a given list of tagged words ('door_NN', 'trap_NN door_NN', 'wooden_JJ door_NN', etc.) def get_commands_for_noun(self, tagged_list): # Get a list of verbs for the noun tagged_noun = tagged_list[-1] verbs = self.get_verbs_for_noun(tagged_noun) # Add every verb plus the noun to the command list commands = [] noun = tagged_noun.split('_')[0] for verb in verbs: # Add a describing word if it exists if len(tagged_list) > 1: command = verb + ' ' + tagged_list[0].split('_')[0] + ' ' + noun else: command = verb + ' ' + noun # Take advantage of Python's hashing to quickly check whether a command is valid try: self.used_commands[command] += 1 except: commands.append(command) self.write_to_file(str(tagged_list) + '\t' + ",".join(commands) + '\n') # Return the list of commands (only a specified number are returned) return commands[:self.COMMANDS_RETURNED_COUNT] # Returns a list of verbs for a given tagged noun def get_verbs_for_noun(self, tagged_noun): # Begin with a list of standard verbs verbs = copy.deepcopy(self.STANDARD_VERBS) # Separate the noun from the list and its tag noun = tagged_noun.split('_')[0] # If the noun is too short to be a word, return nothing (this happened in the case of 'c' and 't') if len(noun) < 2: return [] # Get the list of verbs for a noun, whether stored in memory... 
if tagged_noun in self.verb_dict.keys(): tagged_verbs = self.verb_dict[tagged_noun] else: # Or from Word2Vec directly if tagged_noun.split('_')[1] == '_NNS': tagged_verbs = self.s.get_verbs_plural(noun, 20) # with open('good_commands.txt', 'a') as f: # f.write(self.last_command + '\t' + str(tagged_verbs) + '\n') else: tagged_verbs = self.s.get_verbs(noun, 20) self.write_to_file(str(tagged_verbs) + '\n') self.verb_dict[tagged_noun] = tagged_verbs # Add every verb (without its tag) to a list and return it for tagged_verb in tagged_verbs: verb = tagged_verb.split('_')[0] verbs.append(verb) return verbs # Save agent progress to a pickle file def write_memories(self): pickle.dump(self.stale_output, open('word_stale_dict.w2v', 'wb')) pickle.dump(self.used_commands, open('word_used_commands.w2v', 'wb')) # Load agent progress from a pickle file def load_memories(self): return pickle.load(open('word_stale_dict.w2v', 'rb')), pickle.load(open('word_used_commands.w2v', 'rb')) # If overwritten in a derived class, this function should still be sure to update total_points_earned def update(self, reward, new_game_text): self.total_points_earned += reward # Generates a 'status update' which will be printed to the screen by autoplay.py def get_status(self): return 'TOTAL POINTS = ' + str(self.total_points_earned) # Useful for writing date files that track obtained reward over time def get_total_points_earned(): return self.total_points_earned # Returns a string containing data such as learning rate, loss function, and various hyperparameters which can then be written to a data file. # This is very helpful when you want to repeat a learning run, but can't remember which hyperparameters you used the first time. def get_learning_parameters(filename): pass # Returns a sentence that has been tagged by NLTK. 
def get_tagged_text(self, text): tokenized_text = nltk.word_tokenize(text) pos_tagged_text = nltk.pos_tag(tokenized_text) formatted_tagged_text = [] for tuple_word in pos_tagged_text: formatted_tagged_text.append(tuple_word[0] + '_' + tuple_word[1]) return formatted_tagged_text
gpl-3.0
theho/pyho
travis_pypi_setup.py
2
3750
#!/usr/bin/env python # -*- coding: utf-8 -*- """Update encrypted deploy password in Travis config file """ from __future__ import print_function import base64 import json import os from getpass import getpass import yaml from cryptography.hazmat.primitives.serialization import load_pem_public_key from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15 try: from urllib import urlopen except: from urllib.request import urlopen GITHUB_REPO = 'jimmyho/pyho' TRAVIS_CONFIG_FILE = os.path.join( os.path.dirname(os.path.abspath(__file__)), '.travis.yml') def load_key(pubkey): """Load public RSA key, with work-around for keys using incorrect header/footer format. Read more about RSA encryption with cryptography: https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/ """ try: return load_pem_public_key(pubkey.encode(), default_backend()) except ValueError: # workaround for https://github.com/travis-ci/travis-api/issues/196 pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END') return load_pem_public_key(pubkey.encode(), default_backend()) def encrypt(pubkey, password): """Encrypt password using given RSA public key and encode it with base64. The encrypted password can only be decrypted by someone with the private key (in this case, only Travis). """ key = load_key(pubkey) encrypted_password = key.encrypt(password, PKCS1v15()) return base64.b64encode(encrypted_password) def fetch_public_key(repo): """Download RSA public key Travis will use for this repo. Travis API docs: http://docs.travis-ci.com/api/#repository-keys """ keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo) data = json.loads(urlopen(keyurl).read().decode()) if 'key' not in data: errmsg = "Could not find public key for repo: {}.\n".format(repo) errmsg += "Have you already added your GitHub repo to Travis?" 
raise ValueError(errmsg) return data['key'] def prepend_line(filepath, line): """Rewrite a file adding a line to its beginning. """ with open(filepath) as f: lines = f.readlines() lines.insert(0, line) with open(filepath, 'w') as f: f.writelines(lines) def load_yaml_config(filepath): with open(filepath) as f: return yaml.load(f) def save_yaml_config(filepath, config): with open(filepath, 'w') as f: yaml.dump(config, f, default_flow_style=False) def update_travis_deploy_password(encrypted_password): """Update the deploy section of the .travis.yml file to use the given encrypted password. """ config = load_yaml_config(TRAVIS_CONFIG_FILE) config['deploy']['password'] = dict(secure=encrypted_password) save_yaml_config(TRAVIS_CONFIG_FILE, config) line = ('# This file was autogenerated and will overwrite' ' each time you run travis_pypi_setup.py\n') prepend_line(TRAVIS_CONFIG_FILE, line) def main(args): public_key = fetch_public_key(args.repo) password = args.password or getpass('PyPI password: ') update_travis_deploy_password(encrypt(public_key, password.encode())) print("Wrote encrypted password to .travis.yml -- you're ready to deploy") if '__main__' == __name__: import argparse parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--repo', default=GITHUB_REPO, help='GitHub repo (default: %s)' % GITHUB_REPO) parser.add_argument('--password', help='PyPI password (will prompt if not provided)') args = parser.parse_args() main(args)
mit
kb1lqc/Faraday-Software
faraday/proxyio/cc430radioconfig.py
2
3365
def freq0_carrier_calculation(freq_desired, fxosc=26.0, debug=False): """ Calculates the FREQ0, FREQ1, and FREQ2 24 bit word for main carrier frequency of the CC430. :param freq_desired: Frequency in MHz that is desired to tune to (Integer or float) :param fxosc: CC430 High Frequency crystal frequency in MHZ (Integer or float). Default = 26.0MHz :param debug: If True then the function will print additional information about the calculation process :return: A list\: [FREQ2, FREQ1, FREQ0, STR(actual frequency of carrier in MHz)] .. note:: The CC430 high frequency crystal for Faraday is 26.0 MHz .. warning:: Farday only supports the Amateur Radio 900MHz band (902-928MHz). """ #Calculate the smallest bit resolution in VC0 based on crystal vco_step_float = float(fxosc * 10**6) / 2**16 #Calculate 24 bit word needed for desired frequency nd return INT desired_freq_word_int = int((float(freq_desired) * 10**6) / vco_step_float) desired_freq_step_count_int = int(desired_freq_word_int) * vco_step_float actual_freq_step_count_float = int(desired_freq_step_count_int) / float((10**6)) #Convert 24 bit word into hex string and parse into FREQ2, FREQ1, and FREQ0 desired_freq_word_int_hex = hex(desired_freq_word_int) FREQx_list = [desired_freq_word_int_hex[i:i + 2] for i in range(0, len(desired_freq_word_int_hex), 2)] #Create list of INT's of the 3 bytes for FREQx's FREQx_list[0] = int(FREQx_list[1], 16) #FREQ2 FREQx_list[1] = int(FREQx_list[2], 16) #FREQ1 FREQx_list[2] = int(FREQx_list[3], 16) #FREQ0 FREQx_list[3] = actual_freq_step_count_float #append actual frequency for reference if debug: print "vco_step_float (Hz) =", vco_step_float print "24-bit word for desired frequency (int): ", int(desired_freq_step_count_int) print "24-bit word for desired frequency (hex): ", hex(desired_freq_word_int) #Calculate actual achieved frequency due to VCO step size print "Actual result frequency (MHz): ", actual_freq_step_count_float #RETURN list of FREQx bytes and actual achieved frequency 
return FREQx_list def freq0_reverse_carrier_calculation(freq0, freq1, freq2, fxosc=26.0, debug=False): """ This function reverse calculates the CC430 frequency in MHz from the known freq[] bytes in the CC430 radio registers. :param freq0: Frequency byte index 0 :param freq1: Frequency byte index 1 :param freq2: Frequency byte index 2 :param fxosc: CC430 High Frequency crystal frequency in MHZ (Integer or float). efault = 26.0MHz :param debug: If True then the function will print additional information about the calculation process :return: Returns the calculated frequency in MHz as a float. .. note:: The CC430 high frequency crystal for Faraday is 26.0 MHz """ #Calculate the smallest bit resolution in VC0 based on crystal vco_step_float = float(fxosc * 10**6) / 2**16 #print steps for rebuild freq0_shifted = freq0 << 16 freq1_shifted = freq1 << 8 freq2_shifted = freq2 actual_freq_mhz = (int(hex(freq0_shifted + freq1_shifted + freq2_shifted), 16) * vco_step_float) / float(10**6) if (debug == 1): print freq0_shifted, freq1_shifted, freq2_shifted print actual_freq_mhz return actual_freq_mhz
gpl-3.0
glogiotatidis/mozillians-new
vendor-local/lib/python/kombu/utils/compat.py
13
10475
""" kombu.utils.compat ================== Helps compatibility with older Python versions. :copyright: (c) 2009 - 2012 by Ask Solem. :license: BSD, see LICENSE for more details. """ import sys ############## collections.OrderedDict ####################################### import weakref try: from collections import MutableMapping except ImportError: from UserDict import DictMixin as MutableMapping # noqa from itertools import imap as _imap from operator import eq as _eq class _Link(object): """Doubly linked list.""" __slots__ = 'prev', 'next', 'key', '__weakref__' class CompatOrderedDict(dict, MutableMapping): """Dictionary that remembers insertion order""" # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. # Big-O running times for all methods are the same as for regular # dictionaries. # The internal self.__map dictionary maps keys to links in a doubly # linked list. # The circular doubly linked list starts and ends with a sentinel element. # The sentinel element never gets deleted (this simplifies the algorithm). # The prev/next links are weakref proxies (to prevent circular # references). # Individual links are kept alive by the hard reference in self.__map. # Those hard references disappear when a key is deleted from # an OrderedDict. __marker = object() def __init__(self, *args, **kwds): """Initialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. """ if len(args) > 1: raise TypeError("expected at most 1 arguments, got %d" % ( len(args))) try: self.__root except AttributeError: # sentinel node for the doubly linked list self.__root = root = _Link() root.prev = root.next = root self.__map = {} self.update(*args, **kwds) def clear(self): "od.clear() -> None. Remove all items from od." 
root = self.__root root.prev = root.next = root self.__map.clear() dict.clear(self) def __setitem__(self, key, value): "od.__setitem__(i, y) <==> od[i]=y" # Setting a new item creates a new link which goes at the end of the # linked list, and the inherited dictionary is updated with the new # key/value pair. if key not in self: self.__map[key] = link = _Link() root = self.__root last = root.prev link.prev, link.next, link.key = last, root, key last.next = root.prev = weakref.proxy(link) dict.__setitem__(self, key, value) def __delitem__(self, key): """od.__delitem__(y) <==> del od[y]""" # Deleting an existing item uses self.__map to find the # link which is then removed by updating the links in the # predecessor and successor nodes. dict.__delitem__(self, key) link = self.__map.pop(key) link.prev.next = link.next link.next.prev = link.prev def __iter__(self): """od.__iter__() <==> iter(od)""" # Traverse the linked list in order. root = self.__root curr = root.next while curr is not root: yield curr.key curr = curr.next def __reversed__(self): """od.__reversed__() <==> reversed(od)""" # Traverse the linked list in reverse order. 
root = self.__root curr = root.prev while curr is not root: yield curr.key curr = curr.prev def __reduce__(self): """Return state information for pickling""" items = [[k, self[k]] for k in self] tmp = self.__map, self.__root del(self.__map, self.__root) inst_dict = vars(self).copy() self.__map, self.__root = tmp if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def setdefault(self, key, default=None): try: return self[key] except KeyError: self[key] = default return default def update(self, other=(), **kwds): if isinstance(other, dict): for key in other: self[key] = other[key] elif hasattr(other, "keys"): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value def pop(self, key, default=__marker): try: value = self[key] except KeyError: if default is self.__marker: raise return default else: del self[key] return value def values(self): return [self[key] for key in self] def items(self): return [(key, self[key]) for key in self] def itervalues(self): for key in self: yield self[key] def iteritems(self): for key in self: yield (key, self[key]) def iterkeys(self): return iter(self) def keys(self): return list(self) def popitem(self, last=True): """od.popitem() -> (k, v) Return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. 
""" if not self: raise KeyError('dictionary is empty') if last: if sys.platform.startswith("java"): key = self.keys()[-1] else: key = reversed(self).next() else: key = iter(self).next() value = self.pop(key) return key, value def __repr__(self): "od.__repr__() <==> repr(od)" if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) def copy(self): "od.copy() -> a shallow copy of od" return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): """OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S and values equal to v (which defaults to None).""" d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): """od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive.""" if isinstance(other, OrderedDict): return len(self) == len(other) and \ all(_imap(_eq, self.iteritems(), other.iteritems())) return dict.__eq__(self, other) def __ne__(self, other): return not (self == other) try: from collections import OrderedDict except ImportError: OrderedDict = CompatOrderedDict # noqa ############## queue.LifoQueue ############################################## from Queue import Queue class LifoQueue(Queue): def _init(self, maxsize): self.queue = [] self.maxsize = maxsize def _qsize(self, len=len): return len(self.queue) def _put(self, item): self.queue.append(item) def _get(self): return self.queue.pop() ############## logging.handlers.WatchedFileHandler ########################## import logging import os import platform as _platform from stat import ST_DEV, ST_INO if _platform.system() == "Windows": #since windows doesn't go with WatchedFileHandler use FileHandler instead WatchedFileHandler = logging.FileHandler else: try: from logging.handlers import WatchedFileHandler except ImportError: class WatchedFileHandler(logging.FileHandler): # noqa """ A handler for logging to a file, which watches the file 
to see if it has changed while in use. This can happen because of usage of programs such as newsyslog and logrotate which perform log file rotation. This handler, intended for use under Unix, watches the file to see if it has changed since the last emit. (A file has changed if its device or inode have changed.) If it has changed, the old file stream is closed, and the file opened to get a new stream. This handler is not appropriate for use under Windows, because under Windows open files cannot be moved or renamed - logging opens the files with exclusive locks - and so there is no need for such a handler. Furthermore, ST_INO is not supported under Windows; stat always returns zero for this value. This handler is based on a suggestion and patch by Chad J. Schroeder. """ def __init__(self, *args, **kwargs): logging.FileHandler.__init__(self, *args, **kwargs) if not os.path.exists(self.baseFilename): self.dev, self.ino = -1, -1 else: stat = os.stat(self.baseFilename) self.dev, self.ino = stat[ST_DEV], stat[ST_INO] def emit(self, record): """ Emit a record. First check if the underlying file has changed, and if it has, close the old stream and reopen the file to get the current stream. """ if not os.path.exists(self.baseFilename): stat = None changed = 1 else: stat = os.stat(self.baseFilename) changed = ((stat[ST_DEV] != self.dev) or (stat[ST_INO] != self.ino)) if changed and self.stream is not None: self.stream.flush() self.stream.close() self.stream = self._open() if stat is None: stat = os.stat(self.baseFilename) self.dev, self.ino = stat[ST_DEV], stat[ST_INO] logging.FileHandler.emit(self, record)
bsd-3-clause
ChameleonCloud/horizon
openstack_dashboard/utils/config.py
3
2247
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ This module contains utility functions for loading Horizon's configuration from .ini files using the oslo.config library. """ import six from oslo_config import cfg # XXX import the actual config groups here # from openstack_dashboard.config import config_compress def load_config(files=None, root_path=None, local_path=None): """Load the configuration from specified files.""" config = cfg.ConfigOpts() config.register_opts([ cfg.Opt('root_path', default=root_path), cfg.Opt('local_path', default=local_path), ]) # XXX register actual config groups here # theme_group = config_theme.register_config(config) if files is not None: config(args=[], default_config_files=files) return config def apply_config(config, target): """Apply the configuration on the specified settings module.""" # TODO(rdopiera) fill with actual config groups # apply_config_group(config.email, target, 'email') def apply_config_group(config_group, target, prefix=None): for key, value in six.iteritems(config_group): name = key.upper() if prefix: name = '_'.join([prefix.upper(), name]) target[name] = value def list_options(): # This is a really nasty hack to make the translatable strings # work without having to initialize Django and read all the settings. 
from django.apps import registry from django.conf import settings settings.configure() registry.apps.check_apps_ready = lambda: True config = load_config() return [ (name, [d['opt'] for d in group._opts.values()]) for (name, group) in config._groups.items() ]
apache-2.0
studio666/cjdns
node_build/dependencies/libuv/build/gyp/pylib/gyp/generator/msvs.py
13
124736
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import collections import copy import ntpath import os import posixpath import re import subprocess import sys import gyp.common import gyp.easy_xml as easy_xml import gyp.MSVSNew as MSVSNew import gyp.MSVSProject as MSVSProject import gyp.MSVSSettings as MSVSSettings import gyp.MSVSToolFile as MSVSToolFile import gyp.MSVSUserFile as MSVSUserFile import gyp.MSVSUtil as MSVSUtil import gyp.MSVSVersion as MSVSVersion from gyp.common import GypError # TODO: Remove once bots are on 2.7, http://crbug.com/241769 def _import_OrderedDict(): import collections try: return collections.OrderedDict except AttributeError: import gyp.ordered_dict return gyp.ordered_dict.OrderedDict OrderedDict = _import_OrderedDict() # Regular expression for validating Visual Studio GUIDs. If the GUID # contains lowercase hex letters, MSVS will be fine. However, # IncrediBuild BuildConsole will parse the solution file, but then # silently skip building the target causing hard to track down errors. # Note that this only happens with the BuildConsole, and does not occur # if IncrediBuild is executed from inside Visual Studio. This regex # validates that the string looks like a GUID with all uppercase hex # letters. 
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$') generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '.exe', 'STATIC_LIB_PREFIX': '', 'SHARED_LIB_PREFIX': '', 'STATIC_LIB_SUFFIX': '.lib', 'SHARED_LIB_SUFFIX': '.dll', 'INTERMEDIATE_DIR': '$(IntDir)', 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate', 'OS': 'win', 'PRODUCT_DIR': '$(OutDir)', 'LIB_DIR': '$(OutDir)lib', 'RULE_INPUT_ROOT': '$(InputName)', 'RULE_INPUT_DIRNAME': '$(InputDir)', 'RULE_INPUT_EXT': '$(InputExt)', 'RULE_INPUT_NAME': '$(InputFileName)', 'RULE_INPUT_PATH': '$(InputPath)', 'CONFIGURATION_NAME': '$(ConfigurationName)', } # The msvs specific sections that hold paths generator_additional_path_sections = [ 'msvs_cygwin_dirs', 'msvs_props', ] generator_additional_non_configuration_keys = [ 'msvs_cygwin_dirs', 'msvs_cygwin_shell', 'msvs_large_pdb', 'msvs_shard', 'msvs_external_builder', 'msvs_external_builder_out_dir', 'msvs_external_builder_build_cmd', 'msvs_external_builder_clean_cmd', ] # List of precompiled header related keys. precomp_keys = [ 'msvs_precompiled_header', 'msvs_precompiled_source', ] cached_username = None cached_domain = None # Based on http://code.activestate.com/recipes/576694/. 
class OrderedSet(collections.MutableSet):
  """A set that remembers first-insertion order (doubly-linked-list recipe).

  NOTE(review): collections.MutableSet moved to collections.abc in Python 3.3
  and the old alias was removed in 3.10; this is Python 2 era code and will
  need updating for modern interpreters.
  """

  def __init__(self, iterable=None):
    self.end = end = []
    end += [None, end, end]         # sentinel node for doubly linked list
    self.map = {}                   # key --> [key, prev, next]
    if iterable is not None:
      self |= iterable

  def __len__(self):
    return len(self.map)

  def discard(self, key):
    # Unlink the node from the doubly linked list; no-op if absent.
    if key in self.map:
      key, prev, next = self.map.pop(key)
      prev[2] = next
      next[1] = prev

  def __contains__(self, key):
    return key in self.map

  def add(self, key):
    # Append at the tail (before the sentinel) if not already present.
    if key not in self.map:
      end = self.end
      curr = end[1]
      curr[2] = end[1] = self.map[key] = [key, curr, end]

  def update(self, iterable):
    # Add every element of |iterable|, preserving first-insertion order.
    for i in iterable:
      if i not in self:
        self.add(i)

  def __iter__(self):
    end = self.end
    curr = end[2]
    while curr is not end:
      yield curr[0]
      curr = curr[2]


# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
# 64-bit.
def _GetDomainAndUserName():
  """Returns the (domain, username) pair for the current user.

  Results are cached in module globals. On non-Windows hosts placeholder
  values are returned; on Windows the environment is consulted first, then
  'net config Workstation' output is parsed as a fallback.
  """
  if sys.platform not in ('win32', 'cygwin'):
    return ('DOMAIN', 'USERNAME')
  global cached_username
  global cached_domain
  if not cached_domain or not cached_username:
    domain = os.environ.get('USERDOMAIN')
    username = os.environ.get('USERNAME')
    if not domain or not username:
      call = subprocess.Popen(['net', 'config', 'Workstation'],
                              stdout=subprocess.PIPE)
      config = call.communicate()[0]
      username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
      username_match = username_re.search(config)
      if username_match:
        username = username_match.group(1)
      domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
      domain_match = domain_re.search(config)
      if domain_match:
        domain = domain_match.group(1)
    cached_domain = domain
    cached_username = username
  return (cached_domain, cached_username)

# Prefix prepended to relative paths by _FixPath(); set by callers elsewhere
# in this module.
fixpath_prefix = None


def _NormalizedSource(source):
  """Normalize the path.

  But not if that gets rid of a variable, as this may expand to something
  larger than one directory.
  Arguments:
      source: The path to be normalized.

  Returns:
      The normalized path.
  """
  normalized = os.path.normpath(source)
  # Only accept the normalization if it did not swallow a '$(...)' variable.
  if source.count('$') == normalized.count('$'):
    source = normalized
  return source


def _FixPath(path):
  """Convert paths to a form that will make sense in a vcproj file.

  Arguments:
    path: The path to convert, may contain / etc.
  Returns:
    The path with all slashes made into backslashes.
  """
  if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
    path = os.path.join(fixpath_prefix, path)
  path = path.replace('/', '\\')
  path = _NormalizedSource(path)
  # Strip a single trailing backslash.
  if path and path[-1] == '\\':
    path = path[:-1]
  return path


def _FixPaths(paths):
  """Fix each of the paths of the list."""
  return [_FixPath(i) for i in paths]


def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
                                     list_excluded=True, msvs_version=None):
  """Converts a list split source file paths into a vcproj folder hierarchy.

  Arguments:
    sources: A list of source file paths split.
    prefix: A list of source file path layers meant to apply to each of
        sources.
    excluded: A set of excluded files.
    msvs_version: A MSVSVersion object.
  Returns:
    A hierarchy of filenames and MSVSProject.Filter objects that matches the
    layout of the source tree.
    For example:
    _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
                                     prefix=['joe'])
    -->
    [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
     MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
  """
  if not prefix: prefix = []
  result = []
  excluded_result = []
  folders = OrderedDict()
  # Gather files into the final result, excluded, or folders.
  for s in sources:
    if len(s) == 1:
      # Leaf file: either emit it or route it to the excluded bucket.
      filename = _NormalizedSource('\\'.join(prefix + s))
      if filename in excluded:
        excluded_result.append(filename)
      else:
        result.append(filename)
    elif msvs_version and not msvs_version.UsesVcxproj():
      # For MSVS 2008 and earlier, we need to process all files before walking
      # the sub folders.
      if not folders.get(s[0]):
        folders[s[0]] = []
      folders[s[0]].append(s[1:])
    else:
      # Recurse into the sub-directory and wrap its contents in a Filter.
      contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
                                                  excluded=excluded,
                                                  list_excluded=list_excluded,
                                                  msvs_version=msvs_version)
      contents = MSVSProject.Filter(s[0], contents=contents)
      result.append(contents)
  # Add a folder for excluded files.
  if excluded_result and list_excluded:
    excluded_folder = MSVSProject.Filter('_excluded_files',
                                         contents=excluded_result)
    result.append(excluded_folder)

  if msvs_version and msvs_version.UsesVcxproj():
    return result

  # Populate all the folders.
  for f in folders:
    contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
                                                excluded=excluded,
                                                list_excluded=list_excluded,
                                                msvs_version=msvs_version)
    contents = MSVSProject.Filter(f, contents=contents)
    result.append(contents)

  return result


def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
  # Convenience wrapper around _ToolSetOrAppend that ignores empty values.
  if not value: return
  _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)


def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
  # TODO(bradnelson): ugly hack, fix this more generally!!!
  if 'Directories' in setting or 'Dependencies' in setting:
    # Path-like settings must use Windows separators.
    if type(value) == str:
      value = value.replace('/', '\\')
    else:
      value = [i.replace('/', '\\') for i in value]
  if not tools.get(tool_name):
    tools[tool_name] = dict()
  tool = tools[tool_name]
  if tool.get(setting):
    if only_if_unset: return
    if type(tool[setting]) == list and type(value) == list:
      tool[setting] += value
    else:
      raise TypeError(
          'Appending "%s" to a non-list setting "%s" for tool "%s" is '
          'not allowed, previous value: %s' % (
              value, setting, tool_name, str(tool[setting])))
  else:
    tool[setting] = value


def _ConfigPlatform(config_data):
  # Default to Win32 when no explicit platform was specified.
  return config_data.get('msvs_configuration_platform', 'Win32')


def _ConfigBaseName(config_name, platform_name):
  # Strip a trailing '_<platform>' suffix from the configuration name.
  if config_name.endswith('_' + platform_name):
    return config_name[0:-len(platform_name) - 1]
  else:
    return config_name


def _ConfigFullName(config_name, config_data):
  # MSVS configuration names take the form 'Name|Platform'.
  platform_name = _ConfigPlatform(config_data)
  return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)


def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
                                quote_cmd, do_setup_env):
  """Builds a batch command line for a rule/action, optionally via cygwin."""
  # If the command references $(InputDir), emit a preamble that captures it
  # into %INPUTDIR% stripped of the project dir prefix and trailing slash.
  if [x for x in cmd if '$(InputDir)' in x]:
    input_dir_preamble = (
      'set INPUTDIR=$(InputDir)\n'
      'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
      'set INPUTDIR=%INPUTDIR:~0,-1%\n'
      )
  else:
    input_dir_preamble = ''

  if cygwin_shell:
    # Find path to cygwin.
    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
    # Prepare command.
    direct_cmd = cmd
    # Translate MSVS macros into cygwin-path equivalents for the bash side.
    direct_cmd = [i.replace('$(IntDir)',
                            '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(OutDir)',
                            '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(InputDir)',
                            '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
    if has_input_path:
      direct_cmd = [i.replace('$(InputPath)',
                              '`cygpath -m "${INPUTPATH}"`')
                    for i in direct_cmd]
    # Escape for embedding inside the outer bash -c "..." quoting below.
    direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
    # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
    direct_cmd = ' '.join(direct_cmd)
    # TODO(quote): regularize quoting path names throughout the module
    cmd = ''
    if do_setup_env:
      cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
    cmd += 'set CYGWIN=nontsec&& '
    # Only export the helper environment variables the command actually uses.
    if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
      cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
    if direct_cmd.find('INTDIR') >= 0:
      cmd += 'set INTDIR=$(IntDir)&& '
    if direct_cmd.find('OUTDIR') >= 0:
      cmd += 'set OUTDIR=$(OutDir)&& '
    if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
      cmd += 'set INPUTPATH=$(InputPath) && '
    cmd += 'bash -c "%(cmd)s"'
    cmd = cmd % {'cygwin_dir': cygwin_dir,
                 'cmd': direct_cmd}
    return input_dir_preamble + cmd
  else:
    # Convert cat --> type to mimic unix.
    if cmd[0] == 'cat':
      command = ['type']
    else:
      command = [cmd[0].replace('/', '\\')]
    # Add call before command to ensure that commands can be tied together one
    # after the other without aborting in Incredibuild, since IB makes a bat
    # file out of the raw command string, and some commands (like python) are
    # actually batch files themselves.
    command.insert(0, 'call')
    # Fix the paths
    # TODO(quote): This is a really ugly heuristic, and will miss path fixing
    #              for arguments like "--arg=path" or "/opt:path".
# If the argument starts with a slash or dash, it's probably a command line # switch arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]] arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments] arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] if quote_cmd: # Support a mode for using cmd directly. # Convert any paths to native form (first element is used directly). # TODO(quote): regularize quoting path names throughout the module arguments = ['"%s"' % i for i in arguments] # Collapse into a single command. return input_dir_preamble + ' '.join(command + arguments) def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): # Currently this weird argument munging is used to duplicate the way a # python script would need to be run as part of the chrome tree. # Eventually we should add some sort of rule_default option to set this # per project. For now the behavior chrome needs is the default. mcs = rule.get('msvs_cygwin_shell') if mcs is None: mcs = int(spec.get('msvs_cygwin_shell', 1)) elif isinstance(mcs, str): mcs = int(mcs) quote_cmd = int(rule.get('msvs_quote_cmd', 1)) return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env) def _AddActionStep(actions_dict, inputs, outputs, description, command): """Merge action into an existing list of actions. Care must be taken so that actions which have overlapping inputs either don't get assigned to the same input, or get collapsed into one. Arguments: actions_dict: dictionary keyed on input name, which maps to a list of dicts describing the actions attached to that input file. inputs: list of inputs outputs: list of outputs description: description of the action command: command line to execute """ # Require there to be at least one input (call sites will ensure this). 
  assert inputs
  action = {
      'inputs': inputs,
      'outputs': outputs,
      'description': description,
      'command': command,
  }
  # Pick where to stick this action.
  # While less than optimal in terms of build time, attach them to the first
  # input for now.
  chosen_input = inputs[0]
  # Add it there.
  if chosen_input not in actions_dict:
    actions_dict[chosen_input] = []
  actions_dict[chosen_input].append(action)


def _AddCustomBuildToolForMSVS(p, spec, primary_input,
                               inputs, outputs, description, cmd):
  """Add a custom build tool to execute something.

  Arguments:
    p: the target project
    spec: the target project dict
    primary_input: input file to attach the build tool to
    inputs: list of inputs
    outputs: list of outputs
    description: description of the action
    cmd: command line to execute
  """
  inputs = _FixPaths(inputs)
  outputs = _FixPaths(outputs)
  tool = MSVSProject.Tool(
      'VCCustomBuildTool',
      {'Description': description,
       'AdditionalDependencies': ';'.join(inputs),
       'Outputs': ';'.join(outputs),
       'CommandLine': cmd,
      })
  # Add to the properties of primary input for each config.
  for config_name, c_data in spec['configurations'].iteritems():
    p.AddFileConfig(_FixPath(primary_input),
                    _ConfigFullName(config_name, c_data), tools=[tool])


def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
  """Add actions accumulated into an actions_dict, merging as needed.

  Arguments:
    p: the target project
    spec: the target project dict
    actions_dict: dictionary keyed on input name, which maps to a list of
        dicts describing the actions attached to that input file.
  """
  for primary_input in actions_dict:
    # Merge all actions attached to this input into one custom build step.
    inputs = OrderedSet()
    outputs = OrderedSet()
    descriptions = []
    commands = []
    for action in actions_dict[primary_input]:
      inputs.update(OrderedSet(action['inputs']))
      outputs.update(OrderedSet(action['outputs']))
      descriptions.append(action['description'])
      commands.append(action['command'])
    # Add the custom build step for one input file.
    description = ', and also '.join(descriptions)
    # Commands are joined with CRLF so they run as consecutive batch lines.
    command = '\r\n'.join(commands)
    _AddCustomBuildToolForMSVS(p, spec,
                               primary_input=primary_input,
                               inputs=inputs,
                               outputs=outputs,
                               description=description,
                               cmd=command)


def _RuleExpandPath(path, input_file):
  """Given the input file to which a rule applied, string substitute a path.

  Arguments:
    path: a path to string expand
    input_file: the file to which the rule applied.
  Returns:
    The string substituted path.
  """
  path = path.replace('$(InputName)',
                      os.path.splitext(os.path.split(input_file)[1])[0])
  path = path.replace('$(InputDir)', os.path.dirname(input_file))
  path = path.replace('$(InputExt)',
                      os.path.splitext(os.path.split(input_file)[1])[1])
  path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
  path = path.replace('$(InputPath)', input_file)
  return path


def _FindRuleTriggerFiles(rule, sources):
  """Find the list of files which a particular rule applies to.

  Arguments:
    rule: the rule in question
    sources: the set of all known source files for this project
  Returns:
    The list of sources that trigger a particular rule.
  """
  return rule.get('rule_sources', [])


def _RuleInputsAndOutputs(rule, trigger_file):
  """Find the inputs and outputs generated by a rule.

  Arguments:
    rule: the rule in question.
    trigger_file: the main trigger for this rule.
  Returns:
    The pair of (inputs, outputs) involved in this rule.
  """
  raw_inputs = _FixPaths(rule.get('inputs', []))
  raw_outputs = _FixPaths(rule.get('outputs', []))
  inputs = OrderedSet()
  outputs = OrderedSet()
  # The trigger file itself is always an input.
  inputs.add(trigger_file)
  for i in raw_inputs:
    inputs.add(_RuleExpandPath(i, trigger_file))
  for o in raw_outputs:
    outputs.add(_RuleExpandPath(o, trigger_file))
  return (inputs, outputs)


def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
  """Generate a native rules file.
  Arguments:
    p: the target project
    rules: the set of rules to include
    output_dir: the directory in which the project/gyp resides
    spec: the project dict
    options: global generator options
  """
  rules_filename = '%s%s.rules' % (spec['target_name'],
                                   options.suffix)
  rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
                                   spec['target_name'])
  # Add each rule.
  for r in rules:
    rule_name = r['rule_name']
    rule_ext = r['extension']
    inputs = _FixPaths(r.get('inputs', []))
    outputs = _FixPaths(r.get('outputs', []))
    # Skip a rule with no action and no inputs.
    if 'action' not in r and not r.get('rule_sources', []):
      continue
    cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
                                   do_setup_env=True)
    rules_file.AddCustomBuildRule(name=rule_name,
                                  description=r.get('message', rule_name),
                                  extensions=[rule_ext],
                                  additional_dependencies=inputs,
                                  outputs=outputs,
                                  cmd=cmd)
  # Write out rules file.
  rules_file.WriteIfChanged()

  # Add rules file to project.
  p.AddToolFile(rules_filename)


def _Cygwinify(path):
  # Swap MSVS output/intermediate macros for the cygwin-path variables that
  # the generated makefile defines (see _GenerateExternalRules).
  path = path.replace('$(OutDir)', '$(OutDirCygwin)')
  path = path.replace('$(IntDir)', '$(IntDirCygwin)')
  return path


def _GenerateExternalRules(rules, output_dir, spec,
                           sources, options, actions_to_add):
  """Generate an external makefile to do a set of rules.

  Arguments:
    rules: the list of rules to include
    output_dir: path containing project and gyp files
    spec: project specification data
    sources: set of sources known
    options: global generator options
    actions_to_add: The list of actions we will add to.
  """
  filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
  mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
  # Find cygwin style versions of some paths.
  mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
  mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
  # Gather stuff needed to emit all: target.
  all_inputs = OrderedSet()
  all_outputs = OrderedSet()
  all_output_dirs = OrderedSet()
  first_outputs = []
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      all_inputs.update(OrderedSet(inputs))
      all_outputs.update(OrderedSet(outputs))
      # Only use one target from each rule as the dependency for
      # 'all' so we don't try to build each rule multiple times.
      first_outputs.append(list(outputs)[0])
      # Get the unique output directories for this rule.
      output_dirs = [os.path.split(i)[0] for i in outputs]
      for od in output_dirs:
        all_output_dirs.add(od)
  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
  # Write out all: target, including mkdir for each output directory.
  mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
  for od in all_output_dirs:
    if od:
      mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
  mk_file.write('\n')
  # Define how each output is generated.
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      # Get all the inputs and outputs for this rule for this trigger file.
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      inputs = [_Cygwinify(i) for i in inputs]
      outputs = [_Cygwinify(i) for i in outputs]
      # Prepare the command line for this rule.
      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
      cmd = ['"%s"' % i for i in cmd]
      cmd = ' '.join(cmd)
      # Add it to the makefile.
      mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
      mk_file.write('\t%s\n\n' % cmd)
  # Close up the file.
  mk_file.close()

  # Add makefile to list of sources.
  sources.add(filename)
  # Add a build action to call makefile.
  cmd = ['make',
         'OutDir=$(OutDir)',
         'IntDir=$(IntDir)',
         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
         '-f', filename]
  cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
  # Insert makefile as 0'th input, so it gets the action attached there,
  # as this is easier to understand from in the IDE.
  all_inputs = list(all_inputs)
  all_inputs.insert(0, filename)
  _AddActionStep(actions_to_add,
                 inputs=_FixPaths(all_inputs),
                 outputs=_FixPaths(all_outputs),
                 description='Running external rules for %s' %
                     spec['target_name'],
                 command=cmd)


def _EscapeEnvironmentVariableExpansion(s):
  """Escapes % characters.

  Escapes any % characters so that Windows-style environment variable
  expansions will leave them alone.
  See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
  to understand why we have to do this.

  Args:
    s: The string to be escaped.

  Returns:
    The escaped string.
  """
  s = s.replace('%', '%%')
  return s


quote_replacer_regex = re.compile(r'(\\*)"')


def _EscapeCommandLineArgumentForMSVS(s):
  """Escapes a Windows command-line argument.

  So that the Win32 CommandLineToArgv function will turn the escaped result
  back into the original string.
  See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
  ("Parsing C++ Command-Line Arguments") to understand why we have to do
  this.

  Args:
    s: the string to be escaped.
  Returns:
    the escaped string.
  """

  def _Replace(match):
    # For a literal quote, CommandLineToArgv requires an odd number of
    # backslashes preceding it, and it produces half as many literal
    # backslashes (rounded down). So we need to produce 2n+1 backslashes.
    return 2 * match.group(1) + '\\"'

  # Escape all quotes so that they are interpreted literally.
  s = quote_replacer_regex.sub(_Replace, s)
  # Now add unescaped quotes so that any whitespace is interpreted literally.
  s = '"' + s + '"'
  return s


delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')


def _EscapeVCProjCommandLineArgListItem(s):
  """Escapes command line arguments for MSVS.

  The VCProj format stores string lists in a single string using commas and
  semi-colons as separators, which must be quoted if they are to be
  interpreted literally.
  However, command-line arguments may already have quotes, and the VCProj
  parser is ignorant of the backslash escaping convention used by
  CommandLineToArgv, so the command-line quotes and the VCProj quotes may not
  be the same quotes. So to store a general command-line argument in a VCProj
  list, we need to parse the existing quoting according to VCProj's convention
  and quote any delimiters that are not already quoted by that convention.
  The quotes that we add will also be seen by CommandLineToArgv, so if
  backslashes precede them then we also have to escape those backslashes
  according to the CommandLineToArgv convention.

  Args:
    s: the string to be escaped.
  Returns:
    the escaped string.
  """

  def _Replace(match):
    # For a non-literal quote, CommandLineToArgv requires an even number of
    # backslashes preceding it, and it produces half as many literal
    # backslashes. So we need to produce 2n backslashes.
    return 2 * match.group(1) + '"' + match.group(2) + '"'

  segments = s.split('"')
  # The unquoted segments are at the even-numbered indices.
  for i in range(0, len(segments), 2):
    segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
  # Concatenate back into a single string
  s = '"'.join(segments)
  if len(segments) % 2 == 0:
    # String ends while still quoted according to VCProj's convention. This
    # means the delimiter and the next list item that follow this one in the
    # .vcproj file will be misinterpreted as part of this item. There is
    # nothing we can do about this. Adding an extra quote would correct the
    # problem in the VCProj but cause the same problem on the final
    # command-line. Moving the item to the end of the list does works, but
    # that's only possible if there's only one such item. Let's just warn the
    # user.
    print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
                          'quotes in ' + s)
  return s


def _EscapeCppDefineForMSVS(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  s = _EscapeEnvironmentVariableExpansion(s)
  s = _EscapeCommandLineArgumentForMSVS(s)
  s = _EscapeVCProjCommandLineArgListItem(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions
  # for some reason. Octal-encode to work around that.
  s = s.replace('#', '\\%03o' % ord('#'))
  return s


quote_replacer_regex2 = re.compile(r'(\\+)"')


def _EscapeCommandLineArgumentForMSBuild(s):
  """Escapes a Windows command-line argument for use by MSBuild."""

  def _Replace(match):
    # Double the backslashes preceding a quote and escape the quote itself.
    return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'

  # Escape all quotes so that they are interpreted literally.
  s = quote_replacer_regex2.sub(_Replace, s)
  return s


def _EscapeMSBuildSpecialCharacters(s):
  """Percent-encodes the characters MSBuild treats specially."""
  escape_dictionary = {
      '%': '%25',
      '$': '%24',
      '@': '%40',
      "'": '%27',
      ';': '%3B',
      '?': '%3F',
      '*': '%2A'
      }
  result = ''.join([escape_dictionary.get(c, c) for c in s])
  return result


def _EscapeCppDefineForMSBuild(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  s = _EscapeEnvironmentVariableExpansion(s)
  s = _EscapeCommandLineArgumentForMSBuild(s)
  s = _EscapeMSBuildSpecialCharacters(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions
  # for some reason. Octal-encode to work around that.
  s = s.replace('#', '\\%03o' % ord('#'))
  return s


def _GenerateRulesForMSVS(p, output_dir, options, spec,
                          sources, excluded_sources,
                          actions_to_add):
  """Generate all the rules for a particular project.
  Arguments:
    p: the project
    output_dir: directory to emit rules to
    options: global options passed to the generator
    spec: the specification for this project
    sources: the set of all known source files in this project
    excluded_sources: the set of sources excluded from normal processing
    actions_to_add: deferred list of actions to add in
  """
  rules = spec.get('rules', [])
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]

  # Handle rules that use a native rules file.
  if rules_native:
    _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)

  # Handle external rules (non-native rules).
  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)


def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
  # Add outputs generated by each rule (if applicable).
  for rule in rules:
    # Skip rules that do not treat their outputs as sources.
    if int(rule.get('process_outputs_as_sources', False)):
      # Add in the outputs from this rule.
      trigger_files = _FindRuleTriggerFiles(rule, sources)
      for trigger_file in trigger_files:
        inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
        inputs = OrderedSet(_FixPaths(inputs))
        outputs = OrderedSet(_FixPaths(outputs))
        inputs.remove(_FixPath(trigger_file))
        sources.update(inputs)
        if not spec.get('msvs_external_builder'):
          excluded_sources.update(inputs)
        sources.update(outputs)


def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
  """Take inputs with actions attached out of the list of exclusions.

  Arguments:
    excluded_sources: list of source files not to be built.
    actions_to_add: dict of actions keyed on source file they're attached to.
  Returns:
    excluded_sources with files that have actions attached removed.
""" must_keep = OrderedSet(_FixPaths(actions_to_add.keys())) return [s for s in excluded_sources if s not in must_keep] def _GetDefaultConfiguration(spec): return spec['configurations'][spec['default_configuration']] def _GetGuidOfProject(proj_path, spec): """Get the guid for the project. Arguments: proj_path: Path of the vcproj or vcxproj file to generate. spec: The target dictionary containing the properties of the target. Returns: the guid. Raises: ValueError: if the specified GUID is invalid. """ # Pluck out the default configuration. default_config = _GetDefaultConfiguration(spec) # Decide the guid of the project. guid = default_config.get('msvs_guid') if guid: if VALID_MSVS_GUID_CHARS.match(guid) is None: raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % (guid, VALID_MSVS_GUID_CHARS.pattern)) guid = '{%s}' % guid guid = guid or MSVSNew.MakeGuid(proj_path) return guid def _GetMsbuildToolsetOfProject(proj_path, spec, version): """Get the platform toolset for the project. Arguments: proj_path: Path of the vcproj or vcxproj file to generate. spec: The target dictionary containing the properties of the target. version: The MSVSVersion object. Returns: the platform toolset string or None. """ # Pluck out the default configuration. default_config = _GetDefaultConfiguration(spec) toolset = default_config.get('msbuild_toolset') if not toolset and version.DefaultToolset(): toolset = version.DefaultToolset() return toolset def _GenerateProject(project, options, version, generator_flags): """Generates a vcproj file. Arguments: project: the MSVSProject object. options: global generator options. version: the MSVSVersion object. generator_flags: dict of generator-specific flags. Returns: A list of source files that cannot be found on disk. """ default_config = _GetDefaultConfiguration(project.spec) # Skip emitting anything if told to with msvs_existing_vcproj option. 
  if default_config.get('msvs_existing_vcproj'):
    return []

  # Dispatch on project format: vcxproj (MSBuild) vs legacy vcproj.
  if version.UsesVcxproj():
    return _GenerateMSBuildProject(project, options, version, generator_flags)
  else:
    return _GenerateMSVSProject(project, options, version, generator_flags)


def _GenerateMSVSProject(project, options, version, generator_flags):
  """Generates a .vcproj file. It may create .rules and .user files too.

  Arguments:
    project: The project object we will generate the file for.
    options: Global options passed to the generator.
    version: The VisualStudioVersion object.
    generator_flags: dict of generator-specific flags.
  """
  spec = project.spec
  gyp.common.EnsureDirExists(project.path)

  platforms = _GetUniquePlatforms(spec)
  p = MSVSProject.Writer(project.path, version, spec['target_name'],
                         project.guid, platforms)

  # Get directory project file is in.
  project_dir = os.path.split(project.path)[0]
  gyp_path = _NormalizedSource(project.build_file)
  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)

  config_type = _GetMSVSConfigurationType(spec, project.build_file)
  for config_name, config in spec['configurations'].iteritems():
    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)

  # Prepare list of sources and excluded sources.
  gyp_file = os.path.split(project.build_file)[1]
  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
                                                    gyp_file)

  # Add rules.
  actions_to_add = {}
  _GenerateRulesForMSVS(p, project_dir, options, spec,
                        sources, excluded_sources,
                        actions_to_add)
  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
  sources, excluded_sources, excluded_idl = (
      _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
                                                sources, excluded_sources,
                                                list_excluded, version))

  # Add in files.
  missing_sources = _VerifySourcesExist(sources, project_dir)
  p.AddFiles(sources)

  _AddToolFilesToMSVS(p, spec)
  _HandlePreCompiledHeaders(p, sources, spec)
  _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
  _AddCopies(actions_to_add, spec)
  _WriteMSVSUserFile(project.path, version, spec)

  # NOTE: this stanza must appear after all actions have been decided.
  # Don't exclude sources with actions attached, or they won't run.
  excluded_sources = _FilterActionsFromExcluded(
      excluded_sources, actions_to_add)
  _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
                              list_excluded)
  _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)

  # Write it out.
  p.WriteIfChanged()

  return missing_sources


def _GetUniquePlatforms(spec):
  """Returns the list of unique platforms for this spec, e.g ['win32', ...].

  Arguments:
    spec: The target dictionary containing the properties of the target.
  Returns:
    The list of unique platform strings, in first-seen order.
  """
  # Gather list of unique platforms.
  platforms = OrderedSet()
  for configuration in spec['configurations']:
    platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
  platforms = list(platforms)
  return platforms


def _CreateMSVSUserFile(proj_path, version, spec):
  """Generates a .user file for the user running this Gyp program.

  Arguments:
    proj_path: The path of the project file being created. The .user file
               shares the same path (with an appropriate suffix).
    version: The VisualStudioVersion object.
    spec: The target dictionary containing the properties of the target.
  Returns:
    The MSVSUserFile object created.
  """
  (domain, username) = _GetDomainAndUserName()
  vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
  user_file = MSVSUserFile.Writer(vcuser_filename, version,
                                  spec['target_name'])
  return user_file


def _GetMSVSConfigurationType(spec, build_file):
  """Returns the configuration type for this project.

  It's a number defined by Microsoft. May raise an exception.
  Args:
      spec: The target dictionary containing the properties of the target.
      build_file: The path of the gyp file.
  Returns:
      A string holding the configuration type number expected by MSVS
      (e.g. '1' for an executable).
  """
  try:
    config_type = {
        'executable': '1',  # .exe
        'shared_library': '2',  # .dll
        'loadable_module': '2',  # .dll
        'static_library': '4',  # .lib
        'none': '10',  # Utility type
        }[spec['type']]
  except KeyError:
    if spec.get('type'):
      raise GypError('Target type %s is not a valid target type for '
                     'target %s in %s.' %
                     (spec['type'], spec['target_name'], build_file))
    else:
      raise GypError('Missing type field for target %s in %s.' %
                     (spec['target_name'], build_file))
  return config_type


def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
  """Adds a configuration to the MSVS project.

  Many settings in a vcproj file are specific to a configuration. This
  function generates the main part of the vcproj file that's configuration
  specific.

  Arguments:
    p: The target project being generated.
    spec: The target dictionary containing the properties of the target.
    config_type: The configuration type, a number as defined by Microsoft.
    config_name: The name of the configuration.
    config: The dictionary that defines the special processing to be done
            for this configuration.
  """
  # Get the information for this configuration
  include_dirs, resource_include_dirs = _GetIncludeDirs(config)
  libraries = _GetLibraries(spec)
  library_dirs = _GetLibraryDirs(config)
  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
  defines = _GetDefines(config)
  defines = [_EscapeCppDefineForMSVS(d) for d in defines]
  disabled_warnings = _GetDisabledWarnings(config)
  prebuild = config.get('msvs_prebuild')
  postbuild = config.get('msvs_postbuild')
  def_file = _GetModuleDefinition(spec)
  precompiled_header = config.get('msvs_precompiled_header')

  # Prepare the list of tools as a dictionary.
  tools = dict()
  # Add in user specified msvs_settings.
  # Validate user-specified msvs_settings before merging them in, so bad
  # setting names fail loudly here rather than producing a broken vcproj.
  msvs_settings = config.get('msvs_settings', {})
  MSVSSettings.ValidateMSVSSettings(msvs_settings)

  # Prevent default library inheritance from the environment.
  # NOTE(review): $(NOINHERIT) appears to suppress inherited
  # AdditionalDependencies from property sheets — confirm against MSVS docs.
  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies',
              ['$(NOINHERIT)'])
  # Copy every user setting into `tools`, tool by tool.  This happens first,
  # so the gyp-derived values appended below are merged on top of them.
  for tool in msvs_settings:
    settings = config['msvs_settings'][tool]
    for setting in settings:
      _ToolAppend(tools, tool, setting, settings[setting])
  # Add the information to the appropriate tool
  _ToolAppend(tools, 'VCCLCompilerTool',
              'AdditionalIncludeDirectories', include_dirs)
  _ToolAppend(tools, 'VCResourceCompilerTool',
              'AdditionalIncludeDirectories', resource_include_dirs)
  # Add in libraries.
  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
              library_dirs)
  # Only set OutputFile when the user's msvs_settings did not already do so.
  if out_file:
    _ToolAppend(tools, vc_tool, 'OutputFile', out_file,
                only_if_unset=True)
  # Add defines.
  _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
  _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
              defines)
  # Change program database directory to prevent collisions.
  _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
              '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
  # Add disabled warnings.
  _ToolAppend(tools, 'VCCLCompilerTool',
              'DisableSpecificWarnings', disabled_warnings)
  # Add Pre-build.
  _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
  # Add Post-build.
  _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
  # Turn on precompiled headers if appropriate.
  if precompiled_header:
    # Only the basename matters to the compiler settings below.
    precompiled_header = os.path.split(precompiled_header)[1]
    _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
    _ToolAppend(tools, 'VCCLCompilerTool',
                'PrecompiledHeaderThrough', precompiled_header)
    _ToolAppend(tools, 'VCCLCompilerTool',
                'ForcedIncludeFiles', precompiled_header)
  # Loadable modules don't generate import libraries;
  # tell dependent projects to not expect one.
if spec['type'] == 'loadable_module': _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true') # Set the module definition file if any. if def_file: _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file) _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) def _GetIncludeDirs(config): """Returns the list of directories to be used for #include directives. Arguments: config: The dictionary that defines the special processing to be done for this configuration. Returns: The list of directory paths. """ # TODO(bradnelson): include_dirs should really be flexible enough not to # require this sort of thing. include_dirs = ( config.get('include_dirs', []) + config.get('msvs_system_include_dirs', [])) resource_include_dirs = config.get('resource_include_dirs', include_dirs) include_dirs = _FixPaths(include_dirs) resource_include_dirs = _FixPaths(resource_include_dirs) return include_dirs, resource_include_dirs def _GetLibraryDirs(config): """Returns the list of directories to be used for library search paths. Arguments: config: The dictionary that defines the special processing to be done for this configuration. Returns: The list of directory paths. """ library_dirs = config.get('library_dirs', []) library_dirs = _FixPaths(library_dirs) return library_dirs def _GetLibraries(spec): """Returns the list of libraries for this configuration. Arguments: spec: The target dictionary containing the properties of the target. Returns: The list of directory paths. """ libraries = spec.get('libraries', []) # Strip out -l, as it is not used on windows (but is needed so we can pass # in libraries that are assumed to be in the default library path). # Also remove duplicate entries, leaving only the last duplicate, while # preserving order. 
found = OrderedSet() unique_libraries_list = [] for entry in reversed(libraries): library = re.sub('^\-l', '', entry) if not os.path.splitext(library)[1]: library += '.lib' if library not in found: found.add(library) unique_libraries_list.append(library) unique_libraries_list.reverse() return unique_libraries_list def _GetOutputFilePathAndTool(spec, msbuild): """Returns the path and tool to use for this target. Figures out the path of the file this spec will create and the name of the VC tool that will create it. Arguments: spec: The target dictionary containing the properties of the target. Returns: A triple of (file path, name of the vc tool, name of the msbuild tool) """ # Select a name for the output file. out_file = '' vc_tool = '' msbuild_tool = '' output_file_map = { 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'), 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'), } output_file_props = output_file_map.get(spec['type']) if output_file_props and int(spec.get('msvs_auto_output_file', 1)): vc_tool, msbuild_tool, out_dir, suffix = output_file_props if spec.get('standalone_static_library', 0): out_dir = '$(OutDir)' out_dir = spec.get('product_dir', out_dir) product_extension = spec.get('product_extension') if product_extension: suffix = '.' + product_extension elif msbuild: suffix = '$(TargetExt)' prefix = spec.get('product_prefix', '') product_name = spec.get('product_name', '$(ProjectName)') out_file = ntpath.join(out_dir, prefix + product_name + suffix) return out_file, vc_tool, msbuild_tool def _GetOutputTargetExt(spec): """Returns the extension for this target, including the dot If product_extension is specified, set target_extension to this to avoid MSB8012, returns None otherwise. Ignores any target_extension settings in the input files. 
  Arguments:
    spec: The target dictionary containing the properties of the target.
  Returns:
    A string with the extension, or None
  """
  target_extension = spec.get('product_extension')
  if target_extension:
    return '.' + target_extension
  return None


def _GetDefines(config):
  """Returns the list of preprocessor definitions for this configuration.

  Arguments:
    config: The dictionary that defines the special processing to be done
            for this configuration.
  Returns:
    The list of preprocessor definitions.
  """
  defines = []
  for d in config.get('defines', []):
    # A define may be given as ['NAME', value]; join it into 'NAME=value'.
    if type(d) == list:
      fd = '='.join([str(dpart) for dpart in d])
    else:
      fd = str(d)
    defines.append(fd)
  return defines


def _GetDisabledWarnings(config):
  """Returns 'msvs_disabled_warnings' from config as a list of strings."""
  return [str(i) for i in config.get('msvs_disabled_warnings', [])]


def _GetModuleDefinition(spec):
  """Returns the single .def file in the target's sources, fixed up.

  Only linker-driven target types can use a module definition file.
  Returns '' when there is none; raises ValueError if more than one
  .def file is listed.
  """
  def_file = ''
  if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
    def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
    if len(def_files) == 1:
      def_file = _FixPath(def_files[0])
    elif def_files:
      raise ValueError(
          'Multiple module definition files in one target, target %s lists '
          'multiple .def files: %s' % (
              spec['target_name'], ' '.join(def_files)))
  return def_file


def _ConvertToolsToExpectedForm(tools):
  """Convert tools to a form expected by Visual Studio.

  Arguments:
    tools: A dictionary of settings; the tool name is the key.
  Returns:
    A list of Tool objects.
  """
  tool_list = []
  for tool, settings in tools.iteritems():
    # Collapse settings with lists.
    settings_fixed = {}
    for setting, value in settings.iteritems():
      if type(value) == list:
        # Linker dependencies and AdditionalOptions are space-separated in
        # the vcproj; every other list setting is semicolon-separated.
        if ((tool == 'VCLinkerTool' and
             setting == 'AdditionalDependencies') or
            setting == 'AdditionalOptions'):
          settings_fixed[setting] = ' '.join(value)
        else:
          settings_fixed[setting] = ';'.join(value)
      else:
        settings_fixed[setting] = value
    # Add in this tool.
tool_list.append(MSVSProject.Tool(tool, settings_fixed)) return tool_list def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): """Add to the project file the configuration specified by config. Arguments: p: The target project being generated. spec: the target project dict. tools: A dictionary of settings; the tool name is the key. config: The dictionary that defines the special processing to be done for this configuration. config_type: The configuration type, a number as defined by Microsoft. config_name: The name of the configuration. """ attributes = _GetMSVSAttributes(spec, config, config_type) # Add in this configuration. tool_list = _ConvertToolsToExpectedForm(tools) p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list) def _GetMSVSAttributes(spec, config, config_type): # Prepare configuration attributes. prepared_attrs = {} source_attrs = config.get('msvs_configuration_attributes', {}) for a in source_attrs: prepared_attrs[a] = source_attrs[a] # Add props files. vsprops_dirs = config.get('msvs_props', []) vsprops_dirs = _FixPaths(vsprops_dirs) if vsprops_dirs: prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs) # Set configuration type. 
prepared_attrs['ConfigurationType'] = config_type output_dir = prepared_attrs.get('OutputDirectory', '$(SolutionDir)$(ConfigurationName)') prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\' if 'IntermediateDirectory' not in prepared_attrs: intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\' else: intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\' intermediate = MSVSSettings.FixVCMacroSlashes(intermediate) prepared_attrs['IntermediateDirectory'] = intermediate return prepared_attrs def _AddNormalizedSources(sources_set, sources_array): sources_set.update(_NormalizedSource(s) for s in sources_array) def _PrepareListOfSources(spec, generator_flags, gyp_file): """Prepare list of sources and excluded sources. Besides the sources specified directly in the spec, adds the gyp file so that a change to it will cause a re-compile. Also adds appropriate sources for actions and copies. Assumes later stage will un-exclude files which have custom build steps attached. Arguments: spec: The target dictionary containing the properties of the target. gyp_file: The name of the gyp file. Returns: A pair of (list of sources, list of excluded sources). The sources will be relative to the gyp file. """ sources = OrderedSet() _AddNormalizedSources(sources, spec.get('sources', [])) excluded_sources = OrderedSet() # Add in the gyp file. if not generator_flags.get('standalone'): sources.add(gyp_file) # Add in 'action' inputs and outputs. for a in spec.get('actions', []): inputs = a['inputs'] inputs = [_NormalizedSource(i) for i in inputs] # Add all inputs to sources and excluded sources. inputs = OrderedSet(inputs) sources.update(inputs) if not spec.get('msvs_external_builder'): excluded_sources.update(inputs) if int(a.get('process_outputs_as_sources', False)): _AddNormalizedSources(sources, a.get('outputs', [])) # Add in 'copies' inputs and outputs. 
for cpy in spec.get('copies', []): _AddNormalizedSources(sources, cpy.get('files', [])) return (sources, excluded_sources) def _AdjustSourcesAndConvertToFilterHierarchy( spec, options, gyp_dir, sources, excluded_sources, list_excluded, version): """Adjusts the list of sources and excluded sources. Also converts the sets to lists. Arguments: spec: The target dictionary containing the properties of the target. options: Global generator options. gyp_dir: The path to the gyp file being processed. sources: A set of sources to be included for this project. excluded_sources: A set of sources to be excluded for this project. version: A MSVSVersion object. Returns: A trio of (list of sources, list of excluded sources, path of excluded IDL file) """ # Exclude excluded sources coming into the generator. excluded_sources.update(OrderedSet(spec.get('sources_excluded', []))) # Add excluded sources into sources for good measure. sources.update(excluded_sources) # Convert to proper windows form. # NOTE: sources goes from being a set to a list here. # NOTE: excluded_sources goes from being a set to a list here. sources = _FixPaths(sources) # Convert to proper windows form. excluded_sources = _FixPaths(excluded_sources) excluded_idl = _IdlFilesHandledNonNatively(spec, sources) precompiled_related = _GetPrecompileRelatedFiles(spec) # Find the excluded ones, minus the precompiled header related ones. fully_excluded = [i for i in excluded_sources if i not in precompiled_related] # Convert to folders and the right slashes. sources = [i.split('\\') for i in sources] sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, list_excluded=list_excluded, msvs_version=version) # Prune filters with a single child to flatten ugly directory structures # such as ../../src/modules/module1 etc. 
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): sources = sources[0].contents return sources, excluded_sources, excluded_idl def _IdlFilesHandledNonNatively(spec, sources): # If any non-native rules use 'idl' as an extension exclude idl files. # Gather a list here to use later. using_idl = False for rule in spec.get('rules', []): if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): using_idl = True break if using_idl: excluded_idl = [i for i in sources if i.endswith('.idl')] else: excluded_idl = [] return excluded_idl def _GetPrecompileRelatedFiles(spec): # Gather a list of precompiled header related sources. precompiled_related = [] for _, config in spec['configurations'].iteritems(): for k in precomp_keys: f = config.get(k) if f: precompiled_related.append(_FixPath(f)) return precompiled_related def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) for file_name, excluded_configs in exclusions.iteritems(): if (not list_excluded and len(excluded_configs) == len(spec['configurations'])): # If we're not listing excluded files, then they won't appear in the # project, so don't try to configure them to be excluded. pass else: for config_name, config in excluded_configs: p.AddFileConfig(file_name, _ConfigFullName(config_name, config), {'ExcludedFromBuild': 'true'}) def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): exclusions = {} # Exclude excluded sources from being built. for f in excluded_sources: excluded_configs = [] for config_name, config in spec['configurations'].iteritems(): precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] # Don't do this for ones that are precompiled header related. if f not in precomped: excluded_configs.append((config_name, config)) exclusions[f] = excluded_configs # If any non-native rules use 'idl' as an extension exclude idl files. 
# Exclude them now. for f in excluded_idl: excluded_configs = [] for config_name, config in spec['configurations'].iteritems(): excluded_configs.append((config_name, config)) exclusions[f] = excluded_configs return exclusions def _AddToolFilesToMSVS(p, spec): # Add in tool files (rules). tool_files = OrderedSet() for _, config in spec['configurations'].iteritems(): for f in config.get('msvs_tool_files', []): tool_files.add(f) for f in tool_files: p.AddToolFile(f) def _HandlePreCompiledHeaders(p, sources, spec): # Pre-compiled header source stubs need a different compiler flag # (generate precompiled header) and any source file not of the same # kind (i.e. C vs. C++) as the precompiled header source stub needs # to have use of precompiled headers disabled. extensions_excluded_from_precompile = [] for config_name, config in spec['configurations'].iteritems(): source = config.get('msvs_precompiled_source') if source: source = _FixPath(source) # UsePrecompiledHeader=1 for if using precompiled headers. tool = MSVSProject.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '1'}) p.AddFileConfig(source, _ConfigFullName(config_name, config), {}, tools=[tool]) basename, extension = os.path.splitext(source) if extension == '.c': extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] else: extensions_excluded_from_precompile = ['.c'] def DisableForSourceTree(source_tree): for source in source_tree: if isinstance(source, MSVSProject.Filter): DisableForSourceTree(source.contents) else: basename, extension = os.path.splitext(source) if extension in extensions_excluded_from_precompile: for config_name, config in spec['configurations'].iteritems(): tool = MSVSProject.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '0', 'ForcedIncludeFiles': '$(NOINHERIT)'}) p.AddFileConfig(_FixPath(source), _ConfigFullName(config_name, config), {}, tools=[tool]) # Do nothing if there was no precompiled source. 
if extensions_excluded_from_precompile: DisableForSourceTree(sources) def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): # Add actions. actions = spec.get('actions', []) # Don't setup_env every time. When all the actions are run together in one # batch file in VS, the PATH will grow too long. # Membership in this set means that the cygwin environment has been set up, # and does not need to be set up again. have_setup_env = set() for a in actions: # Attach actions to the gyp file if nothing else is there. inputs = a.get('inputs') or [relative_path_of_gyp_file] attached_to = inputs[0] need_setup_env = attached_to not in have_setup_env cmd = _BuildCommandLineForRule(spec, a, has_input_path=False, do_setup_env=need_setup_env) have_setup_env.add(attached_to) # Add the action. _AddActionStep(actions_to_add, inputs=inputs, outputs=a.get('outputs', []), description=a.get('message', a['action_name']), command=cmd) def _WriteMSVSUserFile(project_path, version, spec): # Add run_as and test targets. if 'run_as' in spec: run_as = spec['run_as'] action = run_as.get('action', []) environment = run_as.get('environment', []) working_directory = run_as.get('working_directory', '.') elif int(spec.get('test', 0)): action = ['$(TargetPath)', '--gtest_print_time'] environment = [] working_directory = '.' else: return # Nothing to add # Write out the user file. user_file = _CreateMSVSUserFile(project_path, version, spec) for config_name, c_data in spec['configurations'].iteritems(): user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), action, environment, working_directory) user_file.WriteIfChanged() def _AddCopies(actions_to_add, spec): copies = _GetCopies(spec) for inputs, outputs, cmd, description in copies: _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs, description=description, command=cmd) def _GetCopies(spec): copies = [] # Add copies. 
for cpy in spec.get('copies', []): for src in cpy.get('files', []): dst = os.path.join(cpy['destination'], os.path.basename(src)) # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and # outputs, so do the same for our generated command line. if src.endswith('/'): src_bare = src[:-1] base_dir = posixpath.split(src_bare)[0] outer_dir = posixpath.split(src_bare)[1] cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % ( _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir) copies.append(([src], ['dummy_copies', dst], cmd, 'Copying %s to %s' % (src, dst))) else: cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst)) copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst))) return copies def _GetPathDict(root, path): # |path| will eventually be empty (in the recursive calls) if it was initially # relative; otherwise it will eventually end up as '\', 'D:\', etc. if not path or path.endswith(os.sep): return root parent, folder = os.path.split(path) parent_dict = _GetPathDict(root, parent) if folder not in parent_dict: parent_dict[folder] = dict() return parent_dict[folder] def _DictsToFolders(base_path, bucket, flat): # Convert to folders recursively. children = [] for folder, contents in bucket.iteritems(): if type(contents) == dict: folder_children = _DictsToFolders(os.path.join(base_path, folder), contents, flat) if flat: children += folder_children else: folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder), name='(' + folder + ')', entries=folder_children) children.append(folder_children) else: children.append(contents) return children def _CollapseSingles(parent, node): # Recursively explorer the tree of dicts looking for projects which are # the sole item in a folder which has the same name as the project. Bring # such projects up one level. 
  if (type(node) == dict and
      len(node) == 1 and
      node.keys()[0] == parent + '.vcproj'):
    # Sole entry is a project named after its folder: hoist it up one level.
    # (dict.keys()[0] is Python-2-only indexing, consistent with this file.)
    return node[node.keys()[0]]
  if type(node) != dict:
    return node
  for child in node:
    node[child] = _CollapseSingles(child, node[child])
  return node


def _GatherSolutionFolders(sln_projects, project_objects, flat):
  """Builds the solution's folder hierarchy from the project paths.

  Arguments:
    sln_projects: list of qualified targets in the solution.
    project_objects: dict mapping qualified target to MSVSProject.
    flat: if true, produce a flat list instead of nested folders.
  Returns:
    A list of MSVSFolder/MSVSProject entries for the solution.
  """
  root = {}
  # Convert into a tree of dicts on path.
  for p in sln_projects:
    gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
    gyp_dir = os.path.dirname(gyp_file)
    path_dict = _GetPathDict(root, gyp_dir)
    path_dict[target + '.vcproj'] = project_objects[p]
  # Walk down from the top until we hit a folder that has more than one entry.
  # In practice, this strips the top-level "src/" dir from the hierarchy in
  # the solution.
  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
    root = root[root.keys()[0]]
  # Collapse singles.
  root = _CollapseSingles('', root)
  # Merge buckets until everything is a root entry.
  return _DictsToFolders('', root, flat)


def _GetPathOfProject(qualified_target, spec, options, msvs_version):
  """Returns (project file path, fixpath prefix) for a target.

  Honors 'msvs_existing_vcproj' if set; otherwise derives the file name
  from the target name, options.suffix and the version's extension.
  fix_prefix is only set when --generator-output relocates the project.
  """
  default_config = _GetDefaultConfiguration(spec)
  proj_filename = default_config.get('msvs_existing_vcproj')
  if not proj_filename:
    proj_filename = (spec['target_name'] + options.suffix +
                     msvs_version.ProjectExtension())

  build_file = gyp.common.BuildFile(qualified_target)
  proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
  fix_prefix = None
  if options.generator_output:
    project_dir_path = os.path.dirname(os.path.abspath(proj_path))
    proj_path = os.path.join(options.generator_output, proj_path)
    # Relative path from the relocated project back to the original dir,
    # used to fix up paths written into the project file.
    fix_prefix = gyp.common.RelativePath(project_dir_path,
                                         os.path.dirname(proj_path))
  return proj_path, fix_prefix


def _GetPlatformOverridesOfProject(spec):
  # Prepare a dict indicating which project configurations are used for which
  # solution configurations for this target.
config_platform_overrides = {} for config_name, c in spec['configurations'].iteritems(): config_fullname = _ConfigFullName(config_name, c) platform = c.get('msvs_target_platform', _ConfigPlatform(c)) fixed_config_fullname = '%s|%s' % ( _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) config_platform_overrides[config_fullname] = fixed_config_fullname return config_platform_overrides def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): """Create a MSVSProject object for the targets found in target list. Arguments: target_list: the list of targets to generate project objects for. target_dicts: the dictionary of specifications. options: global generator options. msvs_version: the MSVSVersion object. Returns: A set of created projects, keyed by target. """ global fixpath_prefix # Generate each project. projects = {} for qualified_target in target_list: spec = target_dicts[qualified_target] if spec['toolset'] != 'target': raise GypError( 'Multiple toolsets not supported in msvs build (target %s)' % qualified_target) proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, options, msvs_version) guid = _GetGuidOfProject(proj_path, spec) overrides = _GetPlatformOverridesOfProject(spec) build_file = gyp.common.BuildFile(qualified_target) # Create object for this project. 
obj = MSVSNew.MSVSProject( proj_path, name=spec['target_name'], guid=guid, spec=spec, build_file=build_file, config_platform_overrides=overrides, fixpath_prefix=fixpath_prefix) # Set project toolset if any (MS build only) if msvs_version.UsesVcxproj(): obj.set_msbuild_toolset( _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version)) projects[qualified_target] = obj # Set all the dependencies, but not if we are using an external builder like # ninja for project in projects.values(): if not project.spec.get('msvs_external_builder'): deps = project.spec.get('dependencies', []) deps = [projects[d] for d in deps] project.set_dependencies(deps) return projects def _InitNinjaFlavor(options, target_list, target_dicts): """Initialize targets for the ninja flavor. This sets up the necessary variables in the targets to generate msvs projects that use ninja as an external builder. The variables in the spec are only set if they have not been set. This allows individual specs to override the default values initialized here. Arguments: options: Options provided to the generator. target_list: List of target pairs: 'base/base.gyp:base'. target_dicts: Dict of target properties keyed on target pair. """ for qualified_target in target_list: spec = target_dicts[qualified_target] if spec.get('msvs_external_builder'): # The spec explicitly defined an external builder, so don't change it. 
continue path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe') spec['msvs_external_builder'] = 'ninja' if not spec.get('msvs_external_builder_out_dir'): spec['msvs_external_builder_out_dir'] = \ options.depth + '/out/$(Configuration)' if not spec.get('msvs_external_builder_build_cmd'): spec['msvs_external_builder_build_cmd'] = [ path_to_ninja, '-C', '$(OutDir)', '$(ProjectName)', ] if not spec.get('msvs_external_builder_clean_cmd'): spec['msvs_external_builder_clean_cmd'] = [ path_to_ninja, '-C', '$(OutDir)', '-t', 'clean', '$(ProjectName)', ] def CalculateVariables(default_variables, params): """Generated variables that require params to be known.""" generator_flags = params.get('generator_flags', {}) # Select project file format version (if unset, default to auto detecting). msvs_version = MSVSVersion.SelectVisualStudioVersion( generator_flags.get('msvs_version', 'auto')) # Stash msvs_version for later (so we don't have to probe the system twice). params['msvs_version'] = msvs_version # Set a variable so conditions can be based on msvs_version. default_variables['MSVS_VERSION'] = msvs_version.ShortName() # To determine processor word size on Windows, in addition to checking # PROCESSOR_ARCHITECTURE (which reflects the word size of the current # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which # contains the actual word size of the system when running thru WOW64). 
if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0): default_variables['MSVS_OS_BITS'] = 64 else: default_variables['MSVS_OS_BITS'] = 32 if gyp.common.GetFlavor(params) == 'ninja': default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen' def PerformBuild(data, configurations, params): options = params['options'] msvs_version = params['msvs_version'] devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') for build_file, build_file_dict in data.iteritems(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue sln_path = build_file_root + options.suffix + '.sln' if options.generator_output: sln_path = os.path.join(options.generator_output, sln_path) for config in configurations: arguments = [devenv, sln_path, '/Build', config] print 'Building [%s]: %s' % (config, arguments) rtn = subprocess.check_call(arguments) def GenerateOutput(target_list, target_dicts, data, params): """Generate .sln and .vcproj files. This is the entry point for this generator. Arguments: target_list: List of target pairs: 'base/base.gyp:base'. target_dicts: Dict of target properties keyed on target pair. data: Dictionary containing per .gyp data. """ global fixpath_prefix options = params['options'] # Get the project file format version back out of where we stashed it in # GeneratorCalculatedVariables. msvs_version = params['msvs_version'] generator_flags = params.get('generator_flags', {}) # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) # Optionally use the large PDB workaround for targets marked with # 'msvs_large_pdb': 1. (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims( target_list, target_dicts, generator_default_variables) # Optionally configure each spec to use ninja as the external builder. 
if params.get('flavor') == 'ninja': _InitNinjaFlavor(options, target_list, target_dicts) # Prepare the set of configurations. configs = set() for qualified_target in target_list: spec = target_dicts[qualified_target] for config_name, config in spec['configurations'].iteritems(): configs.add(_ConfigFullName(config_name, config)) configs = list(configs) # Figure out all the projects that will be generated and their guids project_objects = _CreateProjectObjects(target_list, target_dicts, options, msvs_version) # Generate each project. missing_sources = [] for project in project_objects.values(): fixpath_prefix = project.fixpath_prefix missing_sources.extend(_GenerateProject(project, options, msvs_version, generator_flags)) fixpath_prefix = None for build_file in data: # Validate build_file extension if not build_file.endswith('.gyp'): continue sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln' if options.generator_output: sln_path = os.path.join(options.generator_output, sln_path) # Get projects in the solution, and their dependents. sln_projects = gyp.common.BuildFileTargets(target_list, build_file) sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) # Create folder hierarchy. root_entries = _GatherSolutionFolders( sln_projects, project_objects, flat=msvs_version.FlatSolution()) # Create solution. sln = MSVSNew.MSVSSolution(sln_path, entries=root_entries, variants=configs, websiteProperties=False, version=msvs_version) sln.Write() if missing_sources: error_message = "Missing input files:\n" + \ '\n'.join(set(missing_sources)) if generator_flags.get('msvs_error_on_missing_sources', False): raise GypError(error_message) else: print >> sys.stdout, "Warning: " + error_message def _GenerateMSBuildFiltersFile(filters_path, source_files, extension_to_rule_name): """Generate the filters file. This file is used by Visual Studio to organize the presentation of source files into folders. 
Arguments: filters_path: The path of the file to be created. source_files: The hierarchical structure of all the sources. extension_to_rule_name: A dictionary mapping file extensions to rules. """ filter_group = [] source_group = [] _AppendFiltersForMSBuild('', source_files, extension_to_rule_name, filter_group, source_group) if filter_group: content = ['Project', {'ToolsVersion': '4.0', 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' }, ['ItemGroup'] + filter_group, ['ItemGroup'] + source_group ] easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) elif os.path.exists(filters_path): # We don't need this filter anymore. Delete the old filter file. os.unlink(filters_path) def _AppendFiltersForMSBuild(parent_filter_name, sources, extension_to_rule_name, filter_group, source_group): """Creates the list of filters and sources to be added in the filter file. Args: parent_filter_name: The name of the filter under which the sources are found. sources: The hierarchy of filters and sources to process. extension_to_rule_name: A dictionary mapping file extensions to rules. filter_group: The list to which filter entries will be appended. source_group: The list to which source entries will be appeneded. """ for source in sources: if isinstance(source, MSVSProject.Filter): # We have a sub-filter. Create the name of that sub-filter. if not parent_filter_name: filter_name = source.name else: filter_name = '%s\\%s' % (parent_filter_name, source.name) # Add the filter to the group. filter_group.append( ['Filter', {'Include': filter_name}, ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]]) # Recurse and add its dependents. _AppendFiltersForMSBuild(filter_name, source.contents, extension_to_rule_name, filter_group, source_group) else: # It's a source. Create a source entry. _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name) source_entry = [element, {'Include': source}] # Specify the filter it is part of, if any. 
      if parent_filter_name:
        source_entry.append(['Filter', parent_filter_name])
      source_group.append(source_entry)


def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
  """Returns the group and element type of the source file.

  Arguments:
    source: The source file name.
    extension_to_rule_name: A dictionary mapping file extensions to rules.

  Returns:
    A pair of (group this file should be part of, the label of element)
  """
  _, ext = os.path.splitext(source)
  if ext in extension_to_rule_name:
    # A custom rule claims this extension; use its MSBuild item type.
    group = 'rule'
    element = extension_to_rule_name[ext]
  elif ext in ['.cc', '.cpp', '.c', '.cxx']:
    group = 'compile'
    element = 'ClCompile'
  elif ext in ['.h', '.hxx']:
    group = 'include'
    element = 'ClInclude'
  elif ext == '.rc':
    group = 'resource'
    element = 'ResourceCompile'
  elif ext == '.idl':
    group = 'midl'
    element = 'Midl'
  else:
    # Anything unrecognized is carried along but not built.
    group = 'none'
    element = 'None'
  return (group, element)


def _GenerateRulesForMSBuild(output_dir, options, spec,
                             sources, excluded_sources,
                             props_files_of_rules, targets_files_of_rules,
                             actions_to_add, extension_to_rule_name):
  """Generate MSBuild rule files for this target's custom build rules.

  Native rules are written as a .props/.targets/.xml file triple; rules
  marked 'msvs_external_rule' are delegated to _GenerateExternalRules.
  """
  # MSBuild rules are implemented using three files: an XML file, a .targets
  # file and a .props file.
  # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
  # for more details.
  rules = spec.get('rules', [])
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]

  msbuild_rules = []
  for rule in rules_native:
    # Skip a rule with no action and no inputs.
    if 'action' not in rule and not rule.get('rule_sources', []):
      continue
    msbuild_rule = MSBuildRule(rule, spec)
    msbuild_rules.append(msbuild_rule)
    # Later sources with this extension will be routed through this rule.
    extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
  if msbuild_rules:
    base = spec['target_name'] + options.suffix
    props_name = base + '.props'
    targets_name = base + '.targets'
    xml_name = base + '.xml'

    props_files_of_rules.add(props_name)
    targets_files_of_rules.add(targets_name)

    props_path = os.path.join(output_dir, props_name)
    targets_path = os.path.join(output_dir, targets_name)
    xml_path = os.path.join(output_dir, xml_name)

    _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
    _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
    _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)

  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)


class MSBuildRule(object):
  """Used to store information used to generate an MSBuild rule.

  Attributes:
    rule_name: The rule name, sanitized to use in XML.
    target_name: The name of the target.
    after_targets: The name of the AfterTargets element.
    before_targets: The name of the BeforeTargets element.
    depends_on: The name of the DependsOn element.
    compute_output: The name of the ComputeOutput element.
    dirs_to_make: The name of the DirsToMake element.
    inputs: The name of the _inputs element.
    tlog: The name of the _tlog element.
    extension: The extension this rule applies to.
    description: The message displayed when this rule is invoked.
    additional_dependencies: A string listing additional dependencies.
    outputs: The outputs of this rule.
    command: The command used to run the rule.
  """

  def __init__(self, rule, spec):
    self.display_name = rule['rule_name']
    # Assure that the rule name is only characters and numbers
    self.rule_name = re.sub(r'\W', '_', self.display_name)
    # Create the various element names, following the example set by the
    # Visual Studio 2008 to 2010 conversion.  I don't know if VS2010
    # is sensitive to the exact names.
    self.target_name = '_' + self.rule_name
    self.after_targets = self.rule_name + 'AfterTargets'
    self.before_targets = self.rule_name + 'BeforeTargets'
    self.depends_on = self.rule_name + 'DependsOn'
    self.compute_output = 'Compute%sOutput' % self.rule_name
    self.dirs_to_make = self.rule_name + 'DirsToMake'
    self.inputs = self.rule_name + '_inputs'
    self.tlog = self.rule_name + '_tlog'
    self.extension = rule['extension']
    # Normalize the extension to always carry a leading dot.
    if not self.extension.startswith('.'):
      self.extension = '.' + self.extension

    self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
        rule.get('message', self.rule_name))
    old_additional_dependencies = _FixPaths(rule.get('inputs', []))
    self.additional_dependencies = (
        ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
                  for i in old_additional_dependencies]))
    old_outputs = _FixPaths(rule.get('outputs', []))
    self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
                             for i in old_outputs])
    old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
                                           do_setup_env=True)
    self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)


def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
  """Generate the .props file."""
  content = ['Project',
             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
  for rule in msbuild_rules:
    content.extend([
        ['PropertyGroup',
         {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
                       "'$(ConfigurationType)' != 'Makefile'" %
                       (rule.before_targets, rule.after_targets)
         },
         [rule.before_targets, 'Midl'],
         [rule.after_targets, 'CustomBuild'],
        ],
        ['PropertyGroup',
         [rule.depends_on,
          {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
          '_SelectedFiles;$(%s)' % rule.depends_on
         ],
        ],
        ['ItemDefinitionGroup',
         [rule.rule_name,
          ['CommandLineTemplate', rule.command],
          ['Outputs', rule.outputs],
          ['ExecutionDescription', rule.description],
          ['AdditionalDependencies', rule.additional_dependencies],
         ],
        ]
    ])
  easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)


def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
  """Generate the .targets file."""
  content = ['Project',
             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
             }
            ]
  item_group = [
      'ItemGroup',
      ['PropertyPageSchema',
       {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
      ]
    ]
  for rule in msbuild_rules:
    item_group.append(
        ['AvailableItemName',
         {'Include': rule.rule_name},
         ['Targets', rule.target_name],
        ])
  content.append(item_group)
  for rule in msbuild_rules:
    content.append(
        ['UsingTask',
         {'TaskName': rule.rule_name,
          'TaskFactory': 'XamlTaskFactory',
          'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
         },
         ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
        ])
  for rule in msbuild_rules:
    rule_name = rule.rule_name
    target_outputs = '%%(%s.Outputs)' % rule_name
    target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
                     '$(MSBuildProjectFile)') % (rule_name, rule_name)
    rule_inputs = '%%(%s.Identity)' % rule_name
    extension_condition = ("'%(Extension)'=='.obj' or "
                           "'%(Extension)'=='.res' or "
                           "'%(Extension)'=='.rsc' or "
                           "'%(Extension)'=='.lib'")
    remove_section = [
        'ItemGroup',
        {'Condition': "'@(SelectedFiles)' != ''"},
        [rule_name,
         {'Remove': '@(%s)' % rule_name,
          'Condition': "'%(Identity)' != '@(SelectedFiles)'"
         }
        ]
    ]
    inputs_section = [
        'ItemGroup',
        [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
    ]
    logging_section = [
        'ItemGroup',
        [rule.tlog,
         {'Include': '%%(%s.Outputs)' % rule_name,
          'Condition': ("'%%(%s.Outputs)' != '' and "
                        "'%%(%s.ExcludedFromBuild)' != 'true'" %
                        (rule_name, rule_name))
         },
         ['Source', "@(%s, '|')" % rule_name],
         ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
        ],
    ]
message_section = [ 'Message', {'Importance': 'High', 'Text': '%%(%s.ExecutionDescription)' % rule_name } ] write_tlog_section = [ 'WriteLinesToFile', {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " "'true'" % (rule.tlog, rule.tlog), 'File': '$(IntDir)$(ProjectName).write.1.tlog', 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog, rule.tlog) } ] read_tlog_section = [ 'WriteLinesToFile', {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " "'true'" % (rule.tlog, rule.tlog), 'File': '$(IntDir)$(ProjectName).read.1.tlog', 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog) } ] command_and_input_section = [ rule_name, {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " "'true'" % (rule_name, rule_name), 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name, 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name, 'Inputs': rule_inputs } ] content.extend([ ['Target', {'Name': rule.target_name, 'BeforeTargets': '$(%s)' % rule.before_targets, 'AfterTargets': '$(%s)' % rule.after_targets, 'Condition': "'@(%s)' != ''" % rule_name, 'DependsOnTargets': '$(%s);%s' % (rule.depends_on, rule.compute_output), 'Outputs': target_outputs, 'Inputs': target_inputs }, remove_section, inputs_section, logging_section, message_section, write_tlog_section, read_tlog_section, command_and_input_section, ], ['PropertyGroup', ['ComputeLinkInputsTargets', '$(ComputeLinkInputsTargets);', '%s;' % rule.compute_output ], ['ComputeLibInputsTargets', '$(ComputeLibInputsTargets);', '%s;' % rule.compute_output ], ], ['Target', {'Name': rule.compute_output, 'Condition': "'@(%s)' != ''" % rule_name }, ['ItemGroup', [rule.dirs_to_make, {'Condition': "'@(%s)' != '' and " "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name), 'Include': '%%(%s.Outputs)' % rule_name } ], ['Link', {'Include': '%%(%s.Identity)' % rule.dirs_to_make, 'Condition': extension_condition } ], ['Lib', {'Include': '%%(%s.Identity)' % 
rule.dirs_to_make, 'Condition': extension_condition } ], ['ImpLib', {'Include': '%%(%s.Identity)' % rule.dirs_to_make, 'Condition': extension_condition } ], ], ['MakeDir', {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make) } ] ], ]) easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): # Generate the .xml file content = [ 'ProjectSchemaDefinitions', {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;' 'assembly=Microsoft.Build.Framework'), 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml', 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib', 'xmlns:transformCallback': 'Microsoft.Cpp.Dev10.ConvertPropertyCallback' } ] for rule in msbuild_rules: content.extend([ ['Rule', {'Name': rule.rule_name, 'PageTemplate': 'tool', 'DisplayName': rule.display_name, 'Order': '200' }, ['Rule.DataSource', ['DataSource', {'Persistence': 'ProjectFile', 'ItemType': rule.rule_name } ] ], ['Rule.Categories', ['Category', {'Name': 'General'}, ['Category.DisplayName', ['sys:String', 'General'], ], ], ['Category', {'Name': 'Command Line', 'Subtype': 'CommandLine' }, ['Category.DisplayName', ['sys:String', 'Command Line'], ], ], ], ['StringListProperty', {'Name': 'Inputs', 'Category': 'Command Line', 'IsRequired': 'true', 'Switch': ' ' }, ['StringListProperty.DataSource', ['DataSource', {'Persistence': 'ProjectFile', 'ItemType': rule.rule_name, 'SourceType': 'Item' } ] ], ], ['StringProperty', {'Name': 'CommandLineTemplate', 'DisplayName': 'Command Line', 'Visible': 'False', 'IncludeInCommandLine': 'False' } ], ['DynamicEnumProperty', {'Name': rule.before_targets, 'Category': 'General', 'EnumProvider': 'Targets', 'IncludeInCommandLine': 'False' }, ['DynamicEnumProperty.DisplayName', ['sys:String', 'Execute Before'], ], ['DynamicEnumProperty.Description', ['sys:String', 'Specifies the targets for the build customization' ' to run before.' 
], ], ['DynamicEnumProperty.ProviderSettings', ['NameValuePair', {'Name': 'Exclude', 'Value': '^%s|^Compute' % rule.before_targets } ] ], ['DynamicEnumProperty.DataSource', ['DataSource', {'Persistence': 'ProjectFile', 'HasConfigurationCondition': 'true' } ] ], ], ['DynamicEnumProperty', {'Name': rule.after_targets, 'Category': 'General', 'EnumProvider': 'Targets', 'IncludeInCommandLine': 'False' }, ['DynamicEnumProperty.DisplayName', ['sys:String', 'Execute After'], ], ['DynamicEnumProperty.Description', ['sys:String', ('Specifies the targets for the build customization' ' to run after.') ], ], ['DynamicEnumProperty.ProviderSettings', ['NameValuePair', {'Name': 'Exclude', 'Value': '^%s|^Compute' % rule.after_targets } ] ], ['DynamicEnumProperty.DataSource', ['DataSource', {'Persistence': 'ProjectFile', 'ItemType': '', 'HasConfigurationCondition': 'true' } ] ], ], ['StringListProperty', {'Name': 'Outputs', 'DisplayName': 'Outputs', 'Visible': 'False', 'IncludeInCommandLine': 'False' } ], ['StringProperty', {'Name': 'ExecutionDescription', 'DisplayName': 'Execution Description', 'Visible': 'False', 'IncludeInCommandLine': 'False' } ], ['StringListProperty', {'Name': 'AdditionalDependencies', 'DisplayName': 'Additional Dependencies', 'IncludeInCommandLine': 'False', 'Visible': 'false' } ], ['StringProperty', {'Subtype': 'AdditionalOptions', 'Name': 'AdditionalOptions', 'Category': 'Command Line' }, ['StringProperty.DisplayName', ['sys:String', 'Additional Options'], ], ['StringProperty.Description', ['sys:String', 'Additional Options'], ], ], ], ['ItemType', {'Name': rule.rule_name, 'DisplayName': rule.display_name } ], ['FileExtension', {'Name': '*' + rule.extension, 'ContentType': rule.rule_name } ], ['ContentType', {'Name': rule.rule_name, 'DisplayName': '', 'ItemType': rule.rule_name } ] ]) easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) def _GetConfigurationAndPlatform(name, settings): configuration = name.rsplit('_', 1)[0] platform = 
settings.get('msvs_configuration_platform', 'Win32') return (configuration, platform) def _GetConfigurationCondition(name, settings): return (r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform(name, settings)) def _GetMSBuildProjectConfigurations(configurations): group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] for (name, settings) in sorted(configurations.iteritems()): configuration, platform = _GetConfigurationAndPlatform(name, settings) designation = '%s|%s' % (configuration, platform) group.append( ['ProjectConfiguration', {'Include': designation}, ['Configuration', configuration], ['Platform', platform]]) return [group] def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): namespace = os.path.splitext(gyp_file_name)[0] return [ ['PropertyGroup', {'Label': 'Globals'}, ['ProjectGuid', guid], ['Keyword', 'Win32Proj'], ['RootNamespace', namespace], ['IgnoreWarnCompileDuplicatedFilename', 'true'], ] ] def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} for name, settings in spec['configurations'].iteritems(): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') _AddConditionalProperty(properties, condition, 'ConfigurationType', msbuild_attributes['ConfigurationType']) if character_set: _AddConditionalProperty(properties, condition, 'CharacterSet', character_set) return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) def _GetMSBuildLocalProperties(msbuild_toolset): # Currently the only local property we support is PlatformToolset properties = {} if msbuild_toolset: properties = [ ['PropertyGroup', {'Label': 'Locals'}, ['PlatformToolset', msbuild_toolset], ] ] return properties def _GetMSBuildPropertySheets(configurations): user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' additional_props = {} props_specified = False for name, settings in 
sorted(configurations.iteritems()): configuration = _GetConfigurationCondition(name, settings) if settings.has_key('msbuild_props'): additional_props[configuration] = _FixPaths(settings['msbuild_props']) props_specified = True else: additional_props[configuration] = '' if not props_specified: return [ ['ImportGroup', {'Label': 'PropertySheets'}, ['Import', {'Project': user_props, 'Condition': "exists('%s')" % user_props, 'Label': 'LocalAppDataPlatform' } ] ] ] else: sheets = [] for condition, props in additional_props.iteritems(): import_group = [ 'ImportGroup', {'Label': 'PropertySheets', 'Condition': condition }, ['Import', {'Project': user_props, 'Condition': "exists('%s')" % user_props, 'Label': 'LocalAppDataPlatform' } ] ] for props_file in props: import_group.append(['Import', {'Project':props_file}]) sheets.append(import_group) return sheets def _ConvertMSVSBuildAttributes(spec, config, build_file): config_type = _GetMSVSConfigurationType(spec, build_file) msvs_attributes = _GetMSVSAttributes(spec, config, config_type) msbuild_attributes = {} for a in msvs_attributes: if a in ['IntermediateDirectory', 'OutputDirectory']: directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) if not directory.endswith('\\'): directory += '\\' msbuild_attributes[a] = directory elif a == 'CharacterSet': msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) elif a == 'ConfigurationType': msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) else: print 'Warning: Do not know how to convert MSVS attribute ' + a return msbuild_attributes def _ConvertMSVSCharacterSet(char_set): if char_set.isdigit(): char_set = { '0': 'MultiByte', '1': 'Unicode', '2': 'MultiByte', }[char_set] return char_set def _ConvertMSVSConfigurationType(config_type): if config_type.isdigit(): config_type = { '1': 'Application', '2': 'DynamicLibrary', '4': 'StaticLibrary', '10': 'Utility' }[config_type] return config_type def _GetMSBuildAttributes(spec, config, 
build_file): if 'msbuild_configuration_attributes' not in config: msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) else: config_type = _GetMSVSConfigurationType(spec, build_file) config_type = _ConvertMSVSConfigurationType(config_type) msbuild_attributes = config.get('msbuild_configuration_attributes', {}) msbuild_attributes.setdefault('ConfigurationType', config_type) output_dir = msbuild_attributes.get('OutputDirectory', '$(SolutionDir)$(Configuration)') msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\' if 'IntermediateDirectory' not in msbuild_attributes: intermediate = _FixPath('$(Configuration)') + '\\' msbuild_attributes['IntermediateDirectory'] = intermediate if 'CharacterSet' in msbuild_attributes: msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet( msbuild_attributes['CharacterSet']) if 'TargetName' not in msbuild_attributes: prefix = spec.get('product_prefix', '') product_name = spec.get('product_name', '$(ProjectName)') target_name = prefix + product_name msbuild_attributes['TargetName'] = target_name if spec.get('msvs_external_builder'): external_out_dir = spec.get('msvs_external_builder_out_dir', '.') msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\' # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' # (depending on the tool used) to avoid MSB8012 warning. 
  # Map gyp target types to the MSBuild tool that produces the final output.
  msbuild_tool_map = {
      'executable': 'Link',
      'shared_library': 'Link',
      'loadable_module': 'Link',
      'static_library': 'Lib',
  }
  msbuild_tool = msbuild_tool_map.get(spec['type'])
  if msbuild_tool:
    msbuild_settings = config['finalized_msbuild_settings']
    out_file = msbuild_settings[msbuild_tool].get('OutputFile')
    if out_file:
      msbuild_attributes['TargetPath'] = _FixPath(out_file)
    target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
    if target_ext:
      msbuild_attributes['TargetExt'] = target_ext

  return msbuild_attributes


def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
  """Build the per-configuration global PropertyGroup (dirs, paths, names)."""
  # TODO(jeanluc) We could optimize out the following and do it only if
  # there are actions.
  # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
  new_paths = []
  cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
  if cygwin_dirs:
    cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
    new_paths.append(cyg_path)
    # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
    # python_dir.
python_path = cyg_path.replace('cygwin\\bin', 'python_26') new_paths.append(python_path) if new_paths: new_paths = '$(ExecutablePath);' + ';'.join(new_paths) properties = {} for (name, configuration) in sorted(configurations.iteritems()): condition = _GetConfigurationCondition(name, configuration) attributes = _GetMSBuildAttributes(spec, configuration, build_file) msbuild_settings = configuration['finalized_msbuild_settings'] _AddConditionalProperty(properties, condition, 'IntDir', attributes['IntermediateDirectory']) _AddConditionalProperty(properties, condition, 'OutDir', attributes['OutputDirectory']) _AddConditionalProperty(properties, condition, 'TargetName', attributes['TargetName']) if attributes.get('TargetPath'): _AddConditionalProperty(properties, condition, 'TargetPath', attributes['TargetPath']) if attributes.get('TargetExt'): _AddConditionalProperty(properties, condition, 'TargetExt', attributes['TargetExt']) if new_paths: _AddConditionalProperty(properties, condition, 'ExecutablePath', new_paths) tool_settings = msbuild_settings.get('', {}) for name, value in sorted(tool_settings.iteritems()): formatted_value = _GetValueFormattedForMSBuild('', name, value) _AddConditionalProperty(properties, condition, name, formatted_value) return _GetMSBuildPropertyGroup(spec, None, properties) def _AddConditionalProperty(properties, condition, name, value): """Adds a property / conditional value pair to a dictionary. Arguments: properties: The dictionary to be modified. The key is the name of the property. The value is itself a dictionary; its key is the value and the value a list of condition for which this value is true. condition: The condition under which the named property has the value. name: The name of the property. value: The value of the property. 
""" if name not in properties: properties[name] = {} values = properties[name] if value not in values: values[value] = [] conditions = values[value] conditions.append(condition) # Regex for msvs variable references ( i.e. $(FOO) ). MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)') def _GetMSBuildPropertyGroup(spec, label, properties): """Returns a PropertyGroup definition for the specified properties. Arguments: spec: The target project dict. label: An optional label for the PropertyGroup. properties: The dictionary to be converted. The key is the name of the property. The value is itself a dictionary; its key is the value and the value a list of condition for which this value is true. """ group = ['PropertyGroup'] if label: group.append({'Label': label}) num_configurations = len(spec['configurations']) def GetEdges(node): # Use a definition of edges such that user_of_variable -> used_varible. # This happens to be easier in this case, since a variable's # definition contains all variables it references in a single string. edges = set() for value in sorted(properties[node].keys()): # Add to edges all $(...) references to variables. # # Variable references that refer to names not in properties are excluded # These can exist for instance to refer built in definitions like # $(SolutionDir). # # Self references are ignored. Self reference is used in a few places to # append to the default value. I.e. PATH=$(PATH);other_path edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value) if v in properties and v != node])) return edges properties_ordered = gyp.common.TopologicallySorted( properties.keys(), GetEdges) # Walk properties in the reverse of a topological sort on # user_of_variable -> used_variable as this ensures variables are # defined before they are used. 
  # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
  for name in reversed(properties_ordered):
    values = properties[name]
    for value, conditions in sorted(values.iteritems()):
      if len(conditions) == num_configurations:
        # If the value is the same all configurations,
        # just add one unconditional entry.
        group.append([name, value])
      else:
        for condition in conditions:
          group.append([name, {'Condition': condition}, value])
  return [group]


def _GetMSBuildToolSettingsSections(spec, configurations):
  """Return per-configuration ItemDefinitionGroup sections of tool settings."""
  groups = []
  for (name, configuration) in sorted(configurations.iteritems()):
    msbuild_settings = configuration['finalized_msbuild_settings']
    group = ['ItemDefinitionGroup',
             {'Condition': _GetConfigurationCondition(name, configuration)}
            ]
    for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
      # Skip the tool named '' which is a holder of global settings handled
      # by _GetMSBuildConfigurationGlobalProperties.
      if tool_name:
        if tool_settings:
          tool = [tool_name]
          for name, value in sorted(tool_settings.iteritems()):
            formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
                                                           value)
            tool.append([name, formatted_value])
          group.append(tool)
    groups.append(group)
  return groups


def _FinalizeMSBuildSettings(spec, configuration):
  """Compute and store the configuration's 'finalized_msbuild_settings'.

  Converts legacy msvs_settings when no native msbuild_settings are given,
  then folds in include dirs, libraries, defines, precompiled headers, etc.
  """
  if 'msbuild_settings' in configuration:
    converted = False
    msbuild_settings = configuration['msbuild_settings']
    MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
  else:
    converted = True
    msvs_settings = configuration.get('msvs_settings', {})
    msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
  include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
  libraries = _GetLibraries(spec)
  library_dirs = _GetLibraryDirs(configuration)
  out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
  target_ext = _GetOutputTargetExt(spec)
  defines = _GetDefines(configuration)
  if converted:
    # Visual Studio 2010 has TR1
    defines = [d for d in defines if d != '_HAS_TR1=0']
    # Warn of ignored settings
    ignored_settings = ['msvs_tool_files']
    for ignored_setting in ignored_settings:
      value = configuration.get(ignored_setting)
      if value:
        print ('Warning: The automatic conversion to MSBuild does not handle '
               '%s.  Ignoring setting of %s' % (ignored_setting, str(value)))

  defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
  disabled_warnings = _GetDisabledWarnings(configuration)
  prebuild = configuration.get('msvs_prebuild')
  postbuild = configuration.get('msvs_postbuild')
  def_file = _GetModuleDefinition(spec)
  precompiled_header = configuration.get('msvs_precompiled_header')

  # Add the information to the appropriate tool
  # TODO(jeanluc) We could optimize and generate these settings only if
  # the corresponding files are found, e.g. don't generate ResourceCompile
  # if you don't have any resources.
  _ToolAppend(msbuild_settings, 'ClCompile',
              'AdditionalIncludeDirectories', include_dirs)
  _ToolAppend(msbuild_settings, 'ResourceCompile',
              'AdditionalIncludeDirectories', resource_include_dirs)
  # Add in libraries, note that even for empty libraries, we want this
  # set, to prevent inheriting default libraries from the enviroment.
  _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
                   libraries)
  _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
              library_dirs)
  if out_file:
    _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
                only_if_unset=True)
  if target_ext:
    _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
                only_if_unset=True)
  # Add defines.
  _ToolAppend(msbuild_settings, 'ClCompile', 'PreprocessorDefinitions',
              defines)
  _ToolAppend(msbuild_settings, 'ResourceCompile', 'PreprocessorDefinitions',
              defines)
  # Add disabled warnings.
  _ToolAppend(msbuild_settings, 'ClCompile',
              'DisableSpecificWarnings', disabled_warnings)
  # Turn on precompiled headers if appropriate.
if precompiled_header: precompiled_header = os.path.split(precompiled_header)[1] _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use') _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeaderFile', precompiled_header) _ToolAppend(msbuild_settings, 'ClCompile', 'ForcedIncludeFiles', [precompiled_header]) # Loadable modules don't generate import libraries; # tell dependent projects to not expect one. if spec['type'] == 'loadable_module': _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true') # Set the module definition file if any. if def_file: _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file) configuration['finalized_msbuild_settings'] = msbuild_settings if prebuild: _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild) if postbuild: _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild) def _GetValueFormattedForMSBuild(tool_name, name, value): if type(value) == list: # For some settings, VS2010 does not automatically extends the settings # TODO(jeanluc) Is this what we want? if name in ['AdditionalIncludeDirectories', 'AdditionalLibraryDirectories', 'AdditionalOptions', 'DelayLoadDLLs', 'DisableSpecificWarnings', 'PreprocessorDefinitions']: value.append('%%(%s)' % name) # For most tools, entries in a list should be separated with ';' but some # settings use a space. Check for those first. exceptions = { 'ClCompile': ['AdditionalOptions'], 'Link': ['AdditionalOptions'], 'Lib': ['AdditionalOptions']} if tool_name in exceptions and name in exceptions[tool_name]: char = ' ' else: char = ';' formatted_value = char.join( [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]) else: formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value) return formatted_value def _VerifySourcesExist(sources, root_dir): """Verifies that all source files exist on disk. Checks that all regular source files, i.e. not created at run time, exist on disk. 
  Missing files cause needless recompilation but no otherwise visible errors.

  Arguments:
    sources: A recursive list of Filter/file names.
    root_dir: The root directory for the relative path names.
  Returns:
    A list of source files that cannot be found on disk.
  """
  missing_sources = []
  for source in sources:
    if isinstance(source, MSVSProject.Filter):
      # Filters are folder nodes in the project tree; recurse into them.
      missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
    else:
      # Paths containing '$' hold MSBuild/VS macros and cannot be resolved
      # here, so they are skipped rather than reported as missing.
      if '$' not in source:
        full_path = os.path.join(root_dir, source)
        if not os.path.exists(full_path):
          missing_sources.append(full_path)
  return missing_sources


def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
                       actions_spec, sources_handled_by_action, list_excluded):
  """Return the MSBuild ItemGroup elements describing the project sources.

  Sources are bucketed by MSBuild item type via _AddSources2 and one
  ItemGroup element is emitted per non-empty bucket, followed by an
  ItemGroup holding the custom-build actions, if any.

  Arguments:
    spec: the gyp target dict.
    sources: recursive list of Filter/file names.
    exclusions: map of source name to its excluded configurations.
    extension_to_rule_name: map of file extension to rule-derived item type.
    actions_spec: pre-built list of CustomBuild action elements.
    sources_handled_by_action: set of sources consumed by actions.
    list_excluded: whether excluded files should still be listed.
  Returns:
    A list of ['ItemGroup', ...] specifications.
  """
  groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
  grouped_sources = {}
  for g in groups:
    grouped_sources[g] = []

  _AddSources2(spec, sources, exclusions, grouped_sources,
               extension_to_rule_name, sources_handled_by_action,
               list_excluded)
  sources = []
  for g in groups:
    if grouped_sources[g]:
      sources.append(['ItemGroup'] + grouped_sources[g])
  if actions_spec:
    sources.append(['ItemGroup'] + actions_spec)
  return sources


def _AddSources2(spec, sources, exclusions, grouped_sources,
                 extension_to_rule_name, sources_handled_by_action,
                 list_excluded):
  """Recursively bucket sources into grouped_sources by MSBuild item type.

  For each plain source (not consumed by an action) this computes per-file
  detail elements: ExcludedFromBuild conditions and precompiled-header
  Create/disable settings, then appends the resulting item to the bucket
  chosen by _MapFileToMsBuildSourceType.
  """
  extensions_excluded_from_precompile = []
  for source in sources:
    if isinstance(source, MSVSProject.Filter):
      # Recurse into filter (folder) nodes; buckets are shared across levels.
      _AddSources2(spec, source.contents, exclusions, grouped_sources,
                   extension_to_rule_name, sources_handled_by_action,
                   list_excluded)
    else:
      if not source in sources_handled_by_action:
        detail = []
        excluded_configurations = exclusions.get(source, [])
        if len(excluded_configurations) == len(spec['configurations']):
          # Excluded everywhere: one unconditional element is enough.
          detail.append(['ExcludedFromBuild', 'true'])
        else:
          for config_name, configuration in sorted(excluded_configurations):
            condition = _GetConfigurationCondition(config_name, configuration)
            detail.append(['ExcludedFromBuild',
                           {'Condition': condition},
                           'true'])
        # Add precompile if needed
        for config_name, configuration in spec['configurations'].iteritems():
          precompiled_source = configuration.get('msvs_precompiled_source', '')
          if precompiled_source != '':
            precompiled_source = _FixPath(precompiled_source)
            if not extensions_excluded_from_precompile:
              # If the precompiled header is generated by a C source, we must
              # not try to use it for C++ sources, and vice versa.
              basename, extension = os.path.splitext(precompiled_source)
              if extension == '.c':
                extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
              else:
                extensions_excluded_from_precompile = ['.c']

            if precompiled_source == source:
              condition = _GetConfigurationCondition(config_name,
                                                     configuration)
              detail.append(['PrecompiledHeader',
                             {'Condition': condition},
                             'Create'
                            ])
            else:
              # Turn off precompiled header usage for source files of a
              # different type than the file that generated the
              # precompiled header.
              for extension in extensions_excluded_from_precompile:
                if source.endswith(extension):
                  detail.append(['PrecompiledHeader', ''])
                  detail.append(['ForcedIncludeFiles', ''])

        group, element = _MapFileToMsBuildSourceType(source,
                                                     extension_to_rule_name)
        grouped_sources[group].append([element, {'Include': source}] + detail)


def _GetMSBuildProjectReferences(project):
  """Return the ProjectReference ItemGroup for a project's dependencies.

  Each dependency becomes a ProjectReference element carrying its GUID and a
  path relative to this project's directory; ReferenceOutputAssembly is
  always disabled since gyp targets are native, not managed.
  """
  references = []
  if project.dependencies:
    group = ['ItemGroup']
    for dependency in project.dependencies:
      guid = dependency.guid
      project_dir = os.path.split(project.path)[0]
      relative_path = gyp.common.RelativePath(dependency.path, project_dir)
      project_ref = ['ProjectReference',
          {'Include': relative_path},
          ['Project', guid],
          ['ReferenceOutputAssembly', 'false']
          ]
      for config in dependency.spec.get('configurations', {}).itervalues():
        # If it's disabled in any config, turn it off in the reference.
        if config.get('msvs_2010_disable_uldi_when_referenced', 0):
          project_ref.append(['UseLibraryDependencyInputs', 'false'])
          break
      group.append(project_ref)
    references.append(group)
  return references


def _GenerateMSBuildProject(project, options, version, generator_flags):
  """Generate the .vcxproj (and .filters) files for one gyp target.

  Orchestrates the whole pipeline: gathering and adjusting sources, rules,
  actions and copies, building the XML element tree in MSBuild's required
  section order, and writing it out only when the content changed.

  Returns:
    The list of source files referenced by the project but missing on disk.
  """
  spec = project.spec
  configurations = spec['configurations']
  project_dir, project_file_name = os.path.split(project.path)
  gyp.common.EnsureDirExists(project.path)
  # Prepare list of sources and excluded sources.
  gyp_path = _NormalizedSource(project.build_file)
  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)

  gyp_file = os.path.split(project.build_file)[1]
  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
                                                    gyp_file)
  # Add rules.
  actions_to_add = {}
  props_files_of_rules = set()
  targets_files_of_rules = set()
  extension_to_rule_name = {}
  list_excluded = generator_flags.get('msvs_list_excluded_files', True)

  # Don't generate rules if we are using an external builder like ninja.
  if not spec.get('msvs_external_builder'):
    _GenerateRulesForMSBuild(project_dir, options, spec,
                             sources, excluded_sources,
                             props_files_of_rules, targets_files_of_rules,
                             actions_to_add, extension_to_rule_name)
  else:
    rules = spec.get('rules', [])
    _AdjustSourcesForRules(spec, rules, sources, excluded_sources)

  sources, excluded_sources, excluded_idl = (
      _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
                                                project_dir, sources,
                                                excluded_sources,
                                                list_excluded, version))

  # Don't add actions if we are using an external builder like ninja.
  if not spec.get('msvs_external_builder'):
    _AddActions(actions_to_add, spec, project.build_file)
    _AddCopies(actions_to_add, spec)

    # NOTE: this stanza must appear after all actions have been decided.
    # Don't excluded sources with actions attached, or they won't run.
    excluded_sources = _FilterActionsFromExcluded(
        excluded_sources, actions_to_add)

  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
  actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
      spec, actions_to_add)

  _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
                              extension_to_rule_name)
  missing_sources = _VerifySourcesExist(sources, project_dir)

  for configuration in configurations.itervalues():
    _FinalizeMSBuildSettings(spec, configuration)

  # Add attributes to root element
  import_default_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
  import_cpp_props_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
  import_cpp_targets_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
  macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]

  content = [
      'Project',
      {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
       'ToolsVersion': version.ProjectVersion(),
       'DefaultTargets': 'Build'
      }]

  # The order of the sections below mirrors the section order MSBuild
  # expects inside a .vcxproj; do not reorder casually.
  content += _GetMSBuildProjectConfigurations(configurations)
  content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
  content += import_default_section
  content += _GetMSBuildConfigurationDetails(spec, project.build_file)
  content += _GetMSBuildLocalProperties(project.msbuild_toolset)
  content += import_cpp_props_section
  content += _GetMSBuildExtensions(props_files_of_rules)
  content += _GetMSBuildPropertySheets(configurations)
  content += macro_section
  content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
                                                      project.build_file)
  content += _GetMSBuildToolSettingsSections(spec, configurations)
  content += _GetMSBuildSources(
      spec, sources, exclusions, extension_to_rule_name, actions_spec,
      sources_handled_by_action, list_excluded)
  content += _GetMSBuildProjectReferences(project)
  content += import_cpp_targets_section
  content += _GetMSBuildExtensionTargets(targets_files_of_rules)

  if spec.get('msvs_external_builder'):
    content += _GetMSBuildExternalBuilderTargets(spec)

  # TODO(jeanluc) File a bug to get rid of runas.  We had in MSVS:
  # has_run_as = _WriteMSVSUserFile(project.path, version, spec)

  easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)

  return missing_sources


def _GetMSBuildExternalBuilderTargets(spec):
  """Return a list of MSBuild targets for external builders.

  Right now, only "Build" and "Clean" targets are generated.

  Arguments:
    spec: The gyp target spec.
  Returns:
    List of MSBuild 'Target' specs.
  """
  build_cmd = _BuildCommandLineForRuleRaw(
      spec, spec['msvs_external_builder_build_cmd'],
      False, False, False, False)
  build_target = ['Target', {'Name': 'Build'}]
  build_target.append(['Exec', {'Command': build_cmd}])

  clean_cmd = _BuildCommandLineForRuleRaw(
      spec, spec['msvs_external_builder_clean_cmd'],
      False, False, False, False)
  clean_target = ['Target', {'Name': 'Clean'}]
  clean_target.append(['Exec', {'Command': clean_cmd}])

  return [build_target, clean_target]


def _GetMSBuildExtensions(props_files_of_rules):
  """Return the ExtensionSettings ImportGroup importing each rule .props."""
  extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
  for props_file in props_files_of_rules:
    extensions.append(['Import', {'Project': props_file}])
  return [extensions]


def _GetMSBuildExtensionTargets(targets_files_of_rules):
  """Return the ExtensionTargets ImportGroup importing each rule .targets.

  Files are imported in sorted order so the generated project is stable
  across runs.
  """
  targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
  for targets_file in sorted(targets_files_of_rules):
    targets_node.append(['Import', {'Project': targets_file}])
  return [targets_node]


def _GenerateActionsForMSBuild(spec, actions_to_add):
  """Add actions accumulated into an actions_to_add, merging as needed.

  Arguments:
    spec: the target project dict
    actions_to_add: dictionary keyed on input name, which maps to a list of
        dicts describing the actions attached to that input file.

  Returns:
    A pair of (action specification, the sources handled by this action).
  """
  sources_handled_by_action = OrderedSet()
  actions_spec = []
  for primary_input, actions in actions_to_add.iteritems():
    inputs = OrderedSet()
    outputs = OrderedSet()
    descriptions = []
    commands = []
    for action in actions:
      inputs.update(OrderedSet(action['inputs']))
      outputs.update(OrderedSet(action['outputs']))
      descriptions.append(action['description'])
      cmd = action['command']
      # For most actions, add 'call' so that actions that invoke batch files
      # return and continue executing.  msbuild_use_call provides a way to
      # disable this but I have not seen any adverse effect from doing that
      # for everything.
      if action.get('msbuild_use_call', True):
        cmd = 'call ' + cmd
      commands.append(cmd)
    # Add the custom build action for one input file.
    description = ', and also '.join(descriptions)

    # We can't join the commands simply with && because the command line will
    # get too long. See also _AddActions: cygwin's setup_env mustn't be called
    # for every invocation or the command that sets the PATH will grow too
    # long.
    command = (
        '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
    _AddMSBuildAction(spec,
                      primary_input,
                      inputs,
                      outputs,
                      command,
                      description,
                      sources_handled_by_action,
                      actions_spec)
  return actions_spec, sources_handled_by_action


def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
                      sources_handled_by_action, actions_spec):
  """Append one CustomBuild element for primary_input to actions_spec.

  VC-style macros in cmd are rewritten to their MSBuild equivalents, paths
  are normalized via _FixPath(s), and the primary input is recorded in
  sources_handled_by_action so it is not also emitted as a plain source.
  """
  command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
  primary_input = _FixPath(primary_input)
  inputs_array = _FixPaths(inputs)
  outputs_array = _FixPaths(outputs)
  additional_inputs = ';'.join([i for i in inputs_array
                                if i != primary_input])
  outputs = ';'.join(outputs_array)
  sources_handled_by_action.add(primary_input)
  action_spec = ['CustomBuild', {'Include': primary_input}]
  action_spec.extend(
      # TODO(jeanluc) 'Document' for all or just if as_sources?
      [['FileType', 'Document'],
       ['Command', command],
       ['Message', description],
       ['Outputs', outputs]
      ])
  if additional_inputs:
    action_spec.append(['AdditionalInputs', additional_inputs])
  actions_spec.append(action_spec)
gpl-3.0
Athrun29/horizon
openstack_dashboard/dashboards/project/networks/subnets/views.py
12
5601
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Views for managing Neutron Subnets.
"""

from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import tabs
from horizon.utils import memoized
from horizon import workflows

from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks.subnets \
    import tables as project_tables
from openstack_dashboard.dashboards.project.networks.subnets \
    import tabs as project_tabs
from openstack_dashboard.dashboards.project.networks.subnets import utils
from openstack_dashboard.dashboards.project.networks.subnets \
    import workflows as project_workflows


class CreateView(workflows.WorkflowView):
    """Workflow view for creating a subnet on an existing network."""

    workflow_class = project_workflows.CreateSubnet

    @memoized.memoized_method
    def get_object(self):
        """Fetch the parent network, redirecting to the index on failure.

        Memoized so the network is retrieved at most once per request.
        """
        try:
            network_id = self.kwargs["network_id"]
            network = api.neutron.network_get(self.request, network_id)
            return network
        except Exception:
            # exceptions.handle() with redirect aborts the request flow.
            redirect = reverse('horizon:project:networks:index')
            msg = _("Unable to retrieve network.")
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_initial(self):
        """Seed the workflow with the parent network's id and display name."""
        network = self.get_object()
        return {"network_id": self.kwargs['network_id'],
                "network_name": network.name_or_id}


class UpdateView(workflows.WorkflowView):
    """Workflow view for editing an existing subnet."""

    workflow_class = project_workflows.UpdateSubnet

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the subnet being edited, redirecting to the index on failure."""
        subnet_id = self.kwargs['subnet_id']
        try:
            return api.neutron.subnet_get(self.request, subnet_id)
        except Exception:
            redirect = reverse("horizon:project:networks:index")
            msg = _('Unable to retrieve subnet details')
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_initial(self):
        """Flatten the subnet object into the workflow's initial data dict.

        List-valued attributes (DNS servers, allocation pools, host routes)
        are serialized one-entry-per-line, matching the textarea widgets the
        UpdateSubnet workflow uses.
        """
        initial = super(UpdateView, self).get_initial()

        subnet = self._get_object()

        initial['network_id'] = self.kwargs['network_id']
        initial['subnet_id'] = subnet['id']
        initial['subnet_name'] = subnet['name']

        for key in ('cidr', 'ip_version', 'enable_dhcp'):
            initial[key] = subnet[key]

        # gateway_ip of None means "no gateway"; represent that as a flag
        # plus an empty string so the form fields round-trip cleanly.
        initial['gateway_ip'] = subnet['gateway_ip'] or ''
        initial['no_gateway'] = (subnet['gateway_ip'] is None)

        if initial['ip_version'] == 6:
            initial['ipv6_modes'] = utils.get_ipv6_modes_menu_from_attrs(
                subnet['ipv6_ra_mode'], subnet['ipv6_address_mode'])

        initial['dns_nameservers'] = '\n'.join(subnet['dns_nameservers'])
        pools = ['%s,%s' % (p['start'], p['end'])
                 for p in subnet['allocation_pools']]
        initial['allocation_pools'] = '\n'.join(pools)
        routes = ['%s,%s' % (r['destination'], r['nexthop'])
                  for r in subnet['host_routes']]
        initial['host_routes'] = '\n'.join(routes)

        return initial


class DetailView(tabs.TabView):
    """Tabbed detail page for a single subnet."""

    tab_group_class = project_tabs.SubnetDetailTabs
    template_name = 'project/networks/subnets/detail.html'
    page_title = _("Subnet Details")

    @memoized.memoized_method
    def get_data(self):
        """Fetch the subnet and decorate it with display-only attributes.

        Adds ipv6_modes_desc for IPv6 subnets and subnetpool_name when the
        subnet was allocated from a pool and the Neutron deployment supports
        the 'subnet_allocation' extension.
        """
        subnet_id = self.kwargs['subnet_id']

        try:
            subnet = api.neutron.subnet_get(self.request, subnet_id)
        except Exception:
            # NOTE(review): subnet = [] is effectively dead — exceptions.handle
            # with redirect raises, so control does not continue past it.
            subnet = []
            msg = _('Unable to retrieve subnet details.')
            exceptions.handle(self.request, msg,
                              redirect=self.get_redirect_url())
        else:
            if subnet.ip_version == 6:
                ipv6_modes = utils.get_ipv6_modes_menu_from_attrs(
                    subnet.ipv6_ra_mode, subnet.ipv6_address_mode)
                subnet.ipv6_modes_desc = utils.IPV6_MODE_MAP.get(ipv6_modes)

        if ('subnetpool_id' in subnet and
            subnet.subnetpool_id and
            api.neutron.is_extension_supported(self.request,
                                               'subnet_allocation')):
            subnetpool = api.neutron.subnetpool_get(self.request,
                                                    subnet.subnetpool_id)
            subnet.subnetpool_name = subnetpool.name

        return subnet

    def get_context_data(self, **kwargs):
        """Add the subnet, its row actions and the return URL to the context."""
        context = super(DetailView, self).get_context_data(**kwargs)
        subnet = self.get_data()
        table = project_tables.SubnetsTable(self.request,
                                            network_id=subnet.network_id)
        context["subnet"] = subnet
        context["url"] = self.get_redirect_url()
        context["actions"] = table.render_row_actions(subnet)
        return context

    def get_tabs(self, request, *args, **kwargs):
        subnet = self.get_data()
        return self.tab_group_class(request, subnet=subnet, **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse('horizon:project:networks:index')
apache-2.0
darrenabbey/ymap
scripts_seqModules/scripts_WGseq/putative_SNPs_from_parent.py
4
8911
###
### Simplify parental putative_SNP list to contain only those loci with an allelic ratio on range [0.25 .. 0.75].
###
### Uses genome definition files to only output data lines for chromosomes of interest.
###
### NOTE: Python 2 script (print statements, the deprecated `string` module
### functions, and time.clock are all Python-2-only).

def process_ParentLine(entry_line):
	# Parse one tab-delimited line of 'putative_SNPs_v4.txt' and map its
	# chromosome name onto the chromosome index defined by the genome's
	# figure_definitions.txt (loaded into the globals below before use).
	global chrNums
	global chrName
	global chrCount
	# Process 'putative_SNPs_v4.txt' file line.
	# example lines:
	# chromosome                   coord   ref   A    T    G    C
	# Ca21chr1_C_albicans_SC5314   13988   T     1    12   0    0
	# Ca21chr1_C_albicans_SC5314   13993   A     12   0    0    1
	# Ca21chr1_C_albicans_SC5314   14003   T     1    412  0    0
	# Ca21chr1_C_albicans_SC5314   14004   T     1    413  0    0
	parent_line = string.strip(entry_line)
	parent_line = parent_line.split('\t')
	P_chr_name = parent_line[0]   # chr name of bp.          : Ca21chrR_C_albicans_SC5314
	P_position = parent_line[1]   # chr position of bp.      : 2286371
	P_refBase  = parent_line[2]   # reference base at bp.    : T
	P_countA   = parent_line[3]   # count of A.              : 100
	P_countT   = parent_line[4]   # count of T.              : 0
	P_countG   = parent_line[5]   # count of G.              : 0
	P_countC   = parent_line[6]   # count of C.              : 1
	# Determine chrID associated with chromosome name.
	P_chr = 0
	for x in range(0,chrCount):
		if (chrNums[x] != 0):
			if chrName[x] == P_chr_name:
				P_chr = x+1
	# NOTE(review): if no chromosome matched, P_chr stays 0 and
	# chrName[P_chr-1] silently resolves to chrName[-1] (the LAST
	# chromosome) — verify whether unmatched chromosome names can occur
	# in the input before relying on the returned P_chrName.
	P_chrName = chrName[P_chr-1]
	return P_chr,P_chrName,P_position,P_countA,P_countT,P_countG,P_countC

import string, sys, time

# Command-line arguments: genome/user identify the reference genome; the
# child/parent project pairs identify whose data files are read; main_dir is
# the installation root under which users/<user>/projects/<project>/ lives.
genome           = sys.argv[ 1]
genomeUser       = sys.argv[ 2]
projectChild     = sys.argv[ 3]
projectChildUser = sys.argv[ 4]
projectParent    = sys.argv[ 5]
projectParentUser = sys.argv[ 6]
main_dir         = sys.argv[ 7]

logName     = main_dir+"users/"+projectChildUser+"/projects/"+projectChild+"/process_log.txt"
inputFile_P = main_dir+"users/"+projectParentUser+"/projects/"+projectParent+"/putative_SNPs_v4.txt"
inputFile_C = main_dir+"users/"+projectChildUser+"/projects/"+projectChild+"/SNP_CNV_v1.txt"

t0 = time.clock()

# NOTE(review): the banner below names scripts_ddRADseq but this file lives
# under scripts_WGseq — presumably a copy/paste artifact; confirm before
# trusting log provenance.
with open(logName, "a") as myfile:
	myfile.write("\t\t*====================================================================================*\n");
	myfile.write("\t\t| Log of 'scripts_seqModules/scripts_ddRADseq/putative_SNPs_from_parent.py' |\n");
	myfile.write("\t\t*------------------------------------------------------------------------------------*\n");


#============================================================================================================
# Find location of genome being used.
#------------------------------------------------------------------------------------------------------------
genomeDirectory = main_dir+"users/"+genomeUser+"/genomes/"+genome+"/"

#============================================================================================================
# Load FastaName from 'reference.txt' for genome in use.
#------------------------------------------------------------------------------------------------------------
with open(logName, "a") as myfile:
	myfile.write("\t\t|\tIdentifying name of reference FASTA file.\n")
reference_file = genomeDirectory + '/reference.txt'
refFile        = open(reference_file,'r')
FastaName      = refFile.read().strip()
refFile.close()
FastaName      = FastaName.replace(".fasta", "")

#============================================================================================================
# Process 'preprocessed_SNPs.txt' file for projectParent to determine initial SNP loci.
#------------------------------------------------------------------------------------------------------------
with open(logName, "a") as myfile:
	myfile.write("\t\t|\tProcessing parent 'putative_SNPs_v4' file -> het loci.\n")

# Look up chromosome name strings for genome in use.
# Read in and parse : "links_dir/main_script_dir/genome_specific/[genome]/figure_definitions.txt"
figureDefinition_file  = genomeDirectory + 'figure_definitions.txt'
figureDefinitionFile   = open(figureDefinition_file,'r')
figureDefinitionData   = figureDefinitionFile.readlines()

# Example lines in figureDefinition_file:
#     Chr  Use  Label  Name                         posX   posY   width   height
#     1    1    Chr1   Ca21chr1_C_albicans_SC5314   0.15   0.8    0.8     0.0625
#     2    1    Chr2   Ca21chr2_C_albicans_SC5314   0.15   0.7    *       0.0625
#     0    0    Mito   Ca19-mtDNA                   0.0    0.0    0.0     0.0
with open(logName, "a") as myfile:
	myfile.write("\t\t|\tDetermining number of chromosomes of interest in genome.\n")

# Determine the number of chromosomes of interest in genome.
chrName_maxcount = 0
for line in figureDefinitionData:
	line_parts = string.split(string.strip(line))
	chr_num = line_parts[0]
	if chr_num.isdigit():
		chr_num    = int(float(line_parts[0]))
		chr_use    = int(float(line_parts[1]))
		chr_label  = line_parts[2]
		chr_name   = line_parts[3]
		if chr_num > 0:
			if chr_num > chrName_maxcount:
				chrName_maxcount = chr_num
figureDefinitionFile.close()

# Pre-allocate chrName_array
chrName = []
for x in range(0, chrName_maxcount):
	chrName.append([])

with open(logName, "a") as myfile:
	myfile.write("\t\t|\tGathering name strings for chromosomes.\n")

# Gather name strings for chromosomes, in order.
figureDefinitionFile = open(figureDefinition_file,'r')
chrCounter = 0;
chrNums    = [];
chrNames   = [];
chrLabels  = [];
chrShorts  = [];
for line in figureDefinitionData:
	line_parts = string.split(string.strip(line))
	chr_num = line_parts[0]
	if chr_num.isdigit():
		chr_num = int(float(line_parts[0]))
		chrNums.append(chr_num);
		# NOTE(review): 'chrCounter += chrCounter' always leaves
		# chrCounter at 0 (presumably 'chrCounter += 1' was intended);
		# chrCounter appears unused after this loop, so the bug looks
		# harmless — confirm before fixing.
		chrCounter += chrCounter;
		chr_use = int(float(line_parts[1]))
		chr_label = line_parts[2]
		chrLabels.append(chr_label);
		chr_name = line_parts[3]
		chrNames.append(chr_name);
		chr_nameShort = chr_label
		chrShorts.append(chr_nameShort);
		if chr_num != 0:
			chrName[int(float(chr_num))-1] = chr_name
			with open(logName, "a") as myfile:
				myfile.write("\t\t|\t\t" + str(chr_num) + " : " + chr_name + " = " + chr_nameShort + "\n")
figureDefinitionFile.close()

# Put the chromosome count into a smaller name for later use.
chrCount = chrName_maxcount
with open(logName, "a") as myfile:
	myfile.write("\t\t|\t\tMax chr string : "+str(chrCount)+"\n")

#............................................................................................................

with open(logName, "a") as myfile:
	myfile.write("\t\t|\tOpen parent 'putative_SNPs_v4.txt' file.\n")

#............................................................................................................

count            = 0
old_chr          = 0
fragment_found   = 0
last_fragment    = 0
current_fragment = 0
log_count        = 0
log_offset       = 0

print '### Chromosomes of interest : '
for x in range(0,chrCount):
	if (chrNums[x] != 0):
		print '### \t' + str(x+1) + ' : ' + str(chrName[x])

with open(logName, "a") as myfile:
	myfile.write("\t\t|\tGathering data from putative SNP loci in parent.\n")

# Open dataset 'putative_CNVs_v1.txt' file.
data_P = open(inputFile_P,"r")

print '### Data lines for each het locus in parent : [chromosome_name, bp_coordinate, countA, countT, countG, countC]'

# Process 'SNP_CNV_v1.txt' file for both parents, line by line... while checking for missing data.
# Only loci with a major-allele ratio strictly inside (0.25, 0.75) and total
# coverage above 20 reads are emitted (i.e. credible heterozygous loci).
line_P = data_P.readline()
error_endOfFile = False
while (error_endOfFile == False):
	P_chrID,P_chrName,P_position,P_countA,P_countT,P_countG,P_countC = process_ParentLine(line_P)
	P_list = [int(float(P_countA)), int(float(P_countT)), int(float(P_countG)), int(float(P_countC))]
	if (sum(P_list) == 0):
		P_allelicRatio = 0
	else:
		P_allelicRatio = max(P_list)/float(sum(P_list))
	if ((P_allelicRatio < 0.75) and (P_allelicRatio > 0.25) and (sum(P_list) > 20)):
		print P_chrName+"\t"+str(P_position)+"\t"+str(P_countA)+"\t"+str(P_countT)+"\t"+str(P_countG)+"\t"+str(P_countC)
	error_endOfFile = False
	line_P = data_P.readline()
	if not line_P:   # EOF 1
		error_endOfFile = True
		break
data_P.close()

#------------------------------------------------------------------------------------------------------------
# End of main code block.
#============================================================================================================

print '### End of preprocessed parental SNP, child SNP data.'

with open(logName, "a") as myfile:
	myfile.write("\t\t|\tTime to process = " + str(time.clock()-t0) + "\n")
	myfile.write("\t\t*---------------------------------------------------------------------------*\n");
	myfile.write("\t\t| End of 'scripts_seqModules/scripts_ddRADseq/putative_SNPs_from_parent.py' |\n");
	myfile.write("\t\t*===========================================================================*\n");
mit
ychaim/peatio-client-python
lib/client.py
3
2169
import urllib2 import json from lib.auth import Auth BASE_URL = 'https://peatio.com' API_BASE_PATH = '/api/v2' API_PATH_DICT = { # GET 'members': '%s/members/me.json', 'markets': '%s/markets.json', #market code required in url as {market}.json 'tickers' : '%s/tickers/%%s.json', #market required in url query string as '?market={market}' 'orders': '%s/orders.json', #order id required in url query string as '?id={id}' 'order': '%s/order.json', #market required in url query string as '?market={market}' 'order_book': '%s/order_book.json', #market required in url query string as '?market={market}' 'trades': '%s/trades.json', #market required in url query string as '?market={market}' 'my_trades': '%s/trades/my.json', 'k': '%s/k.json', #clear orders in all markets 'clear': '%s/orders/clear.json', #delete a specific order 'delete_order': '%s/order/delete.json', #TODO multi orders API 'multi_orders': '%s/orders/multi.json', } def get_api_path(name): path_pattern = API_PATH_DICT[name] return path_pattern % API_BASE_PATH class Client(): def __init__(self, access_key=None, secret_key=None): if access_key and secret_key: self.auth = Auth(access_key, secret_key) else: from conf import ACCESS_KEY, SECRET_KEY self.auth = Auth(ACCESS_KEY, SECRET_KEY) def get(self, path, params=None): verb = "GET" signature, query = self.auth.sign_params(verb, path, params) url = "%s%s?%s&signature=%s" % (BASE_URL, path, query, signature) resp = urllib2.urlopen(url) data = resp.readlines() if len(data): return json.loads(data[0]) def post(self, path, params=None): verb = "POST" print params signature, query = self.auth.sign_params(verb, path, params) url = "%s%s" % (BASE_URL, path) data = "%s&signature=%s" % (query, signature) print data print url resp = urllib2.urlopen(url, data) data = resp.readlines() if len(data): return json.loads(data[0])
cc0-1.0
pwnieexpress/raspberry_pwn
src/pentest/sqlmap/thirdparty/pagerank/__init__.py
27
1141
#!/usr/bin/env python # # The MIT License # # Copyright 2010 Corey Goldberg # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # pass
gpl-3.0
ekalosak/server
tests/unit/test_util_decorators.py
7
1249
""" Unit tests for utility decorators """ from __future__ import division from __future__ import print_function from __future__ import unicode_literals import unittest import sys import time import tests.utils as utils class TestRepeat(unittest.TestCase): """ Test the Repeat decorator """ # lowest possible positive number sleepSeconds = sys.float_info.min @utils.Repeat(sleepSeconds) def repeating(self): self.i += 1 return self.i < self.endIterationAt def testRepeat(self): self.i = 0 self.endIterationAt = 3 self.repeating() self.assertEqual(self.i, self.endIterationAt) class TestTimeout(unittest.TestCase): """ Test the Timeout decorator """ # lowest positive value signal.alarm allows timeoutSecondsLow = 1 @utils.Timeout(timeoutSecondsLow) def timingOut(self): time.sleep(60) # much longer than the timeout @utils.Timeout() def notTimingOut(self): return 0 def testTimeoutException(self): with self.assertRaises(utils.TimeoutException): self.timingOut() def testTimeoutNoException(self): self.assertEquals(self.notTimingOut(), 0) # no exception thrown
apache-2.0
tchernomax/ansible
lib/ansible/plugins/cliconf/routeros.py
12
2615
#
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import re
import json

from itertools import chain

from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


class Cliconf(CliconfBase):
    """Cliconf plugin for MikroTik RouterOS devices.

    Device facts are scraped from the text output of RouterOS 'print'
    commands; RouterOS has no config push/pull model here, so get_config
    and edit_config are intentionally no-ops.
    """

    def get_device_info(self):
        """Collect OS name/version/model/hostname by parsing CLI output.

        Each regex is applied only if its command produced output; keys are
        omitted (not set to None) when a pattern does not match.
        """
        device_info = {}

        device_info['network_os'] = 'RouterOS'

        resource = self.get(b'/system resource print')
        data = to_text(resource, errors='surrogate_or_strict').strip()
        match = re.search(r'version: (\S+)', data)
        if match:
            device_info['network_os_version'] = match.group(1)

        routerboard = self.get(b'/system routerboard print')
        data = to_text(routerboard, errors='surrogate_or_strict').strip()
        match = re.search(r'model: (.+)$', data, re.M)
        if match:
            device_info['network_os_model'] = match.group(1)

        identity = self.get(b'/system identity print')
        data = to_text(identity, errors='surrogate_or_strict').strip()
        match = re.search(r'name: (.+)$', data, re.M)
        if match:
            device_info['network_os_hostname'] = match.group(1)

        return device_info

    def get_config(self, source='running', format='text', flags=None):
        # Not supported on RouterOS through this plugin; returns None.
        return

    def edit_config(self, command):
        # Not supported on RouterOS through this plugin; returns None.
        return

    def get(self, command, prompt=None, answer=None, sendonly=False):
        """Send a raw command over the connection and return its output."""
        return self.send_command(command, prompt=prompt, answer=answer, sendonly=sendonly)

    def get_capabilities(self):
        """Return the plugin capabilities as a JSON string (Ansible contract)."""
        result = {}
        result['rpc'] = self.get_base_rpc()
        result['network_api'] = 'cliconf'
        result['device_info'] = self.get_device_info()
        return json.dumps(result)
gpl-3.0
python-thumbnails/python-thumbnails
tests/test_images.py
2
2628
# -*- coding: utf-8 -*-
import hashlib
import os
import unittest
from io import BytesIO
from unittest import mock

from PIL import Image

from thumbnails.conf import settings
from thumbnails.images import SourceFile, Thumbnail

from . import data
from .utils import has_installed


class ThumbnailTestCase(unittest.TestCase):
    """Unit tests for the Thumbnail value object (no real files involved)."""

    def setUp(self):
        # Name is given as path parts; size is (width, height).
        self.instance = Thumbnail(['n', 'ame'], 'jpg')
        self.instance.size = 200, 400

    def test_name(self):
        # Name parts are joined with '/'.
        self.assertEqual(self.instance.name, 'n/ame')

    def test_path(self):
        self.assertTrue(self.instance.path.endswith('thumbnails-cache/n/ame.jpg'))

    def test_url(self):
        self.assertTrue(self.instance.url.endswith('/n/ame.jpg'))

    def test_width(self):
        self.assertEqual(self.instance.width, 200)

    def test_height(self):
        self.assertEqual(self.instance.height, 400)

    def test_ratio(self):
        # ratio is width / height.
        self.assertEqual(self.instance.ratio, 0.5)

    def test_is_portrait(self):
        self.assertTrue(self.instance.is_portrait)
        self.instance.size = 400, 200
        self.assertFalse(self.instance.is_portrait)

    def test_is_landscape(self):
        self.assertFalse(self.instance.is_landscape)
        self.instance.size = 400, 200
        self.assertTrue(self.instance.is_landscape)

    @mock.patch('{}.exists'.format(settings.THUMBNAIL_STORAGE_BACKEND))
    def test_exists(self, mock_exists):
        # The storage backend's exists() is mocked so no filesystem access
        # happens; we only assert it is queried with the thumbnail's path.
        self.instance = Thumbnail(['name'], 'jpg')
        self.assertTrue(self.instance.exists)
        mock_exists.assert_called_with(self.instance.path)


class SourceImageTestCase(unittest.TestCase):
    """Unit tests for SourceFile's handling of the supported input kinds."""

    # A real JPEG shipped alongside the tests.
    FILE_PATH = os.path.join(os.path.dirname(__file__), 'test_image.jpg')

    def test_init(self):
        # Plain string paths pass through unchanged.
        self.assertEqual(SourceFile(self.FILE_PATH).file, self.FILE_PATH)

    @unittest.skipIf(not has_installed('django'), 'Django not installed')
    def test_django_image_files(self):
        # Django FieldFile/ImageFieldFile wrappers should be unwrapped to
        # the underlying name/path.
        from django.db.models.fields import files
        field = files.FileField()
        f = SourceFile(files.FieldFile(field=field, instance=None, name=self.FILE_PATH))
        self.assertEqual(f.file, self.FILE_PATH)
        f = SourceFile(files.ImageFieldFile(field=field, instance=None, name=self.FILE_PATH))
        self.assertEqual(f.file, self.FILE_PATH)

    def test_base64_encoded_string(self):
        # A base64 data string should decode to the exact original bytes
        # (pinned by sha1) and be loadable by PIL as a valid image.
        file = SourceFile(data.BASE64_STRING_OF_IMAGE)
        self.assertEqual(
            hashlib.sha1(file.open().getvalue()).hexdigest(),
            '6666212f5302426c845ecb2a2901fae021735f24'
        )
        image = Image.open(BytesIO(file.open().read()))
        self.assertIsNotNone(image.load())
mit