repo_name
stringlengths
6
100
path
stringlengths
4
294
copies
stringlengths
1
5
size
stringlengths
4
6
content
stringlengths
606
896k
license
stringclasses
15 values
JTarball/docker-django-polymer
docker/app/app/backend/apps/_archive/content/migrations/0005_auto__add_field_issue_reported__add_field_commentcard_reported__add_fi.py
8
22120
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Issue.reported' db.add_column(u'content_issue', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) # Adding field 'CommentCard.reported' db.add_column(u'content_commentcard', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) # Adding field 'Update.reported' db.add_column(u'content_update', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) # Adding field 'Troubleshooting.reported' db.add_column(u'content_troubleshooting', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) # Adding field 'Comment.reported' db.add_column(u'content_comment', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) # Adding field 'Dependency.reported' db.add_column(u'content_dependency', 'reported', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) def backwards(self, orm): # Deleting field 'Issue.reported' db.delete_column(u'content_issue', 'reported') # Deleting field 'CommentCard.reported' db.delete_column(u'content_commentcard', 'reported') # Deleting field 'Update.reported' db.delete_column(u'content_update', 'reported') # Deleting field 'Troubleshooting.reported' db.delete_column(u'content_troubleshooting', 'reported') # Deleting field 'Comment.reported' db.delete_column(u'content_comment', 'reported') # Deleting field 'Dependency.reported' db.delete_column(u'content_dependency', 'reported') models = { u'accounts.accountsuser': { 'Meta': {'object_name': 'AccountsUser'}, 'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 
'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_subscribed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': 
u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'content.category': { 'Meta': {'ordering': "['-slug']", 'object_name': 'Category'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['content.Language']", 'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, u'content.comment': { 'Meta': {'object_name': 'Comment'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.Post']", 'null': 'True'}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.Comment']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'content.commentcard': { 'Meta': {'object_name': 'CommentCard'}, 'author': 
('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.PostCard']", 'null': 'True'}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.CommentCard']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'content.dependency': { 'Meta': {'object_name': 'Dependency'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', 
[], {'to': u"orm['content.Post']"}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.Dependency']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'content.genre': { 'Meta': {'object_name': 'Genre'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}), 'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['content.Genre']"}), u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, u'content.issue': { 'Meta': {'object_name': 'Issue'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'to': 
u"orm['content.Post']"}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.Issue']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'content.language': { 'Meta': {'ordering': "['-slug']", 'object_name': 'Language'}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, u'content.liker': { 'Meta': {'object_name': 'Liker'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']"}), 'card': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.PostCard']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'liked_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) }, u'content.post': { 'Meta': {'object_name': 'Post'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']"}), 'content': ('django.db.models.fields.TextField', [], {}), 'content_codeexamples': ('django.db.models.fields.TextField', [], {'default': "'code examples'", 'null': 'True', 'blank': 'True'}), 'content_demo': ('django.db.models.fields.TextField', [], {'default': "'demo'", 'null': 'True', 'blank': 'True'}), 'content_designdecisions': ('django.db.models.fields.TextField', [], {'default': "'design decisions'", 'null': 'True', 'blank': 'True'}), 'content_furtherlearning': ('django.db.models.fields.TextField', [], {'default': "'further learning'", 'null': 'True', 'blank': 'True'}), 'content_gotchas': ('django.db.models.fields.TextField', [], {'default': 
"'gotcha'", 'null': 'True', 'blank': 'True'}), 'content_markdown': ('django.db.models.fields.TextField', [], {}), 'content_markup': ('django.db.models.fields.TextField', [], {}), 'content_trickstips': ('django.db.models.fields.TextField', [], {'default': "'trickstips'", 'null': 'True', 'blank': 'True'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'meta_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'meta_keywords': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'node': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.PostNode']", 'null': 'True', 'blank': 'True'}), 'published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, u'content.postcard': { 'Meta': {'object_name': 'PostCard'}, 'brokens': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}), 'comments_pretty': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '10'}), 'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'created_at_pretty': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '50'}), 'href': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'is_externalhref': 
('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'likes': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}), 'likes_pretty': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '10'}), 'main_post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.Post']"}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}), 'views': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}), 'views_pretty': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '10'}) }, u'content.postnode': { 'Meta': {'object_name': 'PostNode'}, 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['content.PostNode']"}), u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}), u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, u'content.subcategory': { 'Meta': {'ordering': "['-slug']", 'object_name': 'SubCategory'}, 'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['content.Category']", 'null': 'True', 
'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, u'content.troubleshooting': { 'Meta': {'object_name': 'Troubleshooting'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.Post']"}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.Troubleshooting']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'content.update': { 'Meta': {'object_name': 'Update'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.AccountsUser']", 'null': 'True'}), 'author_pretty': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'date_ago': ('django.db.models.fields.CharField', [], {'max_length': 
'255', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_reply': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'length': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['content.Post']", 'null': 'True'}), 'reply': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'postreply'", 'null': 'True', 'to': u"orm['content.Update']"}), 'reported': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'updated_at': ('django.db.models.fields.DateTimeField', [], {}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['content']
gpl-2.0
rasata/ansible
lib/ansible/plugins/filter/mathstuff.py
81
4025
# (c) 2014, Brian Coca <bcoca@ansible.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import absolute_import import math import collections from ansible import errors def unique(a): if isinstance(a,collections.Hashable): c = set(a) else: c = [] for x in a: if x not in c: c.append(x) return c def intersect(a, b): if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable): c = set(a) & set(b) else: c = unique(filter(lambda x: x in b, a)) return c def difference(a, b): if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable): c = set(a) - set(b) else: c = unique(filter(lambda x: x not in b, a)) return c def symmetric_difference(a, b): if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable): c = set(a) ^ set(b) else: c = unique(filter(lambda x: x not in intersect(a,b), union(a,b))) return c def union(a, b): if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable): c = set(a) | set(b) else: c = unique(a + b) return c def min(a): _min = __builtins__.get('min') return _min(a); def max(a): _max = __builtins__.get('max') return _max(a); def isnotanumber(x): try: return math.isnan(x) except TypeError: return False def logarithm(x, base=math.e): try: if base == 10: return math.log10(x) else: return math.log(x, base) except TypeError as e: raise errors.AnsibleFilterError('log() can only 
be used on numbers: %s' % str(e)) def power(x, y): try: return math.pow(x, y) except TypeError as e: raise errors.AnsibleFilterError('pow() can only be used on numbers: %s' % str(e)) def inversepower(x, base=2): try: if base == 2: return math.sqrt(x) else: return math.pow(x, 1.0/float(base)) except TypeError as e: raise errors.AnsibleFilterError('root() can only be used on numbers: %s' % str(e)) def human_readable(size, isbits=False, unit=None): base = 'bits' if isbits else 'Bytes' suffix = '' ranges = ( (1<<70, 'Z'), (1<<60, 'E'), (1<<50, 'P'), (1<<40, 'T'), (1<<30, 'G'), (1<<20, 'M'), (1<<10, 'K'), (1, base) ) for limit, suffix in ranges: if (unit is None and size >= limit) or \ unit is not None and unit.upper() == suffix: break if limit != 1: suffix += base[0] return '%.2f %s' % (float(size)/ limit, suffix) class FilterModule(object): ''' Ansible math jinja2 filters ''' def filters(self): return { # general math 'isnan': isnotanumber, 'min' : min, 'max' : max, # exponents and logarithms 'log': logarithm, 'pow': power, 'root': inversepower, # set theory 'unique' : unique, 'intersect': intersect, 'difference': difference, 'symmetric_difference': symmetric_difference, 'union': union, # computer theory 'human_readable' : human_readable, }
gpl-3.0
kinverarity1/las-reader
lasio/convert_version.py
2
1144
import argparse import os import sys import lasio def convert_version(): args = get_convert_version_parser().parse_args(sys.argv[1:]) assert os.path.isfile(args.input) las = lasio.read(args.input, ignore_header_errors=args.ignore_header_errors) if os.path.isfile(args.output) and not args.overwrite: raise OSError("Output file already exists") with open(args.output, "w") as f: las.write(f, version=float(args.to)) def get_convert_version_parser(): parser = argparse.ArgumentParser( "Convert LAS file version", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument("-t", "--to", default=2, help="Version to convert to") parser.add_argument( "--overwrite", action="store_true", default=False, help="Overwrite output file if it already exists", ) parser.add_argument( "-i", "--ignore-header-errors", action="store_true", help="Ignore header section errors.", default=False, ) parser.add_argument("input") parser.add_argument("output") return parser
mit
AnishShah/tensorflow
tensorflow/python/layers/utils_test.py
38
3953
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tf.layers.utils.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.layers import utils from tensorflow.python.platform import test class ConvUtilsTest(test.TestCase): def testConvertDataFormat(self): self.assertEqual('NCDHW', utils.convert_data_format('channels_first', 5)) self.assertEqual('NCHW', utils.convert_data_format('channels_first', 4)) self.assertEqual('NCW', utils.convert_data_format('channels_first', 3)) self.assertEqual('NHWC', utils.convert_data_format('channels_last', 4)) self.assertEqual('NWC', utils.convert_data_format('channels_last', 3)) self.assertEqual('NDHWC', utils.convert_data_format('channels_last', 5)) with self.assertRaises(ValueError): utils.convert_data_format('invalid', 2) def testNormalizeTuple(self): self.assertEqual((2, 2, 2), utils.normalize_tuple(2, n=3, name='strides')) self.assertEqual( (2, 1, 2), utils.normalize_tuple((2, 1, 2), n=3, name='strides')) with self.assertRaises(ValueError): utils.normalize_tuple((2, 1), n=3, name='strides') with self.assertRaises(ValueError): utils.normalize_tuple(None, n=3, name='strides') def testNormalizeDataFormat(self): self.assertEqual( 'channels_last', utils.normalize_data_format('Channels_Last')) self.assertEqual( 
'channels_first', utils.normalize_data_format('CHANNELS_FIRST')) with self.assertRaises(ValueError): utils.normalize_data_format('invalid') def testNormalizePadding(self): self.assertEqual('same', utils.normalize_padding('SAME')) self.assertEqual('valid', utils.normalize_padding('VALID')) with self.assertRaises(ValueError): utils.normalize_padding('invalid') def testConvOutputLength(self): self.assertEqual(4, utils.conv_output_length(4, 2, 'same', 1, 1)) self.assertEqual(2, utils.conv_output_length(4, 2, 'same', 2, 1)) self.assertEqual(3, utils.conv_output_length(4, 2, 'valid', 1, 1)) self.assertEqual(2, utils.conv_output_length(4, 2, 'valid', 2, 1)) self.assertEqual(5, utils.conv_output_length(4, 2, 'full', 1, 1)) self.assertEqual(3, utils.conv_output_length(4, 2, 'full', 2, 1)) self.assertEqual(2, utils.conv_output_length(5, 2, 'valid', 2, 2)) def testConvInputLength(self): self.assertEqual(3, utils.conv_input_length(4, 2, 'same', 1)) self.assertEqual(2, utils.conv_input_length(2, 2, 'same', 2)) self.assertEqual(4, utils.conv_input_length(3, 2, 'valid', 1)) self.assertEqual(4, utils.conv_input_length(2, 2, 'valid', 2)) self.assertEqual(3, utils.conv_input_length(4, 2, 'full', 1)) self.assertEqual(4, utils.conv_input_length(3, 2, 'full', 2)) def testDeconvOutputLength(self): self.assertEqual(4, utils.deconv_output_length(4, 2, 'same', 1)) self.assertEqual(8, utils.deconv_output_length(4, 2, 'same', 2)) self.assertEqual(5, utils.deconv_output_length(4, 2, 'valid', 1)) self.assertEqual(8, utils.deconv_output_length(4, 2, 'valid', 2)) self.assertEqual(3, utils.deconv_output_length(4, 2, 'full', 1)) self.assertEqual(6, utils.deconv_output_length(4, 2, 'full', 2)) if __name__ == '__main__': test.main()
apache-2.0
takis/django
tests/field_deconstruction/tests.py
189
18358
from __future__ import unicode_literals from django.apps import apps from django.db import models from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_lru_cache from django.utils import six class FieldDeconstructionTests(SimpleTestCase): """ Tests the deconstruct() method on all core fields. """ def test_name(self): """ Tests the outputting of the correct name if assigned one. """ # First try using a "normal" field field = models.CharField(max_length=65) name, path, args, kwargs = field.deconstruct() self.assertIsNone(name) field.set_attributes_from_name("is_awesome_test") name, path, args, kwargs = field.deconstruct() self.assertEqual(name, "is_awesome_test") self.assertIsInstance(name, six.text_type) # Now try with a ForeignKey field = models.ForeignKey("some_fake.ModelName", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertIsNone(name) field.set_attributes_from_name("author") name, path, args, kwargs = field.deconstruct() self.assertEqual(name, "author") def test_auto_field(self): field = models.AutoField(primary_key=True) field.set_attributes_from_name("id") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.AutoField") self.assertEqual(args, []) self.assertEqual(kwargs, {"primary_key": True}) def test_big_integer_field(self): field = models.BigIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BigIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_boolean_field(self): field = models.BooleanField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BooleanField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.BooleanField(default=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BooleanField") self.assertEqual(args, []) self.assertEqual(kwargs, {"default": True}) def 
test_char_field(self): field = models.CharField(max_length=65) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 65}) field = models.CharField(max_length=65, null=True, blank=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True}) def test_char_field_choices(self): field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two"))) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CharField") self.assertEqual(args, []) self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1}) def test_csi_field(self): field = models.CommaSeparatedIntegerField(max_length=100) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 100}) def test_date_field(self): field = models.DateField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now": True}) def test_datetime_field(self): field = models.DateTimeField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateTimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True}) # Bug #21785 field = 
models.DateTimeField(auto_now=True, auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True}) def test_decimal_field(self): field = models.DecimalField(max_digits=5, decimal_places=2) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2}) def test_decimal_field_0_decimal_places(self): """ A DecimalField with decimal_places=0 should work (#22272). """ field = models.DecimalField(max_digits=5, decimal_places=0) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0}) def test_email_field(self): field = models.EmailField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 254}) field = models.EmailField(max_length=255) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 255}) def test_file_field(self): field = models.FileField(upload_to="foo/bar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar"}) # Test max_length field = models.FileField(upload_to="foo/bar", max_length=200) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200}) def test_file_path_field(self): field = models.FilePathField(match=".*\.txt$") name, path, args, kwargs = 
field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual(kwargs, {"match": ".*\.txt$"}) field = models.FilePathField(recursive=True, allow_folders=True, max_length=123) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123}) def test_float_field(self): field = models.FloatField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FloatField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_foreign_key(self): # Test basic pointing from django.contrib.auth.models import Permission field = models.ForeignKey("auth.Permission", models.CASCADE) field.remote_field.model = Permission field.remote_field.field_name = "id" name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE}) self.assertFalse(hasattr(kwargs['to'], "setting_name")) # Test swap detection for swappable model field = models.ForeignKey("auth.User", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Test nonexistent (for now) model field = models.ForeignKey("something.Else", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "something.Else", "on_delete": models.CASCADE}) # Test on_delete field = models.ForeignKey("auth.User", models.SET_NULL) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 
"django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL}) # Test to_field preservation field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "to_field": "foobar", "on_delete": models.CASCADE}) # Test related_name preservation field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "foobar", "on_delete": models.CASCADE}) @override_settings(AUTH_USER_MODEL="auth.Permission") def test_foreign_key_swapped(self): with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff works. 
field = models.ForeignKey("auth.Permission", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") def test_image_field(self): field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ImageField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"}) def test_integer_field(self): field = models.IntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_ip_address_field(self): field = models.IPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_generic_ip_address_field(self): field = models.GenericIPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.GenericIPAddressField(protocol="IPv6") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {"protocol": "IPv6"}) def test_many_to_many_field(self): # Test normal field = models.ManyToManyField("auth.Permission") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission"}) self.assertFalse(hasattr(kwargs['to'], "setting_name")) # Test swappable field = 
models.ManyToManyField("auth.User") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.User"}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Test through field = models.ManyToManyField("auth.Permission", through="auth.Group") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"}) # Test custom db_table field = models.ManyToManyField("auth.Permission", db_table="custom_table") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"}) # Test related_name field = models.ManyToManyField("auth.Permission", related_name="custom_table") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"}) @override_settings(AUTH_USER_MODEL="auth.Permission") def test_many_to_many_field_swapped(self): with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff works. 
field = models.ManyToManyField("auth.Permission") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission"}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") def test_null_boolean_field(self): field = models.NullBooleanField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.NullBooleanField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_integer_field(self): field = models.PositiveIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_small_integer_field(self): field = models.PositiveSmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveSmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_slug_field(self): field = models.SlugField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.SlugField(db_index=False, max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {"db_index": False, "max_length": 231}) def test_small_integer_field(self): field = models.SmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_text_field(self): field = models.TextField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TextField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_time_field(self): field = models.TimeField() name, 
path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.TimeField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {'auto_now': True}) field = models.TimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {'auto_now_add': True}) def test_url_field(self): field = models.URLField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.URLField(max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 231}) def test_binary_field(self): field = models.BinaryField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BinaryField") self.assertEqual(args, []) self.assertEqual(kwargs, {})
bsd-3-clause
ClusterLabs/crmsh
crmsh/ordereddict.py
2
4567
# Copyright (c) 2009 Raymond Hettinger # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation files # (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, # publish, distribute, sublicense, and/or sell copies of the Software, # and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. 
try: from collections import OrderedDict except ImportError: from UserDict import DictMixin class OrderedDict(dict, DictMixin): def __init__(self, *args, **kwds): if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__end except AttributeError: self.clear() self.update(*args, **kwds) def clear(self): self.__end = end = [] end += [None, end, end] # sentinel node for doubly linked list self.__map = {} # key --> [key, prev, next] dict.clear(self) def __setitem__(self, key, value): if key not in self: end = self.__end curr = end[1] curr[2] = end[1] = self.__map[key] = [key, curr, end] dict.__setitem__(self, key, value) def __delitem__(self, key): dict.__delitem__(self, key) key, prev, next_ = self.__map.pop(key) prev[2] = next_ next_[1] = prev def __iter__(self): end = self.__end curr = end[2] while curr is not end: yield curr[0] curr = curr[2] def __reversed__(self): end = self.__end curr = end[1] while curr is not end: yield curr[0] curr = curr[1] def popitem(self, last=True): if not self: raise KeyError('dictionary is empty') if last: key = next(reversed(self)) else: key = next(iter(self)) value = self.pop(key) return key, value def __reduce__(self): items = [[k, self[k]] for k in self] tmp = self.__map, self.__end del self.__map, self.__end inst_dict = vars(self).copy() self.__map, self.__end = tmp if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def keys(self): return list(self) setdefault = DictMixin.setdefault update = DictMixin.update pop = DictMixin.pop values = DictMixin.values items = DictMixin.items iterkeys = DictMixin.iterkeys itervalues = DictMixin.itervalues iteritems = DictMixin.iteritems def __repr__(self): if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, list(self.items())) def copy(self): return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): d = cls() for key in iterable: d[key] = value 
return d def __eq__(self, other): if isinstance(other, OrderedDict): if len(self) != len(other): return False for p, q in zip(list(self.items()), list(other.items())): if p != q: return False return True return dict.__eq__(self, other) def __ne__(self, other): return not self == other odict = OrderedDict
gpl-2.0
nyuszika7h/youtube-dl
youtube_dl/extractor/crunchyroll.py
10
28745
# coding: utf-8 from __future__ import unicode_literals import re import json import zlib from hashlib import sha1 from math import pow, sqrt, floor from .common import InfoExtractor from .vrv import VRVIE from ..compat import ( compat_b64decode, compat_etree_Element, compat_etree_fromstring, compat_str, compat_urllib_parse_urlencode, compat_urllib_request, compat_urlparse, ) from ..utils import ( ExtractorError, bytes_to_intlist, extract_attributes, float_or_none, intlist_to_bytes, int_or_none, lowercase_escape, merge_dicts, remove_end, sanitized_Request, urlencode_postdata, xpath_text, ) from ..aes import ( aes_cbc_decrypt, ) class CrunchyrollBaseIE(InfoExtractor): _LOGIN_URL = 'https://www.crunchyroll.com/login' _LOGIN_FORM = 'login_form' _NETRC_MACHINE = 'crunchyroll' def _call_rpc_api(self, method, video_id, note=None, data=None): data = data or {} data['req'] = 'RpcApi' + method data = compat_urllib_parse_urlencode(data).encode('utf-8') return self._download_xml( 'https://www.crunchyroll.com/xml/', video_id, note, fatal=False, data=data, headers={ 'Content-Type': 'application/x-www-form-urlencoded', }) def _login(self): username, password = self._get_login_info() if username is None: return login_page = self._download_webpage( self._LOGIN_URL, None, 'Downloading login page') def is_logged(webpage): return 'href="/logout"' in webpage # Already logged in if is_logged(login_page): return login_form_str = self._search_regex( r'(?P<form><form[^>]+?id=(["\'])%s\2[^>]*>)' % self._LOGIN_FORM, login_page, 'login form', group='form') post_url = extract_attributes(login_form_str).get('action') if not post_url: post_url = self._LOGIN_URL elif not post_url.startswith('http'): post_url = compat_urlparse.urljoin(self._LOGIN_URL, post_url) login_form = self._form_hidden_inputs(self._LOGIN_FORM, login_page) login_form.update({ 'login_form[name]': username, 'login_form[password]': password, }) response = self._download_webpage( post_url, None, 'Logging in', 'Wrong login info', 
data=urlencode_postdata(login_form), headers={'Content-Type': 'application/x-www-form-urlencoded'}) # Successful login if is_logged(response): return error = self._html_search_regex( '(?s)<ul[^>]+class=["\']messages["\'][^>]*>(.+?)</ul>', response, 'error message', default=None) if error: raise ExtractorError('Unable to login: %s' % error, expected=True) raise ExtractorError('Unable to log in') def _real_initialize(self): self._login() @staticmethod def _add_skip_wall(url): parsed_url = compat_urlparse.urlparse(url) qs = compat_urlparse.parse_qs(parsed_url.query) # Always force skip_wall to bypass maturity wall, namely 18+ confirmation message: # > This content may be inappropriate for some people. # > Are you sure you want to continue? # since it's not disabled by default in crunchyroll account's settings. # See https://github.com/ytdl-org/youtube-dl/issues/7202. qs['skip_wall'] = ['1'] return compat_urlparse.urlunparse( parsed_url._replace(query=compat_urllib_parse_urlencode(qs, True))) class CrunchyrollIE(CrunchyrollBaseIE, VRVIE): IE_NAME = 'crunchyroll' _VALID_URL = r'https?://(?:(?P<prefix>www|m)\.)?(?P<url>crunchyroll\.(?:com|fr)/(?:media(?:-|/\?id=)|(?:[^/]*/){1,2}[^/?&]*?)(?P<video_id>[0-9]+))(?:[/?&]|$)' _TESTS = [{ 'url': 'http://www.crunchyroll.com/wanna-be-the-strongest-in-the-world/episode-1-an-idol-wrestler-is-born-645513', 'info_dict': { 'id': '645513', 'ext': 'mp4', 'title': 'Wanna be the Strongest in the World Episode 1 – An Idol-Wrestler is Born!', 'description': 'md5:2d17137920c64f2f49981a7797d275ef', 'thumbnail': r're:^https?://.*\.jpg$', 'uploader': 'Yomiuri Telecasting Corporation (YTV)', 'upload_date': '20131013', 'url': 're:(?!.*&amp)', }, 'params': { # rtmp 'skip_download': True, }, 'skip': 'Video gone', }, { 'url': 'http://www.crunchyroll.com/media-589804/culture-japan-1', 'info_dict': { 'id': '589804', 'ext': 'flv', 'title': 'Culture Japan Episode 1 – Rebuilding Japan after the 3.11', 'description': 
'md5:2fbc01f90b87e8e9137296f37b461c12', 'thumbnail': r're:^https?://.*\.jpg$', 'uploader': 'Danny Choo Network', 'upload_date': '20120213', }, 'params': { # rtmp 'skip_download': True, }, 'skip': 'Video gone', }, { 'url': 'http://www.crunchyroll.com/rezero-starting-life-in-another-world-/episode-5-the-morning-of-our-promise-is-still-distant-702409', 'info_dict': { 'id': '702409', 'ext': 'mp4', 'title': compat_str, 'description': compat_str, 'thumbnail': r're:^https?://.*\.jpg$', 'uploader': 'Re:Zero Partners', 'timestamp': 1462098900, 'upload_date': '20160501', }, 'params': { # m3u8 download 'skip_download': True, }, }, { 'url': 'http://www.crunchyroll.com/konosuba-gods-blessing-on-this-wonderful-world/episode-1-give-me-deliverance-from-this-judicial-injustice-727589', 'info_dict': { 'id': '727589', 'ext': 'mp4', 'title': compat_str, 'description': compat_str, 'thumbnail': r're:^https?://.*\.jpg$', 'uploader': 'Kadokawa Pictures Inc.', 'timestamp': 1484130900, 'upload_date': '20170111', 'series': compat_str, 'season': "KONOSUBA -God's blessing on this wonderful world! 
2", 'season_number': 2, 'episode': 'Give Me Deliverance From This Judicial Injustice!', 'episode_number': 1, }, 'params': { # m3u8 download 'skip_download': True, }, }, { 'url': 'http://www.crunchyroll.fr/girl-friend-beta/episode-11-goodbye-la-mode-661697', 'only_matching': True, }, { # geo-restricted (US), 18+ maturity wall, non-premium available 'url': 'http://www.crunchyroll.com/cosplay-complex-ova/episode-1-the-birth-of-the-cosplay-club-565617', 'only_matching': True, }, { # A description with double quotes 'url': 'http://www.crunchyroll.com/11eyes/episode-1-piros-jszaka-red-night-535080', 'info_dict': { 'id': '535080', 'ext': 'mp4', 'title': compat_str, 'description': compat_str, 'uploader': 'Marvelous AQL Inc.', 'timestamp': 1255512600, 'upload_date': '20091014', }, 'params': { # Just test metadata extraction 'skip_download': True, }, }, { # make sure we can extract an uploader name that's not a link 'url': 'http://www.crunchyroll.com/hakuoki-reimeiroku/episode-1-dawn-of-the-divine-warriors-606899', 'info_dict': { 'id': '606899', 'ext': 'mp4', 'title': 'Hakuoki Reimeiroku Episode 1 – Dawn of the Divine Warriors', 'description': 'Ryunosuke was left to die, but Serizawa-san asked him a simple question "Do you want to live?"', 'uploader': 'Geneon Entertainment', 'upload_date': '20120717', }, 'params': { # just test metadata extraction 'skip_download': True, }, 'skip': 'Video gone', }, { # A video with a vastly different season name compared to the series name 'url': 'http://www.crunchyroll.com/nyarko-san-another-crawling-chaos/episode-1-test-590532', 'info_dict': { 'id': '590532', 'ext': 'mp4', 'title': compat_str, 'description': compat_str, 'uploader': 'TV TOKYO', 'timestamp': 1330956000, 'upload_date': '20120305', 'series': 'Nyarko-san: Another Crawling Chaos', 'season': 'Haiyoru! 
Nyaruani (ONA)', }, 'params': { # Just test metadata extraction 'skip_download': True, }, }, { 'url': 'http://www.crunchyroll.com/media-723735', 'only_matching': True, }, { 'url': 'https://www.crunchyroll.com/en-gb/mob-psycho-100/episode-2-urban-legends-encountering-rumors-780921', 'only_matching': True, }] _FORMAT_IDS = { '360': ('60', '106'), '480': ('61', '106'), '720': ('62', '106'), '1080': ('80', '108'), } def _download_webpage(self, url_or_request, *args, **kwargs): request = (url_or_request if isinstance(url_or_request, compat_urllib_request.Request) else sanitized_Request(url_or_request)) # Accept-Language must be set explicitly to accept any language to avoid issues # similar to https://github.com/ytdl-org/youtube-dl/issues/6797. # Along with IP address Crunchyroll uses Accept-Language to guess whether georestriction # should be imposed or not (from what I can see it just takes the first language # ignoring the priority and requires it to correspond the IP). By the way this causes # Crunchyroll to not work in georestriction cases in some browsers that don't place # the locale lang first in header. However allowing any language seems to workaround the issue. 
request.add_header('Accept-Language', '*') return super(CrunchyrollBaseIE, self)._download_webpage(request, *args, **kwargs) def _decrypt_subtitles(self, data, iv, id): data = bytes_to_intlist(compat_b64decode(data)) iv = bytes_to_intlist(compat_b64decode(iv)) id = int(id) def obfuscate_key_aux(count, modulo, start): output = list(start) for _ in range(count): output.append(output[-1] + output[-2]) # cut off start values output = output[2:] output = list(map(lambda x: x % modulo + 33, output)) return output def obfuscate_key(key): num1 = int(floor(pow(2, 25) * sqrt(6.9))) num2 = (num1 ^ key) << 5 num3 = key ^ num1 num4 = num3 ^ (num3 >> 3) ^ num2 prefix = intlist_to_bytes(obfuscate_key_aux(20, 97, (1, 2))) shaHash = bytes_to_intlist(sha1(prefix + str(num4).encode('ascii')).digest()) # Extend 160 Bit hash to 256 Bit return shaHash + [0] * 12 key = obfuscate_key(id) decrypted_data = intlist_to_bytes(aes_cbc_decrypt(data, key, iv)) return zlib.decompress(decrypted_data) def _convert_subtitles_to_srt(self, sub_root): output = '' for i, event in enumerate(sub_root.findall('./events/event'), 1): start = event.attrib['start'].replace('.', ',') end = event.attrib['end'].replace('.', ',') text = event.attrib['text'].replace('\\N', '\n') output += '%d\n%s --> %s\n%s\n\n' % (i, start, end, text) return output def _convert_subtitles_to_ass(self, sub_root): output = '' def ass_bool(strvalue): assvalue = '0' if strvalue == '1': assvalue = '-1' return assvalue output = '[Script Info]\n' output += 'Title: %s\n' % sub_root.attrib['title'] output += 'ScriptType: v4.00+\n' output += 'WrapStyle: %s\n' % sub_root.attrib['wrap_style'] output += 'PlayResX: %s\n' % sub_root.attrib['play_res_x'] output += 'PlayResY: %s\n' % sub_root.attrib['play_res_y'] output += """ [V4+ Styles] Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, 
MarginR, MarginV, Encoding """ for style in sub_root.findall('./styles/style'): output += 'Style: ' + style.attrib['name'] output += ',' + style.attrib['font_name'] output += ',' + style.attrib['font_size'] output += ',' + style.attrib['primary_colour'] output += ',' + style.attrib['secondary_colour'] output += ',' + style.attrib['outline_colour'] output += ',' + style.attrib['back_colour'] output += ',' + ass_bool(style.attrib['bold']) output += ',' + ass_bool(style.attrib['italic']) output += ',' + ass_bool(style.attrib['underline']) output += ',' + ass_bool(style.attrib['strikeout']) output += ',' + style.attrib['scale_x'] output += ',' + style.attrib['scale_y'] output += ',' + style.attrib['spacing'] output += ',' + style.attrib['angle'] output += ',' + style.attrib['border_style'] output += ',' + style.attrib['outline'] output += ',' + style.attrib['shadow'] output += ',' + style.attrib['alignment'] output += ',' + style.attrib['margin_l'] output += ',' + style.attrib['margin_r'] output += ',' + style.attrib['margin_v'] output += ',' + style.attrib['encoding'] output += '\n' output += """ [Events] Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text """ for event in sub_root.findall('./events/event'): output += 'Dialogue: 0' output += ',' + event.attrib['start'] output += ',' + event.attrib['end'] output += ',' + event.attrib['style'] output += ',' + event.attrib['name'] output += ',' + event.attrib['margin_l'] output += ',' + event.attrib['margin_r'] output += ',' + event.attrib['margin_v'] output += ',' + event.attrib['effect'] output += ',' + event.attrib['text'] output += '\n' return output def _extract_subtitles(self, subtitle): sub_root = compat_etree_fromstring(subtitle) return [{ 'ext': 'srt', 'data': self._convert_subtitles_to_srt(sub_root), }, { 'ext': 'ass', 'data': self._convert_subtitles_to_ass(sub_root), }] def _get_subtitles(self, video_id, webpage): subtitles = {} for sub_id, sub_name in 
re.findall(r'\bssid=([0-9]+)"[^>]+?\btitle="([^"]+)', webpage): sub_doc = self._call_rpc_api( 'Subtitle_GetXml', video_id, 'Downloading subtitles for ' + sub_name, data={ 'subtitle_script_id': sub_id, }) if not isinstance(sub_doc, compat_etree_Element): continue sid = sub_doc.get('id') iv = xpath_text(sub_doc, 'iv', 'subtitle iv') data = xpath_text(sub_doc, 'data', 'subtitle data') if not sid or not iv or not data: continue subtitle = self._decrypt_subtitles(data, iv, sid).decode('utf-8') lang_code = self._search_regex(r'lang_code=["\']([^"\']+)', subtitle, 'subtitle_lang_code', fatal=False) if not lang_code: continue subtitles[lang_code] = self._extract_subtitles(subtitle) return subtitles def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('video_id') if mobj.group('prefix') == 'm': mobile_webpage = self._download_webpage(url, video_id, 'Downloading mobile webpage') webpage_url = self._search_regex(r'<link rel="canonical" href="([^"]+)" />', mobile_webpage, 'webpage_url') else: webpage_url = 'http://www.' + mobj.group('url') webpage = self._download_webpage( self._add_skip_wall(webpage_url), video_id, headers=self.geo_verification_headers()) note_m = self._html_search_regex( r'<div class="showmedia-trailer-notice">(.+?)</div>', webpage, 'trailer-notice', default='') if note_m: raise ExtractorError(note_m) mobj = re.search(r'Page\.messaging_box_controller\.addItems\(\[(?P<msg>{.+?})\]\)', webpage) if mobj: msg = json.loads(mobj.group('msg')) if msg.get('type') == 'error': raise ExtractorError('crunchyroll returned error: %s' % msg['message_body'], expected=True) if 'To view this, please log in to verify you are 18 or older.' 
in webpage: self.raise_login_required() media = self._parse_json(self._search_regex( r'vilos\.config\.media\s*=\s*({.+?});', webpage, 'vilos media', default='{}'), video_id) media_metadata = media.get('metadata') or {} language = self._search_regex( r'(?:vilos\.config\.player\.language|LOCALE)\s*=\s*(["\'])(?P<lang>(?:(?!\1).)+)\1', webpage, 'language', default=None, group='lang') video_title = self._html_search_regex( (r'(?s)<h1[^>]*>((?:(?!<h1).)*?<(?:span[^>]+itemprop=["\']title["\']|meta[^>]+itemprop=["\']position["\'])[^>]*>(?:(?!<h1).)+?)</h1>', r'<title>(.+?),\s+-\s+.+? Crunchyroll'), webpage, 'video_title', default=None) if not video_title: video_title = re.sub(r'^Watch\s+', '', self._og_search_description(webpage)) video_title = re.sub(r' {2,}', ' ', video_title) video_description = (self._parse_json(self._html_search_regex( r'<script[^>]*>\s*.+?\[media_id=%s\].+?({.+?"description"\s*:.+?})\);' % video_id, webpage, 'description', default='{}'), video_id) or media_metadata).get('description') if video_description: video_description = lowercase_escape(video_description.replace(r'\r\n', '\n')) video_uploader = self._html_search_regex( # try looking for both an uploader that's a link and one that's not [r'<a[^>]+href="/publisher/[^"]+"[^>]*>([^<]+)</a>', r'<div>\s*Publisher:\s*<span>\s*(.+?)\s*</span>\s*</div>'], webpage, 'video_uploader', default=False) formats = [] for stream in media.get('streams', []): audio_lang = stream.get('audio_lang') hardsub_lang = stream.get('hardsub_lang') vrv_formats = self._extract_vrv_formats( stream.get('url'), video_id, stream.get('format'), audio_lang, hardsub_lang) for f in vrv_formats: if not hardsub_lang: f['preference'] = 1 language_preference = 0 if audio_lang == language: language_preference += 1 if hardsub_lang == language: language_preference += 1 if language_preference: f['language_preference'] = language_preference formats.extend(vrv_formats) if not formats: available_fmts = [] for a, fmt in 
re.findall(r'(<a[^>]+token=["\']showmedia\.([0-9]{3,4})p["\'][^>]+>)', webpage): attrs = extract_attributes(a) href = attrs.get('href') if href and '/freetrial' in href: continue available_fmts.append(fmt) if not available_fmts: for p in (r'token=["\']showmedia\.([0-9]{3,4})p"', r'showmedia\.([0-9]{3,4})p'): available_fmts = re.findall(p, webpage) if available_fmts: break if not available_fmts: available_fmts = self._FORMAT_IDS.keys() video_encode_ids = [] for fmt in available_fmts: stream_quality, stream_format = self._FORMAT_IDS[fmt] video_format = fmt + 'p' stream_infos = [] streamdata = self._call_rpc_api( 'VideoPlayer_GetStandardConfig', video_id, 'Downloading media info for %s' % video_format, data={ 'media_id': video_id, 'video_format': stream_format, 'video_quality': stream_quality, 'current_page': url, }) if isinstance(streamdata, compat_etree_Element): stream_info = streamdata.find('./{default}preload/stream_info') if stream_info is not None: stream_infos.append(stream_info) stream_info = self._call_rpc_api( 'VideoEncode_GetStreamInfo', video_id, 'Downloading stream info for %s' % video_format, data={ 'media_id': video_id, 'video_format': stream_format, 'video_encode_quality': stream_quality, }) if isinstance(stream_info, compat_etree_Element): stream_infos.append(stream_info) for stream_info in stream_infos: video_encode_id = xpath_text(stream_info, './video_encode_id') if video_encode_id in video_encode_ids: continue video_encode_ids.append(video_encode_id) video_file = xpath_text(stream_info, './file') if not video_file: continue if video_file.startswith('http'): formats.extend(self._extract_m3u8_formats( video_file, video_id, 'mp4', entry_protocol='m3u8_native', m3u8_id='hls', fatal=False)) continue video_url = xpath_text(stream_info, './host') if not video_url: continue metadata = stream_info.find('./metadata') format_info = { 'format': video_format, 'height': int_or_none(xpath_text(metadata, './height')), 'width': int_or_none(xpath_text(metadata, 
'./width')), } if '.fplive.net/' in video_url: video_url = re.sub(r'^rtmpe?://', 'http://', video_url.strip()) parsed_video_url = compat_urlparse.urlparse(video_url) direct_video_url = compat_urlparse.urlunparse(parsed_video_url._replace( netloc='v.lvlt.crcdn.net', path='%s/%s' % (remove_end(parsed_video_url.path, '/'), video_file.split(':')[-1]))) if self._is_valid_url(direct_video_url, video_id, video_format): format_info.update({ 'format_id': 'http-' + video_format, 'url': direct_video_url, }) formats.append(format_info) continue format_info.update({ 'format_id': 'rtmp-' + video_format, 'url': video_url, 'play_path': video_file, 'ext': 'flv', }) formats.append(format_info) self._sort_formats(formats, ('preference', 'language_preference', 'height', 'width', 'tbr', 'fps')) metadata = self._call_rpc_api( 'VideoPlayer_GetMediaMetadata', video_id, note='Downloading media info', data={ 'media_id': video_id, }) subtitles = {} for subtitle in media.get('subtitles', []): subtitle_url = subtitle.get('url') if not subtitle_url: continue subtitles.setdefault(subtitle.get('language', 'enUS'), []).append({ 'url': subtitle_url, 'ext': subtitle.get('format', 'ass'), }) if not subtitles: subtitles = self.extract_subtitles(video_id, webpage) # webpage provide more accurate data than series_title from XML series = self._html_search_regex( r'(?s)<h\d[^>]+\bid=["\']showmedia_about_episode_num[^>]+>(.+?)</h\d', webpage, 'series', fatal=False) season = episode = episode_number = duration = thumbnail = None if isinstance(metadata, compat_etree_Element): season = xpath_text(metadata, 'series_title') episode = xpath_text(metadata, 'episode_title') episode_number = int_or_none(xpath_text(metadata, 'episode_number')) duration = float_or_none(media_metadata.get('duration'), 1000) thumbnail = xpath_text(metadata, 'episode_image_url') if not episode: episode = media_metadata.get('title') if not episode_number: episode_number = int_or_none(media_metadata.get('episode_number')) if not 
thumbnail: thumbnail = media_metadata.get('thumbnail', {}).get('url') season_number = int_or_none(self._search_regex( r'(?s)<h\d[^>]+id=["\']showmedia_about_episode_num[^>]+>.+?</h\d>\s*<h4>\s*Season (\d+)', webpage, 'season number', default=None)) info = self._search_json_ld(webpage, video_id, default={}) return merge_dicts({ 'id': video_id, 'title': video_title, 'description': video_description, 'duration': duration, 'thumbnail': thumbnail, 'uploader': video_uploader, 'series': series, 'season': season, 'season_number': season_number, 'episode': episode, 'episode_number': episode_number, 'subtitles': subtitles, 'formats': formats, }, info) class CrunchyrollShowPlaylistIE(CrunchyrollBaseIE): IE_NAME = 'crunchyroll:playlist' _VALID_URL = r'https?://(?:(?P<prefix>www|m)\.)?(?P<url>crunchyroll\.com/(?!(?:news|anime-news|library|forum|launchcalendar|lineup|store|comics|freetrial|login|media-\d+))(?P<id>[\w\-]+))/?(?:\?|$)' _TESTS = [{ 'url': 'http://www.crunchyroll.com/a-bridge-to-the-starry-skies-hoshizora-e-kakaru-hashi', 'info_dict': { 'id': 'a-bridge-to-the-starry-skies-hoshizora-e-kakaru-hashi', 'title': 'A Bridge to the Starry Skies - Hoshizora e Kakaru Hashi' }, 'playlist_count': 13, }, { # geo-restricted (US), 18+ maturity wall, non-premium available 'url': 'http://www.crunchyroll.com/cosplay-complex-ova', 'info_dict': { 'id': 'cosplay-complex-ova', 'title': 'Cosplay Complex OVA' }, 'playlist_count': 3, 'skip': 'Georestricted', }, { # geo-restricted (US), 18+ maturity wall, non-premium will be available since 2015.11.14 'url': 'http://www.crunchyroll.com/ladies-versus-butlers?skip_wall=1', 'only_matching': True, }] def _real_extract(self, url): show_id = self._match_id(url) webpage = self._download_webpage( self._add_skip_wall(url), show_id, headers=self.geo_verification_headers()) title = self._html_search_meta('name', webpage, default=None) episode_paths = re.findall( r'(?s)<li id="showview_videos_media_(\d+)"[^>]+>.*?<a href="([^"]+)"', webpage) entries = [ 
self.url_result('http://www.crunchyroll.com' + ep, 'Crunchyroll', ep_id) for ep_id, ep in episode_paths ] entries.reverse() return { '_type': 'playlist', 'id': show_id, 'title': title, 'entries': entries, }
unlicense
cgstudiomap/cgstudiomap
main/eggs/python_stdnum-1.2-py2.7.egg/stdnum/ean.py
4
2451
# ean.py - functions for handling EANs
#
# Copyright (C) 2011, 2012, 2013 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

"""EAN (International Article Number).

Module for handling EAN (International Article Number) codes. This
module handles numbers EAN-13, EAN-8 and UPC (12-digit) format.

>>> validate('73513537')
'73513537'
>>> validate('978-0-471-11709-4') # EAN-13 format
'9780471117094'
"""

from stdnum.exceptions import *
from stdnum.util import clean


def compact(number):
    """Convert the EAN to the minimal representation. This strips the
    number of any valid separators and removes surrounding whitespace."""
    return clean(number, ' -').strip()


def calc_check_digit(number):
    """Calculate the EAN check digit for the supplied digits. The number
    passed should not have the check digit included."""
    # Digits are weighted 3, 1, 3, 1, ... starting from the rightmost digit.
    total = 0
    for position, digit in enumerate(reversed(number)):
        weight = 1 if position % 2 else 3
        total += weight * int(digit)
    return str((10 - total) % 10)


def validate(number):
    """Check whether the number provided is a valid EAN-13, EAN-8 or UPC.
    This checks the length and the check digit but does not check whether
    a known GS1 Prefix and company identifier are referenced."""
    number = compact(number)
    if not number.isdigit():
        raise InvalidFormat()
    if len(number) not in (13, 12, 8):
        raise InvalidLength()
    if number[-1] != calc_check_digit(number[:-1]):
        raise InvalidChecksum()
    return number


def is_valid(number):
    """Return whether the number provided is a valid EAN-13, EAN-8 or UPC.
    This checks the length and the check digit but does not check whether
    a known GS1 Prefix and company identifier are referenced."""
    try:
        validate(number)
        return True
    except ValidationError:
        return False
agpl-3.0
hgl888/chromium-crosswalk-efl
tools/deep_memory_profiler/PRESUBMIT.py
116
1425
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for deep_memory_profiler.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for details on the presubmit API built into gcl.
"""


def CommonChecks(input_api, output_api):
  """Run pylint, the local unit tests and (when committing) project checks."""
  import sys

  local_path = input_api.PresubmitLocalPath()
  results = []

  # Pylint needs to be able to import find_runtime_symbols; extend
  # sys.path only for the duration of the lint run, then restore it.
  saved_sys_path = sys.path
  try:
    sys.path = [
        input_api.os_path.join(local_path, '..', 'find_runtime_symbols'),
    ] + sys.path
    results.extend(input_api.canned_checks.RunPylint(input_api, output_api))
  finally:
    sys.path = saved_sys_path

  # Run every *_test.py under tests/.
  results.extend(
      input_api.canned_checks.RunUnitTestsInDirectory(
          input_api, output_api,
          input_api.os_path.join(local_path, 'tests'),
          whitelist=[r'.+_test\.py$']))

  if input_api.is_committing:
    results.extend(input_api.canned_checks.PanProjectChecks(
        input_api, output_api, owners_check=False))
  return results


def CheckChangeOnUpload(input_api, output_api):
  return CommonChecks(input_api, output_api)


def CheckChangeOnCommit(input_api, output_api):
  return CommonChecks(input_api, output_api)
bsd-3-clause
40023154/2015cd_midterm
static/Brython3.1.1-20150328-091302/Lib/external_import.py
742
2985
import os
import sys  # FIX: load_module() reads sys.modules but sys was never imported (NameError)

from browser import doc
import urllib.request

## this module is able to download modules that are external to
## localhost/src
## so we could download from any URL


class ModuleFinder:
    """PEP 302 style finder that locates modules on a remote HTTP server.

    Registered as a path hook; only claims http:// path entries.
    """

    def __init__(self, path_entry):
        print("external_import here..")
        self._module = None
        # Only handle plain http:// path entries; anything else is not ours.
        if path_entry.startswith('http://'):
            self.path_entry = path_entry
        else:
            raise ImportError()

    def __str__(self):
        return '<%s for "%s">' % (self.__class__.__name__, self.path_entry)

    def find_module(self, fullname, path=None):
        """Try *fullname* with each supported extension under *path*.

        Returns a ModuleLoader for the first source found; raises
        ImportError when no extension yields a module.
        """
        path = path or self.path_entry
        for _ext in ['js', 'pyj', 'py']:
            # NOTE(review): Brython's urlopen returns (fp, url, headers);
            # presumably a missing resource is signalled by urlopen itself —
            # confirm, since a raised error here aborts the remaining
            # extensions instead of trying the next one.
            _fp, _url, _headers = urllib.request.urlopen(
                path + '/' + '%s.%s' % (fullname, _ext))
            self._module = _fp.read()
            _fp.close()
            if self._module is not None:
                print("module found at %s:%s" % (path, fullname))
                return ModuleLoader(path, fullname, self._module)
        print('module %s not found' % fullname)
        raise ImportError()


class ModuleLoader:
    """Load source for modules"""

    def __init__(self, filepath, name, module_source):
        self._filepath = filepath      # base path (URL) the source came from
        self._name = name              # fully qualified module name
        self._module_source = module_source

    def get_source(self):
        return self._module_source

    def is_package(self):
        # A dotted name means the module lives inside a package.
        return '.' in self._name

    def load_module(self):
        if self._name in sys.modules:
            # Reuse the existing module from a previous import.
            mod = sys.modules[self._name]
            return mod
        _src = self.get_source()
        # NOTE(review): _filepath is the base path entry, not a file name;
        # these endswith() checks look like they expect the fetched file's
        # extension — confirm against the Brython runtime.
        if self._filepath.endswith('.js'):
            mod = JSObject(import_js_module(_src, self._filepath, self._name))
        elif self._filepath.endswith('.py'):
            mod = JSObject(import_py_module(_src, self._filepath, self._name))
        elif self._filepath.endswith('.pyj'):
            mod = JSObject(import_pyj_module(_src, self._filepath, self._name))
        else:
            raise ImportError('Invalid Module: %s' % self._filepath)
        # Set a few properties required by PEP 302
        mod.__file__ = self._filepath
        mod.__name__ = self._name
        mod.__path__ = os.path.abspath(self._filepath)
        mod.__loader__ = self
        mod.__package__ = '.'.join(self._name.split('.')[:-1])
        if self.is_package():
            print('adding path for package')
            # Set __path__ for packages
            # so we can find the sub-modules.
            mod.__path__ = [self._filepath]
        else:
            print('imported as regular module')
        print('creating a new module object for "%s"' % self._name)
        sys.modules.setdefault(self._name, mod)
        JSObject(__BRYTHON__.imported)[self._name] = mod
        return mod
gpl-2.0
mgraffg/simplegp
examples/simplify.py
1
2421
# Example script: exercises SimpleGP's tree simplification on hand-built
# individuals, printing each tree before and after simplify().
# NOTE(review): this is Python 2 code (print statements) and touches
# private GP attributes (_nop, _p, _p_constants, _ind_generated_c).
from SimpleGP import GP
import numpy as np

seed = 0  # if len(sys.argv) == 1 else int(sys.argv[1])

# Target: y = 0.2*x^2 - 0.3*x + 0.2 sampled on [0, 1].
x = np.linspace(0, 1, 100)
pol = np.array([0.2, -0.3, 0.2])
X = np.vstack((x**2, x, np.ones(x.shape[0])))
y = (X.T * pol).sum(axis=1)

# GP instance configured with simplification enabled (do_simplify=True).
gp = GP(popsize=10, generations=100000, verbose=True, verbose_nind=1000,
        min_length=1, do_simplify=True,
        func=["+", "-", "*", "/", 'abs', 'exp', 'sqrt', 'sin', 'cos',
              'sigmoid', 'if', 'max', 'min', 'ln', 'sq'],
        min_depth=0, fname_best='regression.npy', seed=seed, nrandom=100,
        pxo=0.2, pgrow=0.5, walltime=None)
gp.create_random_constants()
x = x[:, np.newaxis]
gp.train(x, y)
gp.create_population()

# nvar: first terminal index; opcodes below nvar are functions, >= nvar
# are variables/constants.
# NOTE(review): np.int was removed in modern NumPy (>= 1.24); fine for
# the NumPy versions contemporary with this example.
nvar = gp._nop.shape[0]

# Case 1: a larger arithmetic tree.
ind = np.array([2, 3, 0, 0, nvar, nvar, 1, nvar, nvar, 0, 1, nvar, nvar,
                2, nvar, nvar, 1, 3, nvar, nvar, 3, nvar, nvar],
               dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)

# Case 2: mixed +,-,*,/ tree.
ind = np.array([1, 0, 3, nvar, nvar, 1, nvar, nvar, 3, 2, nvar, nvar,
                2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
print ind2

# Case 3: ln(exp(...)) style nesting (opcodes 13 and 5).
ind = np.array([13, 5, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)

# Case 4: same pair in the opposite order.
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)

# Case 5: repeated on purpose? — identical to case 4 (possible copy-paste).
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)

# In-place simplification of population slot 0: expect "(X0 * 1.4)".
gp._p[0] = np.array([0, 2, nvar, nvar+2, nvar+1], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"

# Same expression with operands reordered; must simplify identically.
gp._p[0] = np.array([0, nvar+1, 2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"

# Subtraction of identical subtrees.
gp._p[0] = np.array([1, 0, 2, nvar, nvar+2, nvar+1, 2, nvar, nvar+2],
                    dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0)
apache-2.0
ity/pants
contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/file_excluder.py
15
1436
# coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os import re from pants.base.exceptions import TaskError class FileExcluder(object): def __init__(self, excludes_path, log): self.excludes = {} if excludes_path: if not os.path.exists(excludes_path): raise TaskError('Excludes file does not exist: {0}'.format(excludes_path)) with open(excludes_path) as fh: for line in fh.readlines(): if line and not line.startswith('#') and '::' in line: pattern, plugins = line.strip().split('::', 2) plugins = plugins.split() self.excludes[pattern] = { 'regex': re.compile(pattern), 'plugins': plugins } log.debug('Exclude pattern: {pattern}'.format(pattern=pattern)) else: log.debug('No excludes file specified. All python sources will be checked.') def should_include(self, source_filename, plugin): for exclude_rule in self.excludes.values(): if exclude_rule['regex'].match(source_filename) and ( (exclude_rule['plugins'] == ['.*']) or (plugin in exclude_rule['plugins']) ): return False return True
apache-2.0
danielhultqvist/mario
lib/gmock-1.6.0/gtest/test/gtest_help_test.py
2968
5856
#!/usr/bin/env python # # Copyright 2009, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Tests the --help flag of Google C++ Testing Framework. SYNOPSIS gtest_help_test.py --build_dir=BUILD/DIR # where BUILD/DIR contains the built gtest_help_test_ file. 
gtest_help_test.py """ __author__ = 'wan@google.com (Zhanyong Wan)' import os import re import gtest_test_utils IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux' IS_WINDOWS = os.name == 'nt' PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_') FLAG_PREFIX = '--gtest_' DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style' STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to' UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing' LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests' INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG), re.sub('^--', '/', LIST_TESTS_FLAG), re.sub('_', '-', LIST_TESTS_FLAG)] INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing' SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess( [PROGRAM_PATH, LIST_TESTS_FLAG]).output # The help message must match this regex. HELP_REGEX = re.compile( FLAG_PREFIX + r'list_tests.*' + FLAG_PREFIX + r'filter=.*' + FLAG_PREFIX + r'also_run_disabled_tests.*' + FLAG_PREFIX + r'repeat=.*' + FLAG_PREFIX + r'shuffle.*' + FLAG_PREFIX + r'random_seed=.*' + FLAG_PREFIX + r'color=.*' + FLAG_PREFIX + r'print_time.*' + FLAG_PREFIX + r'output=.*' + FLAG_PREFIX + r'break_on_failure.*' + FLAG_PREFIX + r'throw_on_failure.*' + FLAG_PREFIX + r'catch_exceptions=0.*', re.DOTALL) def RunWithFlag(flag): """Runs gtest_help_test_ with the given flag. Returns: the exit code and the text output as a tuple. Args: flag: the command-line flag to pass to gtest_help_test_, or None. """ if flag is None: command = [PROGRAM_PATH] else: command = [PROGRAM_PATH, flag] child = gtest_test_utils.Subprocess(command) return child.exit_code, child.output class GTestHelpTest(gtest_test_utils.TestCase): """Tests the --help flag and its equivalent forms.""" def TestHelpFlag(self, flag): """Verifies correct behavior when help flag is specified. The right message must be printed and the tests must skipped when the given flag is specified. 
Args: flag: A flag to pass to the binary or None. """ exit_code, output = RunWithFlag(flag) self.assertEquals(0, exit_code) self.assert_(HELP_REGEX.search(output), output) if IS_LINUX: self.assert_(STREAM_RESULT_TO_FLAG in output, output) else: self.assert_(STREAM_RESULT_TO_FLAG not in output, output) if SUPPORTS_DEATH_TESTS and not IS_WINDOWS: self.assert_(DEATH_TEST_STYLE_FLAG in output, output) else: self.assert_(DEATH_TEST_STYLE_FLAG not in output, output) def TestNonHelpFlag(self, flag): """Verifies correct behavior when no help flag is specified. Verifies that when no help flag is specified, the tests are run and the help message is not printed. Args: flag: A flag to pass to the binary or None. """ exit_code, output = RunWithFlag(flag) self.assert_(exit_code != 0) self.assert_(not HELP_REGEX.search(output), output) def testPrintsHelpWithFullFlag(self): self.TestHelpFlag('--help') def testPrintsHelpWithShortFlag(self): self.TestHelpFlag('-h') def testPrintsHelpWithQuestionFlag(self): self.TestHelpFlag('-?') def testPrintsHelpWithWindowsStyleQuestionFlag(self): self.TestHelpFlag('/?') def testPrintsHelpWithUnrecognizedGoogleTestFlag(self): self.TestHelpFlag(UNKNOWN_FLAG) def testPrintsHelpWithIncorrectFlagStyle(self): for incorrect_flag in INCORRECT_FLAG_VARIANTS: self.TestHelpFlag(incorrect_flag) def testRunsTestsWithoutHelpFlag(self): """Verifies that when no help flag is specified, the tests are run and the help message is not printed.""" self.TestNonHelpFlag(None) def testRunsTestsWithGtestInternalFlag(self): """Verifies that the tests are run and no help message is printed when a flag starting with Google Test prefix and 'internal_' is supplied.""" self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING) if __name__ == '__main__': gtest_test_utils.Main()
mit
nhicher/ansible
lib/ansible/module_utils/redhat.py
141
10351
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # Copyright (c), James Laska # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import os import re import shutil import tempfile import types from ansible.module_utils.six.moves import configparser class RegistrationBase(object): def __init__(self, module, username=None, password=None): self.module = module self.username = username self.password = password def configure(self): raise NotImplementedError("Must be implemented by a sub-class") def enable(self): # Remove any existing redhat.repo redhat_repo = '/etc/yum.repos.d/redhat.repo' if os.path.isfile(redhat_repo): os.unlink(redhat_repo) def register(self): raise NotImplementedError("Must be implemented by a sub-class") def unregister(self): raise NotImplementedError("Must be implemented by a sub-class") def unsubscribe(self): raise NotImplementedError("Must be implemented by a sub-class") def update_plugin_conf(self, plugin, enabled=True): plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin if os.path.isfile(plugin_conf): tmpfd, tmpfile = tempfile.mkstemp() shutil.copy2(plugin_conf, tmpfile) cfg = configparser.ConfigParser() cfg.read([tmpfile]) if enabled: cfg.set('main', 'enabled', 1) else: cfg.set('main', 'enabled', 0) fd = open(tmpfile, 'w+') cfg.write(fd) fd.close() self.module.atomic_move(tmpfile, plugin_conf) def subscribe(self, **kwargs): raise NotImplementedError("Must be implemented by a sub-class") class Rhsm(RegistrationBase): def __init__(self, module, username=None, password=None): RegistrationBase.__init__(self, module, username, password) self.config = self._read_config() self.module = module def _read_config(self, rhsm_conf='/etc/rhsm/rhsm.conf'): ''' Load RHSM configuration from /etc/rhsm/rhsm.conf. Returns: * ConfigParser object ''' # Read RHSM defaults ... cp = configparser.ConfigParser() cp.read(rhsm_conf) # Add support for specifying a default value w/o having to standup some configuration # Yeah, I know this should be subclassed ... 
but, oh well def get_option_default(self, key, default=''): sect, opt = key.split('.', 1) if self.has_section(sect) and self.has_option(sect, opt): return self.get(sect, opt) else: return default cp.get_option = types.MethodType(get_option_default, cp, configparser.ConfigParser) return cp def enable(self): ''' Enable the system to receive updates from subscription-manager. This involves updating affected yum plugins and removing any conflicting yum repositories. ''' RegistrationBase.enable(self) self.update_plugin_conf('rhnplugin', False) self.update_plugin_conf('subscription-manager', True) def configure(self, **kwargs): ''' Configure the system as directed for registration with RHN Raises: * Exception - if error occurs while running command ''' args = ['subscription-manager', 'config'] # Pass supplied **kwargs as parameters to subscription-manager. Ignore # non-configuration parameters and replace '_' with '.'. For example, # 'server_hostname' becomes '--system.hostname'. for k, v in kwargs.items(): if re.search(r'^(system|rhsm)_', k): args.append('--%s=%s' % (k.replace('_', '.'), v)) self.module.run_command(args, check_rc=True) @property def is_registered(self): ''' Determine whether the current system Returns: * Boolean - whether the current system is currently registered to RHN. ''' args = ['subscription-manager', 'identity'] rc, stdout, stderr = self.module.run_command(args, check_rc=False) if rc == 0: return True else: return False def register(self, username, password, autosubscribe, activationkey): ''' Register the current system to the provided RHN server Raises: * Exception - if error occurs while running command ''' args = ['subscription-manager', 'register'] # Generate command arguments if activationkey: args.append('--activationkey "%s"' % activationkey) else: if autosubscribe: args.append('--autosubscribe') if username: args.extend(['--username', username]) if password: args.extend(['--password', password]) # Do the needful... 
rc, stderr, stdout = self.module.run_command(args, check_rc=True) def unsubscribe(self): ''' Unsubscribe a system from all subscribed channels Raises: * Exception - if error occurs while running command ''' args = ['subscription-manager', 'unsubscribe', '--all'] rc, stderr, stdout = self.module.run_command(args, check_rc=True) def unregister(self): ''' Unregister a currently registered system Raises: * Exception - if error occurs while running command ''' args = ['subscription-manager', 'unregister'] rc, stderr, stdout = self.module.run_command(args, check_rc=True) self.update_plugin_conf('rhnplugin', False) self.update_plugin_conf('subscription-manager', False) def subscribe(self, regexp): ''' Subscribe current system to available pools matching the specified regular expression Raises: * Exception - if error occurs while running command ''' # Available pools ready for subscription available_pools = RhsmPools(self.module) for pool in available_pools.filter(regexp): pool.subscribe() class RhsmPool(object): ''' Convenience class for housing subscription information ''' def __init__(self, module, **kwargs): self.module = module for k, v in kwargs.items(): setattr(self, k, v) def __str__(self): return str(self.__getattribute__('_name')) def subscribe(self): args = "subscription-manager subscribe --pool %s" % self.PoolId rc, stdout, stderr = self.module.run_command(args, check_rc=True) if rc == 0: return True else: return False class RhsmPools(object): """ This class is used for manipulating pools subscriptions with RHSM """ def __init__(self, module): self.module = module self.products = self._load_product_list() def __iter__(self): return self.products.__iter__() def _load_product_list(self): """ Loads list of all available pools for system in data structure """ args = "subscription-manager list --available" rc, stdout, stderr = self.module.run_command(args, check_rc=True) products = [] for line in stdout.split('\n'): # Remove leading+trailing whitespace line = 
line.strip() # An empty line implies the end of an output group if len(line) == 0: continue # If a colon ':' is found, parse elif ':' in line: (key, value) = line.split(':', 1) key = key.strip().replace(" ", "") # To unify value = value.strip() if key in ['ProductName', 'SubscriptionName']: # Remember the name for later processing products.append(RhsmPool(self.module, _name=value, key=value)) elif products: # Associate value with most recently recorded product products[-1].__setattr__(key, value) # FIXME - log some warning? # else: # warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value)) return products def filter(self, regexp='^$'): ''' Return a list of RhsmPools whose name matches the provided regular expression ''' r = re.compile(regexp) for product in self.products: if r.search(product._name): yield product
gpl-3.0
foodszhang/kbengine
kbe/res/scripts/common/Lib/test/test_csv.py
80
42401
# Copyright (C) 2001,2002 Python Software Foundation # csv package unit tests import io import sys import os import unittest from io import StringIO from tempfile import TemporaryFile import csv import gc from test import support class Test_Csv(unittest.TestCase): """ Test the underlying C csv parser in ways that are not appropriate from the high level interface. Further tests of this nature are done in TestDialectRegistry. """ def _test_arg_valid(self, ctor, arg): self.assertRaises(TypeError, ctor) self.assertRaises(TypeError, ctor, None) self.assertRaises(TypeError, ctor, arg, bad_attr = 0) self.assertRaises(TypeError, ctor, arg, delimiter = 0) self.assertRaises(TypeError, ctor, arg, delimiter = 'XX') self.assertRaises(csv.Error, ctor, arg, 'foo') self.assertRaises(TypeError, ctor, arg, delimiter=None) self.assertRaises(TypeError, ctor, arg, delimiter=1) self.assertRaises(TypeError, ctor, arg, quotechar=1) self.assertRaises(TypeError, ctor, arg, lineterminator=None) self.assertRaises(TypeError, ctor, arg, lineterminator=1) self.assertRaises(TypeError, ctor, arg, quoting=None) self.assertRaises(TypeError, ctor, arg, quoting=csv.QUOTE_ALL, quotechar='') self.assertRaises(TypeError, ctor, arg, quoting=csv.QUOTE_ALL, quotechar=None) def test_reader_arg_valid(self): self._test_arg_valid(csv.reader, []) def test_writer_arg_valid(self): self._test_arg_valid(csv.writer, StringIO()) def _test_default_attrs(self, ctor, *args): obj = ctor(*args) # Check defaults self.assertEqual(obj.dialect.delimiter, ',') self.assertEqual(obj.dialect.doublequote, True) self.assertEqual(obj.dialect.escapechar, None) self.assertEqual(obj.dialect.lineterminator, "\r\n") self.assertEqual(obj.dialect.quotechar, '"') self.assertEqual(obj.dialect.quoting, csv.QUOTE_MINIMAL) self.assertEqual(obj.dialect.skipinitialspace, False) self.assertEqual(obj.dialect.strict, False) # Try deleting or changing attributes (they are read-only) self.assertRaises(AttributeError, delattr, obj.dialect, 'delimiter') 
self.assertRaises(AttributeError, setattr, obj.dialect, 'delimiter', ':') self.assertRaises(AttributeError, delattr, obj.dialect, 'quoting') self.assertRaises(AttributeError, setattr, obj.dialect, 'quoting', None) def test_reader_attrs(self): self._test_default_attrs(csv.reader, []) def test_writer_attrs(self): self._test_default_attrs(csv.writer, StringIO()) def _test_kw_attrs(self, ctor, *args): # Now try with alternate options kwargs = dict(delimiter=':', doublequote=False, escapechar='\\', lineterminator='\r', quotechar='*', quoting=csv.QUOTE_NONE, skipinitialspace=True, strict=True) obj = ctor(*args, **kwargs) self.assertEqual(obj.dialect.delimiter, ':') self.assertEqual(obj.dialect.doublequote, False) self.assertEqual(obj.dialect.escapechar, '\\') self.assertEqual(obj.dialect.lineterminator, "\r") self.assertEqual(obj.dialect.quotechar, '*') self.assertEqual(obj.dialect.quoting, csv.QUOTE_NONE) self.assertEqual(obj.dialect.skipinitialspace, True) self.assertEqual(obj.dialect.strict, True) def test_reader_kw_attrs(self): self._test_kw_attrs(csv.reader, []) def test_writer_kw_attrs(self): self._test_kw_attrs(csv.writer, StringIO()) def _test_dialect_attrs(self, ctor, *args): # Now try with dialect-derived options class dialect: delimiter='-' doublequote=False escapechar='^' lineterminator='$' quotechar='#' quoting=csv.QUOTE_ALL skipinitialspace=True strict=False args = args + (dialect,) obj = ctor(*args) self.assertEqual(obj.dialect.delimiter, '-') self.assertEqual(obj.dialect.doublequote, False) self.assertEqual(obj.dialect.escapechar, '^') self.assertEqual(obj.dialect.lineterminator, "$") self.assertEqual(obj.dialect.quotechar, '#') self.assertEqual(obj.dialect.quoting, csv.QUOTE_ALL) self.assertEqual(obj.dialect.skipinitialspace, True) self.assertEqual(obj.dialect.strict, False) def test_reader_dialect_attrs(self): self._test_dialect_attrs(csv.reader, []) def test_writer_dialect_attrs(self): self._test_dialect_attrs(csv.writer, StringIO()) def 
_write_test(self, fields, expect, **kwargs): with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, **kwargs) writer.writerow(fields) fileobj.seek(0) self.assertEqual(fileobj.read(), expect + writer.dialect.lineterminator) def test_write_arg_valid(self): self.assertRaises(csv.Error, self._write_test, None, '') self._write_test((), '') self._write_test([None], '""') self.assertRaises(csv.Error, self._write_test, [None], None, quoting = csv.QUOTE_NONE) # Check that exceptions are passed up the chain class BadList: def __len__(self): return 10; def __getitem__(self, i): if i > 2: raise OSError self.assertRaises(OSError, self._write_test, BadList(), '') class BadItem: def __str__(self): raise OSError self.assertRaises(OSError, self._write_test, [BadItem()], '') def test_write_bigfield(self): # This exercises the buffer realloc functionality bigstring = 'X' * 50000 self._write_test([bigstring,bigstring], '%s,%s' % \ (bigstring, bigstring)) def test_write_quoting(self): self._write_test(['a',1,'p,q'], 'a,1,"p,q"') self.assertRaises(csv.Error, self._write_test, ['a',1,'p,q'], 'a,1,p,q', quoting = csv.QUOTE_NONE) self._write_test(['a',1,'p,q'], 'a,1,"p,q"', quoting = csv.QUOTE_MINIMAL) self._write_test(['a',1,'p,q'], '"a",1,"p,q"', quoting = csv.QUOTE_NONNUMERIC) self._write_test(['a',1,'p,q'], '"a","1","p,q"', quoting = csv.QUOTE_ALL) self._write_test(['a\nb',1], '"a\nb","1"', quoting = csv.QUOTE_ALL) def test_write_escape(self): self._write_test(['a',1,'p,q'], 'a,1,"p,q"', escapechar='\\') self.assertRaises(csv.Error, self._write_test, ['a',1,'p,"q"'], 'a,1,"p,\\"q\\""', escapechar=None, doublequote=False) self._write_test(['a',1,'p,"q"'], 'a,1,"p,\\"q\\""', escapechar='\\', doublequote = False) self._write_test(['"'], '""""', escapechar='\\', quoting = csv.QUOTE_MINIMAL) self._write_test(['"'], '\\"', escapechar='\\', quoting = csv.QUOTE_MINIMAL, doublequote = False) self._write_test(['"'], '\\"', escapechar='\\', quoting = csv.QUOTE_NONE) 
self._write_test(['a',1,'p,q'], 'a,1,p\\,q', escapechar='\\', quoting = csv.QUOTE_NONE) def test_writerows(self): class BrokenFile: def write(self, buf): raise OSError writer = csv.writer(BrokenFile()) self.assertRaises(OSError, writer.writerows, [['a']]) with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj) self.assertRaises(TypeError, writer.writerows, None) writer.writerows([['a','b'],['c','d']]) fileobj.seek(0) self.assertEqual(fileobj.read(), "a,b\r\nc,d\r\n") @support.cpython_only def test_writerows_legacy_strings(self): import _testcapi c = _testcapi.unicode_legacy_string('a') with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj) writer.writerows([[c]]) fileobj.seek(0) self.assertEqual(fileobj.read(), "a\r\n") def _read_test(self, input, expect, **kwargs): reader = csv.reader(input, **kwargs) result = list(reader) self.assertEqual(result, expect) def test_read_oddinputs(self): self._read_test([], []) self._read_test([''], [[]]) self.assertRaises(csv.Error, self._read_test, ['"ab"c'], None, strict = 1) # cannot handle null bytes for the moment self.assertRaises(csv.Error, self._read_test, ['ab\0c'], None, strict = 1) self._read_test(['"ab"c'], [['abc']], doublequote = 0) self.assertRaises(csv.Error, self._read_test, [b'ab\0c'], None) def test_read_eol(self): self._read_test(['a,b'], [['a','b']]) self._read_test(['a,b\n'], [['a','b']]) self._read_test(['a,b\r\n'], [['a','b']]) self._read_test(['a,b\r'], [['a','b']]) self.assertRaises(csv.Error, self._read_test, ['a,b\rc,d'], []) self.assertRaises(csv.Error, self._read_test, ['a,b\nc,d'], []) self.assertRaises(csv.Error, self._read_test, ['a,b\r\nc,d'], []) def test_read_eof(self): self._read_test(['a,"'], [['a', '']]) self._read_test(['"a'], [['a']]) self._read_test(['^'], [['\n']], escapechar='^') self.assertRaises(csv.Error, self._read_test, ['a,"'], [], strict=True) self.assertRaises(csv.Error, self._read_test, ['"a'], [], strict=True) 
self.assertRaises(csv.Error, self._read_test, ['^'], [], escapechar='^', strict=True) def test_read_escape(self): self._read_test(['a,\\b,c'], [['a', 'b', 'c']], escapechar='\\') self._read_test(['a,b\\,c'], [['a', 'b,c']], escapechar='\\') self._read_test(['a,"b\\,c"'], [['a', 'b,c']], escapechar='\\') self._read_test(['a,"b,\\c"'], [['a', 'b,c']], escapechar='\\') self._read_test(['a,"b,c\\""'], [['a', 'b,c"']], escapechar='\\') self._read_test(['a,"b,c"\\'], [['a', 'b,c\\']], escapechar='\\') def test_read_quoting(self): self._read_test(['1,",3,",5'], [['1', ',3,', '5']]) self._read_test(['1,",3,",5'], [['1', '"', '3', '"', '5']], quotechar=None, escapechar='\\') self._read_test(['1,",3,",5'], [['1', '"', '3', '"', '5']], quoting=csv.QUOTE_NONE, escapechar='\\') # will this fail where locale uses comma for decimals? self._read_test([',3,"5",7.3, 9'], [['', 3, '5', 7.3, 9]], quoting=csv.QUOTE_NONNUMERIC) self._read_test(['"a\nb", 7'], [['a\nb', ' 7']]) self.assertRaises(ValueError, self._read_test, ['abc,3'], [[]], quoting=csv.QUOTE_NONNUMERIC) def test_read_bigfield(self): # This exercises the buffer realloc functionality and field size # limits. 
limit = csv.field_size_limit() try: size = 50000 bigstring = 'X' * size bigline = '%s,%s' % (bigstring, bigstring) self._read_test([bigline], [[bigstring, bigstring]]) csv.field_size_limit(size) self._read_test([bigline], [[bigstring, bigstring]]) self.assertEqual(csv.field_size_limit(), size) csv.field_size_limit(size-1) self.assertRaises(csv.Error, self._read_test, [bigline], []) self.assertRaises(TypeError, csv.field_size_limit, None) self.assertRaises(TypeError, csv.field_size_limit, 1, None) finally: csv.field_size_limit(limit) def test_read_linenum(self): r = csv.reader(['line,1', 'line,2', 'line,3']) self.assertEqual(r.line_num, 0) next(r) self.assertEqual(r.line_num, 1) next(r) self.assertEqual(r.line_num, 2) next(r) self.assertEqual(r.line_num, 3) self.assertRaises(StopIteration, next, r) self.assertEqual(r.line_num, 3) def test_roundtrip_quoteed_newlines(self): with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj) self.assertRaises(TypeError, writer.writerows, None) rows = [['a\nb','b'],['c','x\r\nd']] writer.writerows(rows) fileobj.seek(0) for i, row in enumerate(csv.reader(fileobj)): self.assertEqual(row, rows[i]) def test_roundtrip_escaped_unquoted_newlines(self): with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj,quoting=csv.QUOTE_NONE,escapechar="\\") rows = [['a\nb','b'],['c','x\r\nd']] writer.writerows(rows) fileobj.seek(0) for i, row in enumerate(csv.reader(fileobj,quoting=csv.QUOTE_NONE,escapechar="\\")): self.assertEqual(row,rows[i]) class TestDialectRegistry(unittest.TestCase): def test_registry_badargs(self): self.assertRaises(TypeError, csv.list_dialects, None) self.assertRaises(TypeError, csv.get_dialect) self.assertRaises(csv.Error, csv.get_dialect, None) self.assertRaises(csv.Error, csv.get_dialect, "nonesuch") self.assertRaises(TypeError, csv.unregister_dialect) self.assertRaises(csv.Error, csv.unregister_dialect, None) self.assertRaises(csv.Error, csv.unregister_dialect, "nonesuch") 
self.assertRaises(TypeError, csv.register_dialect, None) self.assertRaises(TypeError, csv.register_dialect, None, None) self.assertRaises(TypeError, csv.register_dialect, "nonesuch", 0, 0) self.assertRaises(TypeError, csv.register_dialect, "nonesuch", badargument=None) self.assertRaises(TypeError, csv.register_dialect, "nonesuch", quoting=None) self.assertRaises(TypeError, csv.register_dialect, []) def test_registry(self): class myexceltsv(csv.excel): delimiter = "\t" name = "myexceltsv" expected_dialects = csv.list_dialects() + [name] expected_dialects.sort() csv.register_dialect(name, myexceltsv) self.addCleanup(csv.unregister_dialect, name) self.assertEqual(csv.get_dialect(name).delimiter, '\t') got_dialects = sorted(csv.list_dialects()) self.assertEqual(expected_dialects, got_dialects) def test_register_kwargs(self): name = 'fedcba' csv.register_dialect(name, delimiter=';') self.addCleanup(csv.unregister_dialect, name) self.assertEqual(csv.get_dialect(name).delimiter, ';') self.assertEqual([['X', 'Y', 'Z']], list(csv.reader(['X;Y;Z'], name))) def test_incomplete_dialect(self): class myexceltsv(csv.Dialect): delimiter = "\t" self.assertRaises(csv.Error, myexceltsv) def test_space_dialect(self): class space(csv.excel): delimiter = " " quoting = csv.QUOTE_NONE escapechar = "\\" with TemporaryFile("w+") as fileobj: fileobj.write("abc def\nc1ccccc1 benzene\n") fileobj.seek(0) reader = csv.reader(fileobj, dialect=space()) self.assertEqual(next(reader), ["abc", "def"]) self.assertEqual(next(reader), ["c1ccccc1", "benzene"]) def compare_dialect_123(self, expected, *writeargs, **kwwriteargs): with TemporaryFile("w+", newline='', encoding="utf-8") as fileobj: writer = csv.writer(fileobj, *writeargs, **kwwriteargs) writer.writerow([1,2,3]) fileobj.seek(0) self.assertEqual(fileobj.read(), expected) def test_dialect_apply(self): class testA(csv.excel): delimiter = "\t" class testB(csv.excel): delimiter = ":" class testC(csv.excel): delimiter = "|" class testUni(csv.excel): 
delimiter = "\u039B" csv.register_dialect('testC', testC) try: self.compare_dialect_123("1,2,3\r\n") self.compare_dialect_123("1\t2\t3\r\n", testA) self.compare_dialect_123("1:2:3\r\n", dialect=testB()) self.compare_dialect_123("1|2|3\r\n", dialect='testC') self.compare_dialect_123("1;2;3\r\n", dialect=testA, delimiter=';') self.compare_dialect_123("1\u039B2\u039B3\r\n", dialect=testUni) finally: csv.unregister_dialect('testC') def test_bad_dialect(self): # Unknown parameter self.assertRaises(TypeError, csv.reader, [], bad_attr = 0) # Bad values self.assertRaises(TypeError, csv.reader, [], delimiter = None) self.assertRaises(TypeError, csv.reader, [], quoting = -1) self.assertRaises(TypeError, csv.reader, [], quoting = 100) class TestCsvBase(unittest.TestCase): def readerAssertEqual(self, input, expected_result): with TemporaryFile("w+", newline='') as fileobj: fileobj.write(input) fileobj.seek(0) reader = csv.reader(fileobj, dialect = self.dialect) fields = list(reader) self.assertEqual(fields, expected_result) def writerAssertEqual(self, input, expected_result): with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, dialect = self.dialect) writer.writerows(input) fileobj.seek(0) self.assertEqual(fileobj.read(), expected_result) class TestDialectExcel(TestCsvBase): dialect = 'excel' def test_single(self): self.readerAssertEqual('abc', [['abc']]) def test_simple(self): self.readerAssertEqual('1,2,3,4,5', [['1','2','3','4','5']]) def test_blankline(self): self.readerAssertEqual('', []) def test_empty_fields(self): self.readerAssertEqual(',', [['', '']]) def test_singlequoted(self): self.readerAssertEqual('""', [['']]) def test_singlequoted_left_empty(self): self.readerAssertEqual('"",', [['','']]) def test_singlequoted_right_empty(self): self.readerAssertEqual(',""', [['','']]) def test_single_quoted_quote(self): self.readerAssertEqual('""""', [['"']]) def test_quoted_quotes(self): self.readerAssertEqual('""""""', [['""']]) def 
test_inline_quote(self): self.readerAssertEqual('a""b', [['a""b']]) def test_inline_quotes(self): self.readerAssertEqual('a"b"c', [['a"b"c']]) def test_quotes_and_more(self): # Excel would never write a field containing '"a"b', but when # reading one, it will return 'ab'. self.readerAssertEqual('"a"b', [['ab']]) def test_lone_quote(self): self.readerAssertEqual('a"b', [['a"b']]) def test_quote_and_quote(self): # Excel would never write a field containing '"a" "b"', but when # reading one, it will return 'a "b"'. self.readerAssertEqual('"a" "b"', [['a "b"']]) def test_space_and_quote(self): self.readerAssertEqual(' "a"', [[' "a"']]) def test_quoted(self): self.readerAssertEqual('1,2,3,"I think, therefore I am",5,6', [['1', '2', '3', 'I think, therefore I am', '5', '6']]) def test_quoted_quote(self): self.readerAssertEqual('1,2,3,"""I see,"" said the blind man","as he picked up his hammer and saw"', [['1', '2', '3', '"I see," said the blind man', 'as he picked up his hammer and saw']]) def test_quoted_nl(self): input = '''\ 1,2,3,"""I see,"" said the blind man","as he picked up his hammer and saw" 9,8,7,6''' self.readerAssertEqual(input, [['1', '2', '3', '"I see,"\nsaid the blind man', 'as he picked up his\nhammer and saw'], ['9','8','7','6']]) def test_dubious_quote(self): self.readerAssertEqual('12,12,1",', [['12', '12', '1"', '']]) def test_null(self): self.writerAssertEqual([], '') def test_single_writer(self): self.writerAssertEqual([['abc']], 'abc\r\n') def test_simple_writer(self): self.writerAssertEqual([[1, 2, 'abc', 3, 4]], '1,2,abc,3,4\r\n') def test_quotes(self): self.writerAssertEqual([[1, 2, 'a"bc"', 3, 4]], '1,2,"a""bc""",3,4\r\n') def test_quote_fieldsep(self): self.writerAssertEqual([['abc,def']], '"abc,def"\r\n') def test_newlines(self): self.writerAssertEqual([[1, 2, 'a\nbc', 3, 4]], '1,2,"a\nbc",3,4\r\n') class EscapedExcel(csv.excel): quoting = csv.QUOTE_NONE escapechar = '\\' class TestEscapedExcel(TestCsvBase): dialect = EscapedExcel() def 
test_escape_fieldsep(self): self.writerAssertEqual([['abc,def']], 'abc\\,def\r\n') def test_read_escape_fieldsep(self): self.readerAssertEqual('abc\\,def\r\n', [['abc,def']]) class TestDialectUnix(TestCsvBase): dialect = 'unix' def test_simple_writer(self): self.writerAssertEqual([[1, 'abc def', 'abc']], '"1","abc def","abc"\n') def test_simple_reader(self): self.readerAssertEqual('"1","abc def","abc"\n', [['1', 'abc def', 'abc']]) class QuotedEscapedExcel(csv.excel): quoting = csv.QUOTE_NONNUMERIC escapechar = '\\' class TestQuotedEscapedExcel(TestCsvBase): dialect = QuotedEscapedExcel() def test_write_escape_fieldsep(self): self.writerAssertEqual([['abc,def']], '"abc,def"\r\n') def test_read_escape_fieldsep(self): self.readerAssertEqual('"abc\\,def"\r\n', [['abc,def']]) class TestDictFields(unittest.TestCase): ### "long" means the row is longer than the number of fieldnames ### "short" means there are fewer elements in the row than fieldnames def test_write_simple_dict(self): with TemporaryFile("w+", newline='') as fileobj: writer = csv.DictWriter(fileobj, fieldnames = ["f1", "f2", "f3"]) writer.writeheader() fileobj.seek(0) self.assertEqual(fileobj.readline(), "f1,f2,f3\r\n") writer.writerow({"f1": 10, "f3": "abc"}) fileobj.seek(0) fileobj.readline() # header self.assertEqual(fileobj.read(), "10,,abc\r\n") def test_write_no_fields(self): fileobj = StringIO() self.assertRaises(TypeError, csv.DictWriter, fileobj) def test_write_fields_not_in_fieldnames(self): with TemporaryFile("w+", newline='') as fileobj: writer = csv.DictWriter(fileobj, fieldnames = ["f1", "f2", "f3"]) # Of special note is the non-string key (issue 19449) with self.assertRaises(ValueError) as cx: writer.writerow({"f4": 10, "f2": "spam", 1: "abc"}) exception = str(cx.exception) self.assertIn("fieldnames", exception) self.assertIn("'f4'", exception) self.assertNotIn("'f2'", exception) self.assertIn("1", exception) def test_read_dict_fields(self): with TemporaryFile("w+") as fileobj: 
fileobj.write("1,2,abc\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj, fieldnames=["f1", "f2", "f3"]) self.assertEqual(next(reader), {"f1": '1', "f2": '2', "f3": 'abc'}) def test_read_dict_no_fieldnames(self): with TemporaryFile("w+") as fileobj: fileobj.write("f1,f2,f3\r\n1,2,abc\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj) self.assertEqual(next(reader), {"f1": '1', "f2": '2', "f3": 'abc'}) self.assertEqual(reader.fieldnames, ["f1", "f2", "f3"]) # Two test cases to make sure existing ways of implicitly setting # fieldnames continue to work. Both arise from discussion in issue3436. def test_read_dict_fieldnames_from_file(self): with TemporaryFile("w+") as fileobj: fileobj.write("f1,f2,f3\r\n1,2,abc\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj, fieldnames=next(csv.reader(fileobj))) self.assertEqual(reader.fieldnames, ["f1", "f2", "f3"]) self.assertEqual(next(reader), {"f1": '1', "f2": '2', "f3": 'abc'}) def test_read_dict_fieldnames_chain(self): import itertools with TemporaryFile("w+") as fileobj: fileobj.write("f1,f2,f3\r\n1,2,abc\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj) first = next(reader) for row in itertools.chain([first], reader): self.assertEqual(reader.fieldnames, ["f1", "f2", "f3"]) self.assertEqual(row, {"f1": '1', "f2": '2', "f3": 'abc'}) def test_read_long(self): with TemporaryFile("w+") as fileobj: fileobj.write("1,2,abc,4,5,6\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj, fieldnames=["f1", "f2"]) self.assertEqual(next(reader), {"f1": '1', "f2": '2', None: ["abc", "4", "5", "6"]}) def test_read_long_with_rest(self): with TemporaryFile("w+") as fileobj: fileobj.write("1,2,abc,4,5,6\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj, fieldnames=["f1", "f2"], restkey="_rest") self.assertEqual(next(reader), {"f1": '1', "f2": '2', "_rest": ["abc", "4", "5", "6"]}) def test_read_long_with_rest_no_fieldnames(self): with TemporaryFile("w+") as fileobj: fileobj.write("f1,f2\r\n1,2,abc,4,5,6\r\n") 
fileobj.seek(0) reader = csv.DictReader(fileobj, restkey="_rest") self.assertEqual(reader.fieldnames, ["f1", "f2"]) self.assertEqual(next(reader), {"f1": '1', "f2": '2', "_rest": ["abc", "4", "5", "6"]}) def test_read_short(self): with TemporaryFile("w+") as fileobj: fileobj.write("1,2,abc,4,5,6\r\n1,2,abc\r\n") fileobj.seek(0) reader = csv.DictReader(fileobj, fieldnames="1 2 3 4 5 6".split(), restval="DEFAULT") self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', "4": '4', "5": '5', "6": '6'}) self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', "4": 'DEFAULT', "5": 'DEFAULT', "6": 'DEFAULT'}) def test_read_multi(self): sample = [ '2147483648,43.0e12,17,abc,def\r\n', '147483648,43.0e2,17,abc,def\r\n', '47483648,43.0,170,abc,def\r\n' ] reader = csv.DictReader(sample, fieldnames="i1 float i2 s1 s2".split()) self.assertEqual(next(reader), {"i1": '2147483648', "float": '43.0e12', "i2": '17', "s1": 'abc', "s2": 'def'}) def test_read_with_blanks(self): reader = csv.DictReader(["1,2,abc,4,5,6\r\n","\r\n", "1,2,abc,4,5,6\r\n"], fieldnames="1 2 3 4 5 6".split()) self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', "4": '4', "5": '5', "6": '6'}) self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', "4": '4', "5": '5', "6": '6'}) def test_read_semi_sep(self): reader = csv.DictReader(["1;2;abc;4;5;6\r\n"], fieldnames="1 2 3 4 5 6".split(), delimiter=';') self.assertEqual(next(reader), {"1": '1', "2": '2', "3": 'abc', "4": '4', "5": '5', "6": '6'}) class TestArrayWrites(unittest.TestCase): def test_int_write(self): import array contents = [(20-i) for i in range(20)] a = array.array('i', contents) with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, dialect="excel") writer.writerow(a) expected = ",".join([str(i) for i in a])+"\r\n" fileobj.seek(0) self.assertEqual(fileobj.read(), expected) def test_double_write(self): import array contents = [(20-i)*0.1 for i in range(20)] a = array.array('d', contents) with 
TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, dialect="excel") writer.writerow(a) expected = ",".join([str(i) for i in a])+"\r\n" fileobj.seek(0) self.assertEqual(fileobj.read(), expected) def test_float_write(self): import array contents = [(20-i)*0.1 for i in range(20)] a = array.array('f', contents) with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, dialect="excel") writer.writerow(a) expected = ",".join([str(i) for i in a])+"\r\n" fileobj.seek(0) self.assertEqual(fileobj.read(), expected) def test_char_write(self): import array, string a = array.array('u', string.ascii_letters) with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj, dialect="excel") writer.writerow(a) expected = ",".join(a)+"\r\n" fileobj.seek(0) self.assertEqual(fileobj.read(), expected) class TestDialectValidity(unittest.TestCase): def test_quoting(self): class mydialect(csv.Dialect): delimiter = ";" escapechar = '\\' doublequote = False skipinitialspace = True lineterminator = '\r\n' quoting = csv.QUOTE_NONE d = mydialect() self.assertEqual(d.quoting, csv.QUOTE_NONE) mydialect.quoting = None self.assertRaises(csv.Error, mydialect) mydialect.doublequote = True mydialect.quoting = csv.QUOTE_ALL mydialect.quotechar = '"' d = mydialect() self.assertEqual(d.quoting, csv.QUOTE_ALL) self.assertEqual(d.quotechar, '"') self.assertTrue(d.doublequote) mydialect.quotechar = "''" with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"quotechar" must be an 1-character string') mydialect.quotechar = 4 with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"quotechar" must be string, not int') def test_delimiter(self): class mydialect(csv.Dialect): delimiter = ";" escapechar = '\\' doublequote = False skipinitialspace = True lineterminator = '\r\n' quoting = csv.QUOTE_NONE d = mydialect() self.assertEqual(d.delimiter, ";") mydialect.delimiter = ":::" with 
self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"delimiter" must be an 1-character string') mydialect.delimiter = "" with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"delimiter" must be an 1-character string') mydialect.delimiter = b"," with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"delimiter" must be string, not bytes') mydialect.delimiter = 4 with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"delimiter" must be string, not int') def test_lineterminator(self): class mydialect(csv.Dialect): delimiter = ";" escapechar = '\\' doublequote = False skipinitialspace = True lineterminator = '\r\n' quoting = csv.QUOTE_NONE d = mydialect() self.assertEqual(d.lineterminator, '\r\n') mydialect.lineterminator = ":::" d = mydialect() self.assertEqual(d.lineterminator, ":::") mydialect.lineterminator = 4 with self.assertRaises(csv.Error) as cm: mydialect() self.assertEqual(str(cm.exception), '"lineterminator" must be a string') def test_invalid_chars(self): def create_invalid(field_name, value): class mydialect(csv.Dialect): pass setattr(mydialect, field_name, value) d = mydialect() for field_name in ("delimiter", "escapechar", "quotechar"): with self.subTest(field_name=field_name): self.assertRaises(csv.Error, create_invalid, field_name, "") self.assertRaises(csv.Error, create_invalid, field_name, "abc") self.assertRaises(csv.Error, create_invalid, field_name, b'x') self.assertRaises(csv.Error, create_invalid, field_name, 5) class TestSniffer(unittest.TestCase): sample1 = """\ Harry's, Arlington Heights, IL, 2/1/03, Kimi Hayes Shark City, Glendale Heights, IL, 12/28/02, Prezence Tommy's Place, Blue Island, IL, 12/28/02, Blue Sunday/White Crow Stonecutters Seafood and Chop House, Lemont, IL, 12/19/02, Week Back """ sample2 = """\ 'Harry''s':'Arlington Heights':'IL':'2/1/03':'Kimi Hayes' 'Shark City':'Glendale 
Heights':'IL':'12/28/02':'Prezence' 'Tommy''s Place':'Blue Island':'IL':'12/28/02':'Blue Sunday/White Crow' 'Stonecutters ''Seafood'' and Chop House':'Lemont':'IL':'12/19/02':'Week Back' """ header1 = '''\ "venue","city","state","date","performers" ''' sample3 = '''\ 05/05/03?05/05/03?05/05/03?05/05/03?05/05/03?05/05/03 05/05/03?05/05/03?05/05/03?05/05/03?05/05/03?05/05/03 05/05/03?05/05/03?05/05/03?05/05/03?05/05/03?05/05/03 ''' sample4 = '''\ 2147483648;43.0e12;17;abc;def 147483648;43.0e2;17;abc;def 47483648;43.0;170;abc;def ''' sample5 = "aaa\tbbb\r\nAAA\t\r\nBBB\t\r\n" sample6 = "a|b|c\r\nd|e|f\r\n" sample7 = "'a'|'b'|'c'\r\n'd'|e|f\r\n" # Issue 18155: Use a delimiter that is a special char to regex: header2 = '''\ "venue"+"city"+"state"+"date"+"performers" ''' sample8 = """\ Harry's+ Arlington Heights+ IL+ 2/1/03+ Kimi Hayes Shark City+ Glendale Heights+ IL+ 12/28/02+ Prezence Tommy's Place+ Blue Island+ IL+ 12/28/02+ Blue Sunday/White Crow Stonecutters Seafood and Chop House+ Lemont+ IL+ 12/19/02+ Week Back """ sample9 = """\ 'Harry''s'+ Arlington Heights'+ 'IL'+ '2/1/03'+ 'Kimi Hayes' 'Shark City'+ Glendale Heights'+' IL'+ '12/28/02'+ 'Prezence' 'Tommy''s Place'+ Blue Island'+ 'IL'+ '12/28/02'+ 'Blue Sunday/White Crow' 'Stonecutters ''Seafood'' and Chop House'+ 'Lemont'+ 'IL'+ '12/19/02'+ 'Week Back' """ def test_has_header(self): sniffer = csv.Sniffer() self.assertEqual(sniffer.has_header(self.sample1), False) self.assertEqual(sniffer.has_header(self.header1 + self.sample1), True) def test_has_header_regex_special_delimiter(self): sniffer = csv.Sniffer() self.assertEqual(sniffer.has_header(self.sample8), False) self.assertEqual(sniffer.has_header(self.header2 + self.sample8), True) def test_sniff(self): sniffer = csv.Sniffer() dialect = sniffer.sniff(self.sample1) self.assertEqual(dialect.delimiter, ",") self.assertEqual(dialect.quotechar, '"') self.assertEqual(dialect.skipinitialspace, True) dialect = sniffer.sniff(self.sample2) 
self.assertEqual(dialect.delimiter, ":") self.assertEqual(dialect.quotechar, "'") self.assertEqual(dialect.skipinitialspace, False) def test_delimiters(self): sniffer = csv.Sniffer() dialect = sniffer.sniff(self.sample3) # given that all three lines in sample3 are equal, # I think that any character could have been 'guessed' as the # delimiter, depending on dictionary order self.assertIn(dialect.delimiter, self.sample3) dialect = sniffer.sniff(self.sample3, delimiters="?,") self.assertEqual(dialect.delimiter, "?") dialect = sniffer.sniff(self.sample3, delimiters="/,") self.assertEqual(dialect.delimiter, "/") dialect = sniffer.sniff(self.sample4) self.assertEqual(dialect.delimiter, ";") dialect = sniffer.sniff(self.sample5) self.assertEqual(dialect.delimiter, "\t") dialect = sniffer.sniff(self.sample6) self.assertEqual(dialect.delimiter, "|") dialect = sniffer.sniff(self.sample7) self.assertEqual(dialect.delimiter, "|") self.assertEqual(dialect.quotechar, "'") dialect = sniffer.sniff(self.sample8) self.assertEqual(dialect.delimiter, '+') dialect = sniffer.sniff(self.sample9) self.assertEqual(dialect.delimiter, '+') self.assertEqual(dialect.quotechar, "'") def test_doublequote(self): sniffer = csv.Sniffer() dialect = sniffer.sniff(self.header1) self.assertFalse(dialect.doublequote) dialect = sniffer.sniff(self.header2) self.assertFalse(dialect.doublequote) dialect = sniffer.sniff(self.sample2) self.assertTrue(dialect.doublequote) dialect = sniffer.sniff(self.sample8) self.assertFalse(dialect.doublequote) dialect = sniffer.sniff(self.sample9) self.assertTrue(dialect.doublequote) class NUL: def write(s, *args): pass writelines = write @unittest.skipUnless(hasattr(sys, "gettotalrefcount"), 'requires sys.gettotalrefcount()') class TestLeaks(unittest.TestCase): def test_create_read(self): delta = 0 lastrc = sys.gettotalrefcount() for i in range(20): gc.collect() self.assertEqual(gc.garbage, []) rc = sys.gettotalrefcount() csv.reader(["a,b,c\r\n"]) 
csv.reader(["a,b,c\r\n"]) csv.reader(["a,b,c\r\n"]) delta = rc-lastrc lastrc = rc # if csv.reader() leaks, last delta should be 3 or more self.assertEqual(delta < 3, True) def test_create_write(self): delta = 0 lastrc = sys.gettotalrefcount() s = NUL() for i in range(20): gc.collect() self.assertEqual(gc.garbage, []) rc = sys.gettotalrefcount() csv.writer(s) csv.writer(s) csv.writer(s) delta = rc-lastrc lastrc = rc # if csv.writer() leaks, last delta should be 3 or more self.assertEqual(delta < 3, True) def test_read(self): delta = 0 rows = ["a,b,c\r\n"]*5 lastrc = sys.gettotalrefcount() for i in range(20): gc.collect() self.assertEqual(gc.garbage, []) rc = sys.gettotalrefcount() rdr = csv.reader(rows) for row in rdr: pass delta = rc-lastrc lastrc = rc # if reader leaks during read, delta should be 5 or more self.assertEqual(delta < 5, True) def test_write(self): delta = 0 rows = [[1,2,3]]*5 s = NUL() lastrc = sys.gettotalrefcount() for i in range(20): gc.collect() self.assertEqual(gc.garbage, []) rc = sys.gettotalrefcount() writer = csv.writer(s) for row in rows: writer.writerow(row) delta = rc-lastrc lastrc = rc # if writer leaks during write, last delta should be 5 or more self.assertEqual(delta < 5, True) class TestUnicode(unittest.TestCase): names = ["Martin von Löwis", "Marc André Lemburg", "Guido van Rossum", "François Pinard"] def test_unicode_read(self): import io with TemporaryFile("w+", newline='', encoding="utf-8") as fileobj: fileobj.write(",".join(self.names) + "\r\n") fileobj.seek(0) reader = csv.reader(fileobj) self.assertEqual(list(reader), [self.names]) def test_unicode_write(self): import io with TemporaryFile("w+", newline='', encoding="utf-8") as fileobj: writer = csv.writer(fileobj) writer.writerow(self.names) expected = ",".join(self.names)+"\r\n" fileobj.seek(0) self.assertEqual(fileobj.read(), expected) def test_main(): mod = sys.modules[__name__] support.run_unittest( *[getattr(mod, name) for name in dir(mod) if name.startswith('Test')] ) 
if __name__ == '__main__': test_main()
lgpl-3.0
tswast/google-cloud-python
bigtable/tests/unit/gapic/v2/test_bigtable_client_v2.py
2
11731
# -*- coding: utf-8 -*- # # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests.""" import mock import pytest from google.cloud import bigtable_v2 from google.cloud.bigtable_v2.proto import bigtable_pb2 class MultiCallableStub(object): """Stub for the grpc.UnaryUnaryMultiCallable interface.""" def __init__(self, method, channel_stub): self.method = method self.channel_stub = channel_stub def __call__(self, request, timeout=None, metadata=None, credentials=None): self.channel_stub.requests.append((self.method, request)) response = None if self.channel_stub.responses: response = self.channel_stub.responses.pop() if isinstance(response, Exception): raise response if response: return response class ChannelStub(object): """Stub for the grpc.Channel interface.""" def __init__(self, responses=[]): self.responses = responses self.requests = [] def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) def unary_stream(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) class CustomException(Exception): pass class TestBigtableClient(object): def test_read_rows(self): # Setup Expected Response last_scanned_row_key = b"-126" expected_response = {"last_scanned_row_key": last_scanned_row_key} expected_response = bigtable_pb2.ReadRowsResponse(**expected_response) # Mock the API response channel = 
ChannelStub(responses=[iter([expected_response])]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") response = client.read_rows(table_name) resources = list(response) assert len(resources) == 1 assert expected_response == resources[0] assert len(channel.requests) == 1 expected_request = bigtable_pb2.ReadRowsRequest(table_name=table_name) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_read_rows_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") with pytest.raises(CustomException): client.read_rows(table_name) def test_sample_row_keys(self): # Setup Expected Response row_key = b"122" offset_bytes = 889884095 expected_response = {"row_key": row_key, "offset_bytes": offset_bytes} expected_response = bigtable_pb2.SampleRowKeysResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") response = client.sample_row_keys(table_name) resources = list(response) assert len(resources) == 1 assert expected_response == resources[0] assert len(channel.requests) == 1 expected_request = bigtable_pb2.SampleRowKeysRequest(table_name=table_name) actual_request = channel.requests[0][1] assert expected_request == actual_request def 
test_sample_row_keys_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") with pytest.raises(CustomException): client.sample_row_keys(table_name) def test_mutate_row(self): # Setup Expected Response expected_response = {} expected_response = bigtable_pb2.MutateRowResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" mutations = [] response = client.mutate_row(table_name, row_key, mutations) assert expected_response == response assert len(channel.requests) == 1 expected_request = bigtable_pb2.MutateRowRequest( table_name=table_name, row_key=row_key, mutations=mutations ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_mutate_row_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" mutations = [] with pytest.raises(CustomException): client.mutate_row(table_name, row_key, mutations) def test_mutate_rows(self): # Setup Expected Response expected_response = {} expected_response = bigtable_pb2.MutateRowsResponse(**expected_response) # Mock the API response channel = 
ChannelStub(responses=[iter([expected_response])]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") entries = [] response = client.mutate_rows(table_name, entries) resources = list(response) assert len(resources) == 1 assert expected_response == resources[0] assert len(channel.requests) == 1 expected_request = bigtable_pb2.MutateRowsRequest( table_name=table_name, entries=entries ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_mutate_rows_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") entries = [] with pytest.raises(CustomException): client.mutate_rows(table_name, entries) def test_check_and_mutate_row(self): # Setup Expected Response predicate_matched = True expected_response = {"predicate_matched": predicate_matched} expected_response = bigtable_pb2.CheckAndMutateRowResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" response = client.check_and_mutate_row(table_name, row_key) assert expected_response == response assert len(channel.requests) == 1 expected_request = bigtable_pb2.CheckAndMutateRowRequest( table_name=table_name, row_key=row_key ) actual_request = channel.requests[0][1] assert 
expected_request == actual_request def test_check_and_mutate_row_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" with pytest.raises(CustomException): client.check_and_mutate_row(table_name, row_key) def test_read_modify_write_row(self): # Setup Expected Response expected_response = {} expected_response = bigtable_pb2.ReadModifyWriteRowResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup Request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" rules = [] response = client.read_modify_write_row(table_name, row_key, rules) assert expected_response == response assert len(channel.requests) == 1 expected_request = bigtable_pb2.ReadModifyWriteRowRequest( table_name=table_name, row_key=row_key, rules=rules ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_read_modify_write_row_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = bigtable_v2.BigtableClient() # Setup request table_name = client.table_path("[PROJECT]", "[INSTANCE]", "[TABLE]") row_key = b"122" rules = [] with pytest.raises(CustomException): client.read_modify_write_row(table_name, row_key, rules)
apache-2.0
sunlianqiang/kbengine
kbe/res/scripts/common/Lib/encodings/cp1253.py
272
13094
""" Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp1253', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( '\x00' # 0x00 -> NULL '\x01' # 0x01 -> START OF HEADING '\x02' # 0x02 -> START OF TEXT '\x03' # 0x03 -> END OF TEXT '\x04' # 0x04 -> END OF TRANSMISSION '\x05' # 0x05 -> ENQUIRY '\x06' # 0x06 -> ACKNOWLEDGE '\x07' # 0x07 -> BELL '\x08' # 0x08 -> BACKSPACE '\t' # 0x09 -> HORIZONTAL TABULATION '\n' # 0x0A -> LINE FEED '\x0b' # 0x0B -> VERTICAL TABULATION '\x0c' # 0x0C -> FORM FEED '\r' # 0x0D -> CARRIAGE RETURN '\x0e' # 0x0E -> SHIFT OUT '\x0f' # 0x0F -> SHIFT IN '\x10' # 0x10 -> DATA LINK ESCAPE '\x11' # 0x11 -> DEVICE CONTROL ONE '\x12' # 0x12 -> DEVICE CONTROL TWO '\x13' # 0x13 -> DEVICE CONTROL THREE '\x14' # 0x14 -> DEVICE CONTROL FOUR '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE '\x16' # 0x16 -> SYNCHRONOUS IDLE '\x17' # 0x17 -> END OF TRANSMISSION BLOCK '\x18' # 0x18 -> CANCEL '\x19' # 0x19 -> END OF MEDIUM '\x1a' # 0x1A -> SUBSTITUTE '\x1b' # 0x1B -> ESCAPE '\x1c' # 0x1C -> FILE SEPARATOR 
'\x1d' # 0x1D -> GROUP SEPARATOR '\x1e' # 0x1E -> RECORD SEPARATOR '\x1f' # 0x1F -> UNIT SEPARATOR ' ' # 0x20 -> SPACE '!' # 0x21 -> EXCLAMATION MARK '"' # 0x22 -> QUOTATION MARK '#' # 0x23 -> NUMBER SIGN '$' # 0x24 -> DOLLAR SIGN '%' # 0x25 -> PERCENT SIGN '&' # 0x26 -> AMPERSAND "'" # 0x27 -> APOSTROPHE '(' # 0x28 -> LEFT PARENTHESIS ')' # 0x29 -> RIGHT PARENTHESIS '*' # 0x2A -> ASTERISK '+' # 0x2B -> PLUS SIGN ',' # 0x2C -> COMMA '-' # 0x2D -> HYPHEN-MINUS '.' # 0x2E -> FULL STOP '/' # 0x2F -> SOLIDUS '0' # 0x30 -> DIGIT ZERO '1' # 0x31 -> DIGIT ONE '2' # 0x32 -> DIGIT TWO '3' # 0x33 -> DIGIT THREE '4' # 0x34 -> DIGIT FOUR '5' # 0x35 -> DIGIT FIVE '6' # 0x36 -> DIGIT SIX '7' # 0x37 -> DIGIT SEVEN '8' # 0x38 -> DIGIT EIGHT '9' # 0x39 -> DIGIT NINE ':' # 0x3A -> COLON ';' # 0x3B -> SEMICOLON '<' # 0x3C -> LESS-THAN SIGN '=' # 0x3D -> EQUALS SIGN '>' # 0x3E -> GREATER-THAN SIGN '?' # 0x3F -> QUESTION MARK '@' # 0x40 -> COMMERCIAL AT 'A' # 0x41 -> LATIN CAPITAL LETTER A 'B' # 0x42 -> LATIN CAPITAL LETTER B 'C' # 0x43 -> LATIN CAPITAL LETTER C 'D' # 0x44 -> LATIN CAPITAL LETTER D 'E' # 0x45 -> LATIN CAPITAL LETTER E 'F' # 0x46 -> LATIN CAPITAL LETTER F 'G' # 0x47 -> LATIN CAPITAL LETTER G 'H' # 0x48 -> LATIN CAPITAL LETTER H 'I' # 0x49 -> LATIN CAPITAL LETTER I 'J' # 0x4A -> LATIN CAPITAL LETTER J 'K' # 0x4B -> LATIN CAPITAL LETTER K 'L' # 0x4C -> LATIN CAPITAL LETTER L 'M' # 0x4D -> LATIN CAPITAL LETTER M 'N' # 0x4E -> LATIN CAPITAL LETTER N 'O' # 0x4F -> LATIN CAPITAL LETTER O 'P' # 0x50 -> LATIN CAPITAL LETTER P 'Q' # 0x51 -> LATIN CAPITAL LETTER Q 'R' # 0x52 -> LATIN CAPITAL LETTER R 'S' # 0x53 -> LATIN CAPITAL LETTER S 'T' # 0x54 -> LATIN CAPITAL LETTER T 'U' # 0x55 -> LATIN CAPITAL LETTER U 'V' # 0x56 -> LATIN CAPITAL LETTER V 'W' # 0x57 -> LATIN CAPITAL LETTER W 'X' # 0x58 -> LATIN CAPITAL LETTER X 'Y' # 0x59 -> LATIN CAPITAL LETTER Y 'Z' # 0x5A -> LATIN CAPITAL LETTER Z '[' # 0x5B -> LEFT SQUARE BRACKET '\\' # 0x5C -> REVERSE SOLIDUS ']' # 0x5D -> RIGHT 
SQUARE BRACKET '^' # 0x5E -> CIRCUMFLEX ACCENT '_' # 0x5F -> LOW LINE '`' # 0x60 -> GRAVE ACCENT 'a' # 0x61 -> LATIN SMALL LETTER A 'b' # 0x62 -> LATIN SMALL LETTER B 'c' # 0x63 -> LATIN SMALL LETTER C 'd' # 0x64 -> LATIN SMALL LETTER D 'e' # 0x65 -> LATIN SMALL LETTER E 'f' # 0x66 -> LATIN SMALL LETTER F 'g' # 0x67 -> LATIN SMALL LETTER G 'h' # 0x68 -> LATIN SMALL LETTER H 'i' # 0x69 -> LATIN SMALL LETTER I 'j' # 0x6A -> LATIN SMALL LETTER J 'k' # 0x6B -> LATIN SMALL LETTER K 'l' # 0x6C -> LATIN SMALL LETTER L 'm' # 0x6D -> LATIN SMALL LETTER M 'n' # 0x6E -> LATIN SMALL LETTER N 'o' # 0x6F -> LATIN SMALL LETTER O 'p' # 0x70 -> LATIN SMALL LETTER P 'q' # 0x71 -> LATIN SMALL LETTER Q 'r' # 0x72 -> LATIN SMALL LETTER R 's' # 0x73 -> LATIN SMALL LETTER S 't' # 0x74 -> LATIN SMALL LETTER T 'u' # 0x75 -> LATIN SMALL LETTER U 'v' # 0x76 -> LATIN SMALL LETTER V 'w' # 0x77 -> LATIN SMALL LETTER W 'x' # 0x78 -> LATIN SMALL LETTER X 'y' # 0x79 -> LATIN SMALL LETTER Y 'z' # 0x7A -> LATIN SMALL LETTER Z '{' # 0x7B -> LEFT CURLY BRACKET '|' # 0x7C -> VERTICAL LINE '}' # 0x7D -> RIGHT CURLY BRACKET '~' # 0x7E -> TILDE '\x7f' # 0x7F -> DELETE '\u20ac' # 0x80 -> EURO SIGN '\ufffe' # 0x81 -> UNDEFINED '\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK '\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK '\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK '\u2026' # 0x85 -> HORIZONTAL ELLIPSIS '\u2020' # 0x86 -> DAGGER '\u2021' # 0x87 -> DOUBLE DAGGER '\ufffe' # 0x88 -> UNDEFINED '\u2030' # 0x89 -> PER MILLE SIGN '\ufffe' # 0x8A -> UNDEFINED '\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK '\ufffe' # 0x8C -> UNDEFINED '\ufffe' # 0x8D -> UNDEFINED '\ufffe' # 0x8E -> UNDEFINED '\ufffe' # 0x8F -> UNDEFINED '\ufffe' # 0x90 -> UNDEFINED '\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK '\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK '\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK '\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK '\u2022' # 0x95 -> BULLET '\u2013' # 0x96 -> EN DASH '\u2014' # 0x97 
-> EM DASH '\ufffe' # 0x98 -> UNDEFINED '\u2122' # 0x99 -> TRADE MARK SIGN '\ufffe' # 0x9A -> UNDEFINED '\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK '\ufffe' # 0x9C -> UNDEFINED '\ufffe' # 0x9D -> UNDEFINED '\ufffe' # 0x9E -> UNDEFINED '\ufffe' # 0x9F -> UNDEFINED '\xa0' # 0xA0 -> NO-BREAK SPACE '\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS '\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS '\xa3' # 0xA3 -> POUND SIGN '\xa4' # 0xA4 -> CURRENCY SIGN '\xa5' # 0xA5 -> YEN SIGN '\xa6' # 0xA6 -> BROKEN BAR '\xa7' # 0xA7 -> SECTION SIGN '\xa8' # 0xA8 -> DIAERESIS '\xa9' # 0xA9 -> COPYRIGHT SIGN '\ufffe' # 0xAA -> UNDEFINED '\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK '\xac' # 0xAC -> NOT SIGN '\xad' # 0xAD -> SOFT HYPHEN '\xae' # 0xAE -> REGISTERED SIGN '\u2015' # 0xAF -> HORIZONTAL BAR '\xb0' # 0xB0 -> DEGREE SIGN '\xb1' # 0xB1 -> PLUS-MINUS SIGN '\xb2' # 0xB2 -> SUPERSCRIPT TWO '\xb3' # 0xB3 -> SUPERSCRIPT THREE '\u0384' # 0xB4 -> GREEK TONOS '\xb5' # 0xB5 -> MICRO SIGN '\xb6' # 0xB6 -> PILCROW SIGN '\xb7' # 0xB7 -> MIDDLE DOT '\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS '\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS '\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS '\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK '\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS '\xbd' # 0xBD -> VULGAR FRACTION ONE HALF '\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS '\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS '\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS '\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA '\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA '\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA '\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA '\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON '\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA '\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA '\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA '\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA 
'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA '\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA '\u039c' # 0xCC -> GREEK CAPITAL LETTER MU '\u039d' # 0xCD -> GREEK CAPITAL LETTER NU '\u039e' # 0xCE -> GREEK CAPITAL LETTER XI '\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON '\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI '\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO '\ufffe' # 0xD2 -> UNDEFINED '\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA '\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU '\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON '\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI '\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI '\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI '\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA '\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA '\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA '\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS '\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS '\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS '\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS '\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS '\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA '\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA '\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA '\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA '\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON '\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA '\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA '\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA '\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA '\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA '\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA '\u03bc' # 0xEC -> GREEK SMALL LETTER MU '\u03bd' # 0xED -> GREEK SMALL LETTER NU '\u03be' # 0xEE -> GREEK SMALL LETTER XI '\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON '\u03c0' # 0xF0 -> GREEK SMALL LETTER PI '\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO '\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA '\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA '\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU '\u03c5' # 0xF5 
-> GREEK SMALL LETTER UPSILON '\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI '\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI '\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI '\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA '\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA '\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA '\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS '\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS '\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS '\ufffe' # 0xFF -> UNDEFINED ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
lgpl-3.0
emprovements/binwalk
src/binwalk/modules/hexdiff.py
18
6926
import os import sys import string import binwalk.core.common as common from binwalk.core.compat import * from binwalk.core.module import Module, Option, Kwarg class HexDiff(Module): COLORS = { 'red' : '31', 'green' : '32', 'blue' : '34', } SEPERATORS = ['\\', '/'] DEFAULT_BLOCK_SIZE = 16 SKIPPED_LINE = "*" CUSTOM_DISPLAY_FORMAT = "0x%.8X %s" TITLE = "Binary Diffing" CLI = [ Option(short='W', long='hexdump', kwargs={'enabled' : True}, description='Perform a hexdump / diff of a file or files'), Option(short='G', long='green', kwargs={'show_green' : True}, description='Only show lines containing bytes that are the same among all files'), Option(short='i', long='red', kwargs={'show_red' : True}, description='Only show lines containing bytes that are different among all files'), Option(short='U', long='blue', kwargs={'show_blue' : True}, description='Only show lines containing bytes that are different among some files'), Option(short='w', long='terse', kwargs={'terse' : True}, description='Diff all files, but only display a hex dump of the first file'), ] KWARGS = [ Kwarg(name='show_red', default=False), Kwarg(name='show_blue', default=False), Kwarg(name='show_green', default=False), Kwarg(name='terse', default=False), Kwarg(name='enabled', default=False), ] RESULT_FORMAT = "%s\n" RESULT = ['display'] def _no_colorize(self, c, color="red", bold=True): return c def _colorize(self, c, color="red", bold=True): attr = [] attr.append(self.COLORS[color]) if bold: attr.append('1') return "\x1b[%sm%s\x1b[0m" % (';'.join(attr), c) def _color_filter(self, data): red = '\x1b[' + self.COLORS['red'] + ';' green = '\x1b[' + self.COLORS['green'] + ';' blue = '\x1b[' + self.COLORS['blue'] + ';' if self.show_blue and blue in data: return True elif self.show_green and green in data: return True elif self.show_red and red in data: return True return False def hexascii(self, target_data, byte, offset): color = "green" for (fp_i, data_i) in iterator(target_data): diff_count = 0 for (fp_j, 
data_j) in iterator(target_data): if fp_i == fp_j: continue try: if data_i[offset] != data_j[offset]: diff_count += 1 except IndexError as e: diff_count += 1 if diff_count == len(target_data)-1: color = "red" elif diff_count > 0: color = "blue" break hexbyte = self.colorize("%.2X" % ord(byte), color) if byte not in string.printable or byte in string.whitespace: byte = "." asciibyte = self.colorize(byte, color) return (hexbyte, asciibyte) def diff_files(self, target_files): last_line = None loop_count = 0 sep_count = 0 while True: line = "" done_files = 0 block_data = {} seperator = self.SEPERATORS[sep_count % 2] for fp in target_files: block_data[fp] = fp.read(self.block) if not block_data[fp]: done_files += 1 # No more data from any of the target files? Done. if done_files == len(target_files): break for fp in target_files: hexline = "" asciiline = "" for i in range(0, self.block): if i >= len(block_data[fp]): hexbyte = "XX" asciibyte = "." else: (hexbyte, asciibyte) = self.hexascii(block_data, block_data[fp][i], i) hexline += "%s " % hexbyte asciiline += "%s" % asciibyte line += "%s |%s|" % (hexline, asciiline) if self.terse: break if fp != target_files[-1]: line += " %s " % seperator offset = fp.offset + (self.block * loop_count) if not self._color_filter(line): display = line = self.SKIPPED_LINE else: display = self.CUSTOM_DISPLAY_FORMAT % (offset, line) sep_count += 1 if line != self.SKIPPED_LINE or last_line != line: self.result(offset=offset, description=line, display=display) last_line = line loop_count += 1 def init(self): # To mimic expected behavior, if all options are False, we show everything if not any([self.show_red, self.show_green, self.show_blue]): self.show_red = self.show_green = self.show_blue = True # Always disable terminal formatting, as it won't work properly with colorized output self.config.display.fit_to_screen = False # Set the block size (aka, hexdump line size) self.block = self.config.block if not self.block: self.block = 
self.DEFAULT_BLOCK_SIZE # Build a list of files to hexdiff self.hex_target_files = [] while True: f = self.next_file(close_previous=False) if not f: break else: self.hex_target_files.append(f) # Build the header format string header_width = (self.block * 4) + 2 if self.terse: file_count = 1 else: file_count = len(self.hex_target_files) self.HEADER_FORMAT = "OFFSET " + (("%%-%ds " % header_width) * file_count) + "\n" # Build the header argument list self.HEADER = [fp.name for fp in self.hex_target_files] if self.terse and len(self.HEADER) > 1: self.HEADER = self.HEADER[0] # Set up the tty for colorization, if it is supported if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty() and not common.MSWindows(): import curses curses.setupterm() self.colorize = self._colorize else: self.colorize = self._no_colorize def run(self): if self.hex_target_files: self.header() self.diff_files(self.hex_target_files) self.footer()
mit
HiSPARC/station-software
user/python/Lib/hotshot/log.py
175
6239
import _hotshot import os.path import parser import symbol from _hotshot import \ WHAT_ENTER, \ WHAT_EXIT, \ WHAT_LINENO, \ WHAT_DEFINE_FILE, \ WHAT_DEFINE_FUNC, \ WHAT_ADD_INFO __all__ = ["LogReader", "ENTER", "EXIT", "LINE"] ENTER = WHAT_ENTER EXIT = WHAT_EXIT LINE = WHAT_LINENO class LogReader: def __init__(self, logfn): # fileno -> filename self._filemap = {} # (fileno, lineno) -> filename, funcname self._funcmap = {} self._reader = _hotshot.logreader(logfn) self._nextitem = self._reader.next self._info = self._reader.info if 'current-directory' in self._info: self.cwd = self._info['current-directory'] else: self.cwd = None # This mirrors the call stack of the profiled code as the log # is read back in. It contains tuples of the form: # # (file name, line number of function def, function name) # self._stack = [] self._append = self._stack.append self._pop = self._stack.pop def close(self): self._reader.close() def fileno(self): """Return the file descriptor of the log reader's log file.""" return self._reader.fileno() def addinfo(self, key, value): """This method is called for each additional ADD_INFO record. This can be overridden by applications that want to receive these events. The default implementation does not need to be called by alternate implementations. The initial set of ADD_INFO records do not pass through this mechanism; this is only needed to receive notification when new values are added. Subclasses can inspect self._info after calling LogReader.__init__(). 
""" pass def get_filename(self, fileno): try: return self._filemap[fileno] except KeyError: raise ValueError, "unknown fileno" def get_filenames(self): return self._filemap.values() def get_fileno(self, filename): filename = os.path.normcase(os.path.normpath(filename)) for fileno, name in self._filemap.items(): if name == filename: return fileno raise ValueError, "unknown filename" def get_funcname(self, fileno, lineno): try: return self._funcmap[(fileno, lineno)] except KeyError: raise ValueError, "unknown function location" # Iteration support: # This adds an optional (& ignored) parameter to next() so that the # same bound method can be used as the __getitem__() method -- this # avoids using an additional method call which kills the performance. def next(self, index=0): while 1: # This call may raise StopIteration: what, tdelta, fileno, lineno = self._nextitem() # handle the most common cases first if what == WHAT_ENTER: filename, funcname = self._decode_location(fileno, lineno) t = (filename, lineno, funcname) self._append(t) return what, t, tdelta if what == WHAT_EXIT: try: return what, self._pop(), tdelta except IndexError: raise StopIteration if what == WHAT_LINENO: filename, firstlineno, funcname = self._stack[-1] return what, (filename, lineno, funcname), tdelta if what == WHAT_DEFINE_FILE: filename = os.path.normcase(os.path.normpath(tdelta)) self._filemap[fileno] = filename elif what == WHAT_DEFINE_FUNC: filename = self._filemap[fileno] self._funcmap[(fileno, lineno)] = (filename, tdelta) elif what == WHAT_ADD_INFO: # value already loaded into self.info; call the # overridable addinfo() handler so higher-level code # can pick up the new value if tdelta == 'current-directory': self.cwd = lineno self.addinfo(tdelta, lineno) else: raise ValueError, "unknown event type" def __iter__(self): return self # # helpers # def _decode_location(self, fileno, lineno): try: return self._funcmap[(fileno, lineno)] except KeyError: # # This should only be needed when the 
log file does not # contain all the DEFINE_FUNC records needed to allow the # function name to be retrieved from the log file. # if self._loadfile(fileno): filename = funcname = None try: filename, funcname = self._funcmap[(fileno, lineno)] except KeyError: filename = self._filemap.get(fileno) funcname = None self._funcmap[(fileno, lineno)] = (filename, funcname) return filename, funcname def _loadfile(self, fileno): try: filename = self._filemap[fileno] except KeyError: print "Could not identify fileId", fileno return 1 if filename is None: return 1 absname = os.path.normcase(os.path.join(self.cwd, filename)) try: fp = open(absname) except IOError: return st = parser.suite(fp.read()) fp.close() # Scan the tree looking for def and lambda nodes, filling in # self._funcmap with all the available information. funcdef = symbol.funcdef lambdef = symbol.lambdef stack = [st.totuple(1)] while stack: tree = stack.pop() try: sym = tree[0] except (IndexError, TypeError): continue if sym == funcdef: self._funcmap[(fileno, tree[2][2])] = filename, tree[2][1] elif sym == lambdef: self._funcmap[(fileno, tree[1][2])] = filename, "<lambda>" stack.extend(list(tree[1:]))
gpl-3.0
qtproject/pyside-pyside
tests/QtWidgets/qpen_test.py
1
2519
############################################################################# ## ## Copyright (C) 2016 The Qt Company Ltd. ## Contact: https://www.qt.io/licensing/ ## ## This file is part of the test suite of PySide2. ## ## $QT_BEGIN_LICENSE:GPL-EXCEPT$ ## Commercial License Usage ## Licensees holding valid commercial Qt licenses may use this file in ## accordance with the commercial license agreement provided with the ## Software or, alternatively, in accordance with the terms contained in ## a written agreement between you and The Qt Company. For licensing terms ## and conditions see https://www.qt.io/terms-conditions. For further ## information use the contact form at https://www.qt.io/contact-us. ## ## GNU General Public License Usage ## Alternatively, this file may be used under the terms of the GNU ## General Public License version 3 as published by the Free Software ## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT ## included in the packaging of this file. Please review the following ## information to ensure the GNU General Public License requirements will ## be met: https://www.gnu.org/licenses/gpl-3.0.html. 
## ## $QT_END_LICENSE$ ## ############################################################################# import unittest from helper import UsesQApplication from PySide2.QtCore import Qt, QTimer from PySide2.QtGui import QPen, QPainter from PySide2.QtWidgets import QWidget class Painting(QWidget): def __init__(self): QWidget.__init__(self) self.penFromEnum = None self.penFromInteger = None def paintEvent(self, event): painter = QPainter(self) painter.setPen(Qt.NoPen) self.penFromEnum = painter.pen() painter.setPen(int(Qt.NoPen)) self.penFromInteger = painter.pen() class QPenTest(UsesQApplication): def testCtorWithCreatedEnums(self): '''A simple case of QPen creation using created enums.''' width = 0 style = Qt.PenStyle(0) cap = Qt.PenCapStyle(0) join = Qt.PenJoinStyle(0) pen = QPen(Qt.blue, width, style, cap, join) def testSetPenWithPenStyleEnum(self): '''Calls QPainter.setPen with both enum and integer. Bug #511.''' w = Painting() w.show() QTimer.singleShot(1000, self.app.quit) self.app.exec_() self.assertEqual(w.penFromEnum.style(), Qt.NoPen) self.assertEqual(w.penFromInteger.style(), Qt.SolidLine) if __name__ == '__main__': unittest.main()
lgpl-2.1
karcio/checkSumValidatorGUI
checkSumVal/src/checkSumGui.py
1
2950
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'checkSumGui.ui' # # Created: Thu Jan 8 02:22:42 2015 # by: PyQt5 UI code generator 5.4 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_Form(object): def setupUi(self, Form): Form.setObjectName("Form") Form.resize(390, 210) self.label = QtWidgets.QLabel(Form) self.label.setGeometry(QtCore.QRect(20, 20, 76, 15)) self.label.setObjectName("label") self.label_2 = QtWidgets.QLabel(Form) self.label_2.setGeometry(QtCore.QRect(20, 70, 76, 15)) self.label_2.setObjectName("label_2") self.label_3 = QtWidgets.QLabel(Form) self.label_3.setGeometry(QtCore.QRect(20, 120, 36, 15)) self.label_3.setObjectName("label_3") self.pushButton = QtWidgets.QPushButton(Form) self.pushButton.setGeometry(QtCore.QRect(280, 160, 92, 27)) self.pushButton.setObjectName("pushButton") self.lineEdit = QtWidgets.QLineEdit(Form) self.lineEdit.setGeometry(QtCore.QRect(120, 20, 250, 25)) self.lineEdit.setMaxLength(32) self.lineEdit.setObjectName("lineEdit") self.lineEdit_2 = QtWidgets.QLineEdit(Form) self.lineEdit_2.setGeometry(QtCore.QRect(120, 70, 250, 25)) self.lineEdit_2.setMaxLength(32) self.lineEdit_2.setObjectName("lineEdit_2") self.label_4 = QtWidgets.QLabel(Form) self.label_4.setGeometry(QtCore.QRect(120, 120, 251, 16)) font = QtGui.QFont() font.setBold(True) font.setWeight(75) font.setStyleStrategy(QtGui.QFont.PreferAntialias) self.label_4.setFont(font) self.label_4.setText("") self.label_4.setObjectName("label_4") self.pushButton_2 = QtWidgets.QPushButton(Form) self.pushButton_2.setGeometry(QtCore.QRect(170, 160, 92, 27)) self.pushButton_2.setObjectName("pushButton_2") self.retranslateUi(Form) self.pushButton_2.clicked.connect(Form.close) QtCore.QMetaObject.connectSlotsByName(Form) self.pushButton.clicked.connect(self.validation_b) def validation_b(self): text1 = self.lineEdit.text() text2 = self.lineEdit_2.text() if text1 == text2: result = "True - 
identical" else: result = "False - NOT identical" self.label_4.setText(repr(result)) def retranslateUi(self, Form): _translate = QtCore.QCoreApplication.translate Form.setWindowTitle(_translate("Form", "Check Sum validator v 0.2")) self.label.setText(_translate("Form", "insert string")) self.label_2.setText(_translate("Form", "insert string")) self.label_3.setText(_translate("Form", "result")) self.pushButton.setText(_translate("Form", "&validate")) self.pushButton_2.setText(_translate("Form", "&exit"))
gpl-3.0
wang1352083/pythontool
python-2.7.12-lib/unittest/test/test_functiontestcase.py
37
5547
# NOTE(review): Python 2 test module — uses ``basestring`` and the py2
# ``unittest.test.support`` helpers; do not run under Python 3 as-is.
import unittest

from unittest.test.support import LoggingResult


class Test_FunctionTestCase(unittest.TestCase):
    """Behavioral tests for unittest.FunctionTestCase: wrapping a plain
    function (plus optional setUp/tearDown callables) as a test case."""

    # "Return the number of tests represented by the this test object. For
    # TestCase instances, this will always be 1"
    def test_countTestCases(self):
        test = unittest.FunctionTestCase(lambda: None)
        self.assertEqual(test.countTestCases(), 1)

    # "When a setUp() method is defined, the test runner will run that method
    # prior to each test. Likewise, if a tearDown() method is defined, the
    # test runner will invoke that method after each test. In the example,
    # setUp() was used to create a fresh sequence for each test."
    #
    # Make sure the proper call order is maintained, even if setUp() raises
    # an exception.
    def test_run_call_order__error_in_setUp(self):
        events = []
        result = LoggingResult(events)

        def setUp():
            events.append('setUp')
            raise RuntimeError('raised by setUp')

        def test():
            events.append('test')

        def tearDown():
            events.append('tearDown')

        # Neither the test body nor tearDown runs when setUp errors out.
        expected = ['startTest', 'setUp', 'addError', 'stopTest']
        unittest.FunctionTestCase(test, setUp, tearDown).run(result)
        self.assertEqual(events, expected)

    # "When a setUp() method is defined, the test runner will run that method
    # prior to each test. Likewise, if a tearDown() method is defined, the
    # test runner will invoke that method after each test. In the example,
    # setUp() was used to create a fresh sequence for each test."
    #
    # Make sure the proper call order is maintained, even if the test raises
    # an error (as opposed to a failure).
    def test_run_call_order__error_in_test(self):
        events = []
        result = LoggingResult(events)

        def setUp():
            events.append('setUp')

        def test():
            events.append('test')
            raise RuntimeError('raised by test')

        def tearDown():
            events.append('tearDown')

        # tearDown still runs after an error in the test body.
        expected = ['startTest', 'setUp', 'test', 'addError', 'tearDown',
                    'stopTest']
        unittest.FunctionTestCase(test, setUp, tearDown).run(result)
        self.assertEqual(events, expected)

    # "When a setUp() method is defined, the test runner will run that method
    # prior to each test. Likewise, if a tearDown() method is defined, the
    # test runner will invoke that method after each test. In the example,
    # setUp() was used to create a fresh sequence for each test."
    #
    # Make sure the proper call order is maintained, even if the test signals
    # a failure (as opposed to an error).
    def test_run_call_order__failure_in_test(self):
        events = []
        result = LoggingResult(events)

        def setUp():
            events.append('setUp')

        def test():
            events.append('test')
            self.fail('raised by test')

        def tearDown():
            events.append('tearDown')

        # A failure is logged via addFailure rather than addError.
        expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
                    'stopTest']
        unittest.FunctionTestCase(test, setUp, tearDown).run(result)
        self.assertEqual(events, expected)

    # "When a setUp() method is defined, the test runner will run that method
    # prior to each test. Likewise, if a tearDown() method is defined, the
    # test runner will invoke that method after each test. In the example,
    # setUp() was used to create a fresh sequence for each test."
    #
    # Make sure the proper call order is maintained, even if tearDown() raises
    # an exception.
    def test_run_call_order__error_in_tearDown(self):
        events = []
        result = LoggingResult(events)

        def setUp():
            events.append('setUp')

        def test():
            events.append('test')

        def tearDown():
            events.append('tearDown')
            raise RuntimeError('raised by tearDown')

        expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
                    'stopTest']
        unittest.FunctionTestCase(test, setUp, tearDown).run(result)
        self.assertEqual(events, expected)

    # "Return a string identifying the specific test case."
    #
    # Because of the vague nature of the docs, I'm not going to lock this
    # test down too much. Really all that can be asserted is that the id()
    # will be a string (either 8-byte or unicode -- again, because the docs
    # just say "string")
    def test_id(self):
        test = unittest.FunctionTestCase(lambda: None)

        # Python 2: basestring covers both str and unicode.
        self.assertIsInstance(test.id(), basestring)

    # "Returns a one-line description of the test, or None if no description
    # has been provided. The default implementation of this method returns
    # the first line of the test method's docstring, if available, or None."
    def test_shortDescription__no_docstring(self):
        test = unittest.FunctionTestCase(lambda: None)
        self.assertEqual(test.shortDescription(), None)

    # "Returns a one-line description of the test, or None if no description
    # has been provided. The default implementation of this method returns
    # the first line of the test method's docstring, if available, or None."
    def test_shortDescription__singleline_docstring(self):
        desc = "this tests foo"
        test = unittest.FunctionTestCase(lambda: None, description=desc)
        self.assertEqual(test.shortDescription(), "this tests foo")


if __name__ == '__main__':
    unittest.main()
mit
andreparrish/python-for-android
python3-alpha/python3-src/Lib/distutils/tests/test_bdist_rpm.py
47
4029
"""Tests for distutils.command.bdist_rpm.""" import unittest import sys import os import tempfile import shutil from test.support import run_unittest from distutils.core import Distribution from distutils.command.bdist_rpm import bdist_rpm from distutils.tests import support from distutils.spawn import find_executable from distutils import spawn from distutils.errors import DistutilsExecError SETUP_PY = """\ from distutils.core import setup import foo setup(name='foo', version='0.1', py_modules=['foo'], url='xxx', author='xxx', author_email='xxx') """ class BuildRpmTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): def setUp(self): super(BuildRpmTestCase, self).setUp() self.old_location = os.getcwd() self.old_sys_argv = sys.argv, sys.argv[:] def tearDown(self): os.chdir(self.old_location) sys.argv = self.old_sys_argv[0] sys.argv[:] = self.old_sys_argv[1] super(BuildRpmTestCase, self).tearDown() def test_quiet(self): # XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X if sys.platform != 'linux2': return # this test will run only if the rpm commands are found if (find_executable('rpm') is None or find_executable('rpmbuild') is None): return # let's create a package tmp_dir = self.mkdtemp() pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) self.write_file((pkg_dir, 'foo.py'), '#') self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') dist = Distribution({'name': 'foo', 'version': '0.1', 'py_modules': ['foo'], 'url': 'xxx', 'author': 'xxx', 'author_email': 'xxx'}) dist.script_name = 'setup.py' os.chdir(pkg_dir) sys.argv = ['setup.py'] cmd = bdist_rpm(dist) cmd.fix_python = True # running in quiet mode cmd.quiet = 1 cmd.ensure_finalized() cmd.run() dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) self.assertTrue('foo-0.1-1.noarch.rpm' in dist_created) def test_no_optimize_flag(self): # 
XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X if sys.platform != 'linux2': return # http://bugs.python.org/issue1533164 # this test will run only if the rpm command is found if (find_executable('rpm') is None or find_executable('rpmbuild') is None): return # let's create a package that brakes bdist_rpm tmp_dir = self.mkdtemp() pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) self.write_file((pkg_dir, 'foo.py'), '#') self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') dist = Distribution({'name': 'foo', 'version': '0.1', 'py_modules': ['foo'], 'url': 'xxx', 'author': 'xxx', 'author_email': 'xxx'}) dist.script_name = 'setup.py' os.chdir(pkg_dir) sys.argv = ['setup.py'] cmd = bdist_rpm(dist) cmd.fix_python = True cmd.quiet = 1 cmd.ensure_finalized() cmd.run() dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) self.assertTrue('foo-0.1-1.noarch.rpm' in dist_created) os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) def test_suite(): return unittest.makeSuite(BuildRpmTestCase) if __name__ == '__main__': run_unittest(test_suite())
apache-2.0
csvtools/csvtools
src/tests/test_pointsizes.py
1
1209
# Allow direct execution import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import unittest from lib.pointsizes import Pointsizes class SheetTestCase(unittest.TestCase): def setUp(self): pass def test(self): self.assertEquals(Pointsizes.min(), 1) self.assertEquals(Pointsizes.max(), 23) self.assertEquals(Pointsizes.normal(), 12) self.assertEquals(Pointsizes.percentage(12), 100) self.assertEquals(Pointsizes.toFontSize(12), 12) self.assertEquals(Pointsizes.toFontSize(50), 72) self.assertEquals(Pointsizes.toFontSize(-2), 1) self.assertEquals(Pointsizes.toFontSize(16), 20) self.assertEquals(Pointsizes.zoom(11, +2), 13) self.assertEquals(Pointsizes.zoom(11, +20), 23) self.assertEquals(Pointsizes.zoom(11, -2), 9) self.assertEquals(Pointsizes.zoom(11, -20), 1) self.assertEquals(Pointsizes.toPointSize(11), 11) self.assertEquals(Pointsizes.toPointSize(100), 23) self.assertEquals(Pointsizes.toPointSize(0), 1) self.assertEquals(Pointsizes.toPointSize(14), 13) if __name__ == '__main__': unittest.main()
gpl-3.0
mkobos/tree_crawler
concurrent_tree_crawler/multithreaded_crawler.py
1
5649
# NOTE(review): Python 2 module (print statements below).
import os
import logging
import time
import datetime

from concurrent_tree_crawler.common.file_helper import lenient_makedir
from concurrent_tree_crawler.common.logger import Logger
from concurrent_tree_crawler.common.activity_schedule import AlwaysActiveSchedule
from concurrent_tree_crawler.crawlers_manager import CrawlersManager
from concurrent_tree_crawler.rw_lock_tree_accessor import RWLockTreeAccessor
from concurrent_tree_crawler.navigator_tree_wrapper import NavigatorTreeWrapper
from concurrent_tree_crawler.tree_saver_thread import TreeSaverThread
from concurrent_tree_crawler.abstract_node import NodeState
from concurrent_tree_crawler.xml_tree_serialization import XMLTreeReader


class MultithreadedCrawler:
    """
    Runs several threads to crawl the tree.

    It is also responsible for all the ancillary stuff: makes sure that
    the state of the tree is saved to disk, sets up the logging level etc.
    """

    def __init__(self, navigators, sentinel, activity_schedule=None,
                 log_file_path=None, state_file_path=None, save_period=None,
                 logging_level=logging.ERROR):
        """
        @param navigators: list of navigators to be used by the crawler.
            Each navigator will be run in a separate thread, thus the
            number of the threads is equal to the number of navigators.
        @type navigators: list of L{AbstractTreeNavigator}s
        @param sentinel: a technical node which will be made parent of
            the root node.
        @type sentinel: L{AbstractNode}
        @param activity_schedule: if C{None}, no schedule is used and the
            program works until it finishes crawling.
        @type activity_schedule: L{AbstractActivitySchedule}
        @param log_file_path: path to the log file. If C{None}, no log
            file will be used.
        @param state_file_path: path to the file where the state of the
            program will be saved. If C{None}, the state will not be saved.
        @param save_period: time between saving the tree state. If
            C{state_file_path} is C{None}, this value is ignored.
        @param logging_level: one of the logging level constants from
            C{logging}
        """
        if log_file_path is not None:
            lenient_makedir(os.path.dirname(log_file_path))
        if state_file_path is not None:
            if os.path.exists(state_file_path):
                # Resume: reload the tree and reopen any nodes that were
                # mid-processing when the previous run stopped.
                print "State file already exists. Loading the tree from this "\
                    "file and changing nodes with state PROCESSING to OPEN ... ",
                self.__load_state_file(state_file_path, sentinel)
                print "Done."
            else:
                lenient_makedir(os.path.dirname(state_file_path))
        self.__tree = RWLockTreeAccessor(sentinel)
        self.__navigators = navigators
        self.__manager = None
        self.__state_file_path = state_file_path
        self.__save_period = save_period
        self.__activity_schedule = activity_schedule
        if activity_schedule is None:
            # No schedule supplied: crawl continuously until finished.
            self.__activity_schedule = AlwaysActiveSchedule()
        self.__logging_level = logging_level
        self.__log_file_path = log_file_path

    def run(self):
        """
        Run activity/sleep cycles until all crawler threads finish.

        @return: sentinel node
        @rtype: L{AbstractNode}
        """
        self.__manager = self._create_crawlers_manager(
            self.__tree, self.__navigators)
        if self.__log_file_path is not None:
            Logger.start(file_path=self.__log_file_path,
                logging_level=self.__logging_level)
        while True:
            activity_time = self.__sleep_until_activity_period()
            saver_thread = None
            if self.__state_file_path is not None:
                saver_thread = self.__start_tree_saver_thread()
            self.__manager.start()
            # Wait at most until the scheduled end of the activity window
            # (activity_time may be None, meaning no timeout).
            threads_finished = \
                self.__manager.wait_until_finish(timeout=activity_time)
            if self.__state_file_path is not None:
                # Stop the periodic saver before deciding whether to loop.
                saver_thread.stop_activity()
                saver_thread.join()
            if threads_finished:
                break
        if self.__log_file_path is not None:
            Logger.stop()
        return self.__tree.get_sentinel()

    def _create_crawlers_manager(self, tree, navigators):
        # Wrap each navigator so it operates on the shared, locked tree.
        navigator_wrappers = []
        for navigator in navigators:
            navigator_wrapper = NavigatorTreeWrapper(navigator, tree)
            navigator_wrappers.append(navigator_wrapper)
        return CrawlersManager(tree, navigator_wrappers)

    def __start_tree_saver_thread(self):
        # Daemon thread: must not keep the process alive on its own.
        t = TreeSaverThread(
            self.__state_file_path, self.__tree, self.__save_period)
        t.daemon = True
        t.start()
        return t

    def __sleep_until_activity_period(self):
        """
        Sleep (stop program execution) until there's a time to wake up.

        @return: activity time, i.e. time until the start of the next
            sleep period, C{None} if such time point cannot be determined
            (as in case when the activity time will not stop in future).
        @rtype: number of seconds
        """
        while True:
            now = datetime.datetime.now()
            info = self.__activity_schedule.get_activity_info(now)
            if info.future_mode_change is None:
                if info.is_in_activity_period:
                    return None
                else:
                    raise Exception("Going to sleep forever?")
            mode_change_time = (info.future_mode_change - now).total_seconds()
            if not info.is_in_activity_period:
                logging.info("Going to sleep for {:.1f} seconds "
                    "(according to schedule)".format(
                        mode_change_time))
                time.sleep(mode_change_time)
                logging.info("Awaken")
            else:
                logging.info("Starting activity for {:.1f} seconds "
                    "(according to schedule)".format(
                        mode_change_time))
                return mode_change_time

    @staticmethod
    def __load_state_file(file_path, sentinel):
        # Rebuild the tree under the sentinel from the saved XML state.
        with open(file_path) as f:
            reader = XMLTreeReader(f)
            reader.read(sentinel)
        MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(
            sentinel.get_child("root"))

    @staticmethod
    def __change_state_from_PROCESSING_to_OPEN(node):
        # Nodes left PROCESSING by a crashed/stopped run must be retried.
        if node.get_state() == NodeState.PROCESSING:
            node.set_state(NodeState.OPEN)
        for child in node.get_children():
            MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(child)
mit
CamelBackNotation/CarnotKE
jyhton/lib-python/2.7/encodings/iso8859_6.py
593
11089
""" Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='iso8859-6', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE u'\x1c' # 
0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' # 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' 
# 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\x80' # 0x80 -> <control> u'\x81' # 0x81 -> <control> u'\x82' # 0x82 -> <control> u'\x83' # 0x83 -> <control> u'\x84' # 0x84 -> <control> u'\x85' # 0x85 -> <control> u'\x86' # 0x86 -> <control> u'\x87' # 0x87 -> <control> u'\x88' # 0x88 -> <control> u'\x89' # 0x89 -> <control> u'\x8a' # 0x8A -> <control> u'\x8b' # 0x8B -> <control> u'\x8c' # 0x8C -> <control> u'\x8d' # 0x8D -> <control> u'\x8e' # 0x8E -> <control> u'\x8f' # 0x8F -> <control> u'\x90' # 0x90 -> <control> u'\x91' # 0x91 -> <control> u'\x92' # 0x92 -> <control> u'\x93' # 0x93 -> <control> u'\x94' # 0x94 -> <control> u'\x95' # 0x95 -> <control> u'\x96' # 0x96 -> <control> u'\x97' # 0x97 -> <control> u'\x98' # 0x98 -> <control> u'\x99' # 0x99 -> <control> u'\x9a' 
# 0x9A -> <control> u'\x9b' # 0x9B -> <control> u'\x9c' # 0x9C -> <control> u'\x9d' # 0x9D -> <control> u'\x9e' # 0x9E -> <control> u'\x9f' # 0x9F -> <control> u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\ufffe' u'\ufffe' u'\ufffe' u'\xa4' # 0xA4 -> CURRENCY SIGN u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\u060c' # 0xAC -> ARABIC COMMA u'\xad' # 0xAD -> SOFT HYPHEN u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\u061b' # 0xBB -> ARABIC SEMICOLON u'\ufffe' u'\ufffe' u'\ufffe' u'\u061f' # 0xBF -> ARABIC QUESTION MARK u'\ufffe' u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE u'\u0627' # 0xC7 -> ARABIC LETTER ALEF u'\u0628' # 0xC8 -> ARABIC LETTER BEH u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA u'\u062a' # 0xCA -> ARABIC LETTER TEH u'\u062b' # 0xCB -> ARABIC LETTER THEH u'\u062c' # 0xCC -> ARABIC LETTER JEEM u'\u062d' # 0xCD -> ARABIC LETTER HAH u'\u062e' # 0xCE -> ARABIC LETTER KHAH u'\u062f' # 0xCF -> ARABIC LETTER DAL u'\u0630' # 0xD0 -> ARABIC LETTER THAL u'\u0631' # 0xD1 -> ARABIC LETTER REH u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN u'\u0633' # 0xD3 -> ARABIC LETTER SEEN u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN u'\u0635' # 0xD5 -> ARABIC LETTER SAD u'\u0636' # 0xD6 -> ARABIC LETTER DAD u'\u0637' # 0xD7 -> ARABIC LETTER TAH u'\u0638' # 0xD8 -> ARABIC LETTER ZAH u'\u0639' # 0xD9 -> ARABIC LETTER AIN u'\u063a' # 0xDA -> ARABIC LETTER GHAIN u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\u0640' # 0xE0 -> ARABIC TATWEEL u'\u0641' # 0xE1 -> ARABIC LETTER FEH u'\u0642' # 0xE2 -> ARABIC LETTER QAF u'\u0643' # 0xE3 -> ARABIC LETTER KAF u'\u0644' # 0xE4 -> ARABIC LETTER LAM u'\u0645' # 0xE5 -> ARABIC 
LETTER MEEM u'\u0646' # 0xE6 -> ARABIC LETTER NOON u'\u0647' # 0xE7 -> ARABIC LETTER HEH u'\u0648' # 0xE8 -> ARABIC LETTER WAW u'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA u'\u064a' # 0xEA -> ARABIC LETTER YEH u'\u064b' # 0xEB -> ARABIC FATHATAN u'\u064c' # 0xEC -> ARABIC DAMMATAN u'\u064d' # 0xED -> ARABIC KASRATAN u'\u064e' # 0xEE -> ARABIC FATHA u'\u064f' # 0xEF -> ARABIC DAMMA u'\u0650' # 0xF0 -> ARABIC KASRA u'\u0651' # 0xF1 -> ARABIC SHADDA u'\u0652' # 0xF2 -> ARABIC SUKUN u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' u'\ufffe' ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
apache-2.0
DucQuang1/youtube-dl
test/test_all_urls.py
102
6849
#!/usr/bin/env python

from __future__ import unicode_literals

# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from test.helper import gettestcases

from youtube_dl.extractor import (
    FacebookIE,
    gen_extractors,
    YoutubeIE,
)


class TestAllURLsMatching(unittest.TestCase):
    """Checks that URLs are claimed by exactly the expected extractors."""

    def setUp(self):
        self.ies = gen_extractors()

    def matching_ies(self, url):
        # Names of all non-generic extractors that claim this URL.
        return [ie.IE_NAME for ie in self.ies if ie.suitable(url) and ie.IE_NAME != 'generic']

    def assertMatch(self, url, ie_list):
        self.assertEqual(self.matching_ies(url), ie_list)

    def test_youtube_playlist_matching(self):
        assertPlaylist = lambda url: self.assertMatch(url, ['youtube:playlist'])
        assertPlaylist('ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
        assertPlaylist('UUBABnxM4Ar9ten8Mdjj1j0Q')  # 585
        assertPlaylist('PL63F0C78739B09958')
        assertPlaylist('https://www.youtube.com/playlist?list=UUBABnxM4Ar9ten8Mdjj1j0Q')
        assertPlaylist('https://www.youtube.com/course?list=ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
        assertPlaylist('https://www.youtube.com/playlist?list=PLwP_SiAcdui0KVebT0mU9Apz359a4ubsC')
        assertPlaylist('https://www.youtube.com/watch?v=AV6J6_AeFEQ&playnext=1&list=PL4023E734DA416012')  # 668
        self.assertFalse('youtube:playlist' in self.matching_ies('PLtS2H6bU1M'))
        # Top tracks
        assertPlaylist('https://www.youtube.com/playlist?list=MCUS.20142101')

    def test_youtube_matching(self):
        self.assertTrue(YoutubeIE.suitable('PLtS2H6bU1M'))
        self.assertFalse(YoutubeIE.suitable('https://www.youtube.com/watch?v=AV6J6_AeFEQ&playnext=1&list=PL4023E734DA416012'))  # 668
        self.assertMatch('http://youtu.be/BaW_jenozKc', ['youtube'])
        self.assertMatch('http://www.youtube.com/v/BaW_jenozKc', ['youtube'])
        self.assertMatch('https://youtube.googleapis.com/v/BaW_jenozKc', ['youtube'])
        self.assertMatch('http://www.cleanvideosearch.com/media/action/yt/watch?videoId=8v_4O44sfjM', ['youtube'])

    def test_youtube_channel_matching(self):
        assertChannel = lambda url: self.assertMatch(url, ['youtube:channel'])
        assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM')
        assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM?feature=gb_ch_rec')
        assertChannel('https://www.youtube.com/channel/HCtnHdj3df7iM/videos')

    def test_youtube_user_matching(self):
        self.assertMatch('www.youtube.com/NASAgovVideo/videos', ['youtube:user'])

    def test_youtube_feeds(self):
        self.assertMatch('https://www.youtube.com/feed/watch_later', ['youtube:watchlater'])
        self.assertMatch('https://www.youtube.com/feed/subscriptions', ['youtube:subscriptions'])
        self.assertMatch('https://www.youtube.com/feed/recommended', ['youtube:recommended'])
        self.assertMatch('https://www.youtube.com/my_favorites', ['youtube:favorites'])

    def test_youtube_show_matching(self):
        self.assertMatch('http://www.youtube.com/show/airdisasters', ['youtube:show'])

    def test_youtube_search_matching(self):
        self.assertMatch('http://www.youtube.com/results?search_query=making+mustard', ['youtube:search_url'])
        self.assertMatch('https://www.youtube.com/results?baz=bar&search_query=youtube-dl+test+video&filters=video&lclk=video', ['youtube:search_url'])

    def test_youtube_extract(self):
        assertExtractId = lambda url, id: self.assertEqual(YoutubeIE.extract_id(url), id)
        assertExtractId('http://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch?&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('https://www.youtube.com/watch_popup?v=BaW_jenozKc', 'BaW_jenozKc')
        assertExtractId('http://www.youtube.com/watch?v=BaW_jenozKcsharePLED17F32AD9753930', 'BaW_jenozKc')
        assertExtractId('BaW_jenozKc', 'BaW_jenozKc')

    def test_facebook_matching(self):
        self.assertTrue(FacebookIE.suitable('https://www.facebook.com/Shiniknoh#!/photo.php?v=10153317450565268'))
        self.assertTrue(FacebookIE.suitable('https://www.facebook.com/cindyweather?fref=ts#!/photo.php?v=10152183998945793'))

    def test_no_duplicates(self):
        # Every test-case URL must be claimed only by its own extractor
        # (or the generic one).
        ies = gen_extractors()
        for tc in gettestcases(include_onlymatching=True):
            url = tc['url']
            for ie in ies:
                if type(ie).__name__ in ('GenericIE', tc['name'] + 'IE'):
                    self.assertTrue(ie.suitable(url), '%s should match URL %r' % (type(ie).__name__, url))
                else:
                    self.assertFalse(
                        ie.suitable(url),
                        '%s should not match URL %r . That URL belongs to %s.' % (type(ie).__name__, url, tc['name']))

    def test_keywords(self):
        self.assertMatch(':ytsubs', ['youtube:subscriptions'])
        self.assertMatch(':ytsubscriptions', ['youtube:subscriptions'])
        self.assertMatch(':ythistory', ['youtube:history'])
        self.assertMatch(':thedailyshow', ['ComedyCentralShows'])
        self.assertMatch(':tds', ['ComedyCentralShows'])

    def test_vimeo_matching(self):
        self.assertMatch('https://vimeo.com/channels/tributes', ['vimeo:channel'])
        self.assertMatch('https://vimeo.com/channels/31259', ['vimeo:channel'])
        self.assertMatch('https://vimeo.com/channels/31259/53576664', ['vimeo'])
        self.assertMatch('https://vimeo.com/user7108434', ['vimeo:user'])
        self.assertMatch('https://vimeo.com/user7108434/videos', ['vimeo:user'])
        self.assertMatch('https://vimeo.com/user21297594/review/75524534/3c257a1b5d', ['vimeo:review'])

    # https://github.com/rg3/youtube-dl/issues/1930
    def test_soundcloud_not_matching_sets(self):
        self.assertMatch('http://soundcloud.com/floex/sets/gone-ep', ['soundcloud:set'])

    def test_tumblr(self):
        self.assertMatch('http://tatianamaslanydaily.tumblr.com/post/54196191430/orphan-black-dvd-extra-behind-the-scenes', ['Tumblr'])
        self.assertMatch('http://tatianamaslanydaily.tumblr.com/post/54196191430', ['Tumblr'])

    def test_pbs(self):
        # https://github.com/rg3/youtube-dl/issues/2350
        self.assertMatch('http://video.pbs.org/viralplayer/2365173446/', ['PBS'])
        self.assertMatch('http://video.pbs.org/widget/partnerplayer/980042464/', ['PBS'])

    def test_yahoo_https(self):
        # https://github.com/rg3/youtube-dl/issues/2701
        self.assertMatch(
            'https://screen.yahoo.com/smartwatches-latest-wearable-gadgets-163745379-cbs.html',
            ['Yahoo'])

if __name__ == '__main__':
    unittest.main()
unlicense
elli0ttB/problems
sorting/quicksort.py
1
1524
#!/usr/bin/env python def quicksort(arr, partition): if (partition == "hoare"): quicksort_hoare(arr, 0, len(arr) -1) elif (partition == "lomuto"): quicksort_lomuto(arr, 0, len(arr) -1) else: raise ValueError() def quicksort_hoare(arr, lo, hi): # lo and hi follow standard method of being inclusive on the bottom, exclusive on the top. """Run a quicksort_hoare given a partition scheme""" if lo < hi: p = hoare(arr, lo, hi) quicksort_hoare(arr, lo, p) quicksort_hoare(arr, p+1, hi) def quicksort_lomuto(arr, lo, hi): # lo and hi follow standard method of being inclusive on the bottom, exclusive on the top. """Run a quicksort_lomuto given a partition scheme""" if lo < hi: p = lomuto(arr, lo, hi) quicksort_lomuto(arr, lo, p-1) quicksort_lomuto(arr, p+1, hi) def lomuto(arr, lo, hi): pivot = arr[hi] i = lo - 1 for j in range(lo, hi + 1): if arr[j] <= pivot: i += 1 arr[i], arr[j] = arr[j], arr[i] return i # we know that arr[i] = p def hoare(arr, lo, hi): pivot = arr[lo] i = lo - 1 j = hi + 1 while True: i, j = i+1, j-1 while arr[j] > pivot: j -= 1 while arr[i] < pivot: i += 1 if i < j: arr[i], arr[j] = arr[j], arr[i] else: return j def main(): import sort_test sort_test.test(lom) sort_test.test(hor) if __name__ == "__main__": main()
mit
lastgeniusleft2004/docker
vendor/src/github.com/hashicorp/go-msgpack/codec/msgpack_test.py
1232
3478
#!/usr/bin/env python

# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# NOTE(review): Python 2 script (print statements); requires the
# third-party msgpack and msgpackrpc packages.

import msgpack, msgpackrpc, sys, os, threading


def get_test_data_list():
    # get list with all primitive types, and a combo type
    l0 = [
        -8,
        -1616,
        -32323232,
        -6464646464646464,
        192,
        1616,
        32323232,
        6464646464646464,
        192,
        -3232.0,
        -6464646464.0,
        3232.0,
        6464646464.0,
        False,
        True,
        None,
        "someday",
        "",
        "bytestring",
        1328176922000002000,
        -2206187877999998000,
        0,
        -6795364578871345152
        ]
    l1 = [
        { "true": True,
          "false": False },
        { "true": "True",
          "false": False,
          "uint16(1616)": 1616 },
        { "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
          "int32":32323232, "bool": True,
          "LONG STRING": "123456789012345678901234567890123456789012345678901234567890",
          "SHORT STRING": "1234567890" },
        { True: "true", 8: False, "false": 0 }
        ]
    l = []
    l.extend(l0)
    l.append(l0)  # the primitive list itself is also one test value
    l.extend(l1)
    return l


def build_test_data(destdir):
    # One .golden file per test value, each holding its msgpack encoding.
    l = get_test_data_list()
    for i in range(len(l)):
        packer = msgpack.Packer()
        serialized = packer.pack(l[i])
        f = open(os.path.join(destdir, str(i) + '.golden'), 'wb')
        f.write(serialized)
        f.close()


def doRpcServer(port, stopTimeSec):
    class EchoHandler(object):
        def Echo123(self, msg1, msg2, msg3):
            return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
        def EchoStruct(self, msg):
            return ("%s" % msg)

    addr = msgpackrpc.Address('localhost', port)
    server = msgpackrpc.Server(EchoHandler())
    server.listen(addr)
    # run thread to stop it after stopTimeSec seconds if > 0
    if stopTimeSec > 0:
        def myStopRpcServer():
            server.stop()
        t = threading.Timer(stopTimeSec, myStopRpcServer)
        t.start()
    server.start()


def doRpcClientToPythonSvc(port):
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("Echo123", "A1", "B2", "C3")
    print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})


def doRpcClientToGoSvc(port):
    # print ">>>> port: ", port, " <<<<<"
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    # The Go service registers its handler under the TestRpcInt name.
    print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
    print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})


def doMain(args):
    # Dispatch on the subcommand in args[0]; see the usage string below.
    if len(args) == 2 and args[0] == "testdata":
        build_test_data(args[1])
    elif len(args) == 3 and args[0] == "rpc-server":
        doRpcServer(int(args[1]), int(args[2]))
    elif len(args) == 2 and args[0] == "rpc-client-python-service":
        doRpcClientToPythonSvc(int(args[1]))
    elif len(args) == 2 and args[0] == "rpc-client-go-service":
        doRpcClientToGoSvc(int(args[1]))
    else:
        print("Usage: msgpack_test.py " +
              "[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")


if __name__ == "__main__":
    doMain(sys.argv[1:])
apache-2.0
jt6562/XX-Net
python27/1.0/lib/darwin/gevent/threadpool.py
26
11176
# Copyright (c) 2012 Denis Bilenko. See LICENSE for details. from __future__ import with_statement, absolute_import import sys import os from gevent.hub import get_hub, getcurrent, sleep, integer_types from gevent.event import AsyncResult from gevent.greenlet import Greenlet from gevent.pool import IMap, IMapUnordered from gevent.lock import Semaphore from gevent._threading import Lock, Queue, start_new_thread # XXX apply_e is ugly and must not be needed # XXX apply() should re-raise everything __all__ = ['ThreadPool', 'ThreadResult'] class ThreadPool(object): def __init__(self, maxsize, hub=None): if hub is None: hub = get_hub() self.hub = hub self._maxsize = 0 self.manager = None self.pid = os.getpid() self.fork_watcher = hub.loop.fork(ref=False) self._init(maxsize) def _set_maxsize(self, maxsize): if not isinstance(maxsize, integer_types): raise TypeError('maxsize must be integer: %r' % (maxsize, )) if maxsize < 0: raise ValueError('maxsize must not be negative: %r' % (maxsize, )) difference = maxsize - self._maxsize self._semaphore.counter += difference self._maxsize = maxsize self.adjust() # make sure all currently blocking spawn() start unlocking if maxsize increased self._semaphore._start_notify() def _get_maxsize(self): return self._maxsize maxsize = property(_get_maxsize, _set_maxsize) def __repr__(self): return '<%s at 0x%x %s/%s/%s>' % (self.__class__.__name__, id(self), len(self), self.size, self.maxsize) def __len__(self): # XXX just do unfinished_tasks property return self.task_queue.unfinished_tasks def _get_size(self): return self._size def _set_size(self, size): if size < 0: raise ValueError('Size of the pool cannot be negative: %r' % (size, )) if size > self._maxsize: raise ValueError('Size of the pool cannot be bigger than maxsize: %r > %r' % (size, self._maxsize)) if self.manager: self.manager.kill() while self._size < size: self._add_thread() delay = 0.0001 while self._size > size: while self._size - size > self.task_queue.unfinished_tasks: 
self.task_queue.put(None) if getcurrent() is self.hub: break sleep(delay) delay = min(delay * 2, .05) if self._size: self.fork_watcher.start(self._on_fork) else: self.fork_watcher.stop() size = property(_get_size, _set_size) def _init(self, maxsize): self._size = 0 self._semaphore = Semaphore(1) self._lock = Lock() self.task_queue = Queue() self._set_maxsize(maxsize) def _on_fork(self): # fork() only leaves one thread; also screws up locks; # let's re-create locks and threads pid = os.getpid() if pid != self.pid: self.pid = pid # Do not mix fork() and threads; since fork() only copies one thread # all objects referenced by other threads has refcount that will never # go down to 0. self._init(self._maxsize) def join(self): delay = 0.0005 while self.task_queue.unfinished_tasks > 0: sleep(delay) delay = min(delay * 2, .05) def kill(self): self.size = 0 def _adjust_step(self): # if there is a possibility & necessity for adding a thread, do it while self._size < self._maxsize and self.task_queue.unfinished_tasks > self._size: self._add_thread() # while the number of threads is more than maxsize, kill one # we do not check what's already in task_queue - it could be all Nones while self._size - self._maxsize > self.task_queue.unfinished_tasks: self.task_queue.put(None) if self._size: self.fork_watcher.start(self._on_fork) else: self.fork_watcher.stop() def _adjust_wait(self): delay = 0.0001 while True: self._adjust_step() if self._size <= self._maxsize: return sleep(delay) delay = min(delay * 2, .05) def adjust(self): self._adjust_step() if not self.manager and self._size > self._maxsize: # might need to feed more Nones into the pool self.manager = Greenlet.spawn(self._adjust_wait) def _add_thread(self): with self._lock: self._size += 1 try: start_new_thread(self._worker, ()) except: with self._lock: self._size -= 1 raise def spawn(self, func, *args, **kwargs): while True: semaphore = self._semaphore semaphore.acquire() if semaphore is self._semaphore: break try: 
task_queue = self.task_queue result = AsyncResult() thread_result = ThreadResult(result, hub=self.hub) task_queue.put((func, args, kwargs, thread_result)) self.adjust() # rawlink() must be the last call result.rawlink(lambda *args: self._semaphore.release()) # XXX this _semaphore.release() is competing for order with get() # XXX this is not good, just make ThreadResult release the semaphore before doing anything else except: semaphore.release() raise return result def _decrease_size(self): if sys is None: return _lock = getattr(self, '_lock', None) if _lock is not None: with _lock: self._size -= 1 def _worker(self): need_decrease = True try: while True: task_queue = self.task_queue task = task_queue.get() try: if task is None: need_decrease = False self._decrease_size() # we want first to decrease size, then decrease unfinished_tasks # otherwise, _adjust might think there's one more idle thread that # needs to be killed return func, args, kwargs, result = task try: value = func(*args, **kwargs) except: exc_info = getattr(sys, 'exc_info', None) if exc_info is None: return result.handle_error((self, func), exc_info()) else: if sys is None: return result.set(value) del value finally: del func, args, kwargs, result, task finally: if sys is None: return task_queue.task_done() finally: if need_decrease: self._decrease_size() # XXX apply() should re-raise error by default # XXX because that's what builtin apply does # XXX check gevent.pool.Pool.apply and multiprocessing.Pool.apply def apply_e(self, expected_errors, function, args=None, kwargs=None): if args is None: args = () if kwargs is None: kwargs = {} success, result = self.spawn(wrap_errors, expected_errors, function, args, kwargs).get() if success: return result raise result def apply(self, func, args=None, kwds=None): """Equivalent of the apply() builtin function. 
It blocks till the result is ready.""" if args is None: args = () if kwds is None: kwds = {} return self.spawn(func, *args, **kwds).get() def apply_cb(self, func, args=None, kwds=None, callback=None): result = self.apply(func, args, kwds) if callback is not None: callback(result) return result def apply_async(self, func, args=None, kwds=None, callback=None): """A variant of the apply() method which returns a Greenlet object. If callback is specified then it should be a callable which accepts a single argument. When the result becomes ready callback is applied to it (unless the call failed).""" if args is None: args = () if kwds is None: kwds = {} return Greenlet.spawn(self.apply_cb, func, args, kwds, callback) def map(self, func, iterable): return list(self.imap(func, iterable)) def map_cb(self, func, iterable, callback=None): result = self.map(func, iterable) if callback is not None: callback(result) return result def map_async(self, func, iterable, callback=None): """ A variant of the map() method which returns a Greenlet object. If callback is specified then it should be a callable which accepts a single argument. 
""" return Greenlet.spawn(self.map_cb, func, iterable, callback) def imap(self, func, iterable): """An equivalent of itertools.imap()""" return IMap.spawn(func, iterable, spawn=self.spawn) def imap_unordered(self, func, iterable): """The same as imap() except that the ordering of the results from the returned iterator should be considered in arbitrary order.""" return IMapUnordered.spawn(func, iterable, spawn=self.spawn) class ThreadResult(object): def __init__(self, receiver, hub=None): if hub is None: hub = get_hub() self.receiver = receiver self.hub = hub self.value = None self.context = None self.exc_info = None self.async = hub.loop.async() self.async.start(self._on_async) def _on_async(self): self.async.stop() try: if self.exc_info is not None: try: self.hub.handle_error(self.context, *self.exc_info) finally: self.exc_info = None self.context = None self.async = None self.hub = None if self.receiver is not None: # XXX exception!!!? self.receiver(self) finally: self.receiver = None self.value = None def set(self, value): self.value = value self.async.send() def handle_error(self, context, exc_info): self.context = context self.exc_info = exc_info self.async.send() # link protocol: def successful(self): return True def wrap_errors(errors, function, args, kwargs): try: return True, function(*args, **kwargs) except errors: return False, sys.exc_info()[1]
bsd-2-clause
Turgon37/OpenVPN_UAM
OpenVPNUAM/pki/pki_filetree.py
1
6143
# -*- coding: utf8 -*- # This file is a part of OpenVPN-UAM # # Copyright (c) 2015 Thomas PAJON, Pierre GINDRAUD # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
"""PKI - Public Key Infrastructure File Tree program class This class is responsive of management of all SSL files """ # System imports import logging import os import OpenSSL from OpenSSL import crypto from OpenSSL.crypto import (_lib as lib, _ffi as ffi) # Project imports from ..config import Error # Global project declarations g_sys_log = logging.getLogger('openvpn-uam.pki.file') class PKIFileTree(object): """Build an instance of the pki model class This instance must be called in the openvpn uam program class """ def __init__(self, confparser): """Constructor : Build a new PKI API instance """ self.__cp = confparser # the root path of file tree self.__new_cert_directory = "certificates/" # the cipher to use for private key encryption self.__cipher = "DES3" def load(self): """Return a boolean indicates if PKI is ready to work or not This function check things required by PKI working and return a boolean that indicates if the PKI is ready to work with certificate or not @return [bool] The ready status """ # check PKI section in configuration file if not self.__cp.has_section(self.__cp.PKI_SECTION): g_sys_log.error('Missing pki section in configuration file') return False sec = self.__cp.getItems(self.__cp.PKI_SECTION) # read the new cert directory path from config file self.__new_cert_directory = self.__cp.get( self.__cp.PKI_SECTION, 'cert_directory', fallback=self.__new_cert_directory).rstrip('/') + '/' self.__cipher = self.__cp.get( self.__cp.PKI_SECTION, 'cert_key_cipher', fallback=self.__cipher) # BAD USAGE but no other solution if lib.EVP_get_cipherbyname(self.__cipher.encode()) == ffi.NULL: g_sys_log.fatal("Invalid cipher name") return False if not self.makePath(self.__new_cert_directory): g_sys_log.fatal("Certificate directory is invalid") return False return True # Tools def makePath(self, path): """Ensure that the given path is builded on the file system @param path [str] the path to check for @return [bool] True if the entire path is existing on the FS 
False if an error happen """ p = "" for folder in path.split('/'): if len(folder) == 0: continue p += folder + '/' if not os.path.exists(p): # create it g_sys_log.info("Creating directory '%s'", p) try: os.mkdir(p) except OSError as e: g_sys_log.error("File '%s' already exist", p) return False # if cert path already exist else: # check if it is a valid directory if not os.path.isdir(p): g_sys_log.error("File '%s' is not a directory", p) return False return True # API def storeBytesToFile(self, content, path): """Write a list of bytes into a file @param content [bytes/str] the content to write into the file @param path [str] the path to the file into """ f = None if os.path.exists(path): g_sys_log.error("Error during export of file '%s'.", path) return if isinstance(content, bytes): # open output file in binary mode f = open(path, "wb") elif isinstance(content, str): # open output file in text mode f = open(path, "wt") assert f is not None f.write(content) f.close() def storePKIUserCertificate(self, user, hostname, certificate, obj, password=None): """Store a given PKI object into a file @param user [User] the user to which the certificate is associated @param hostname [Hostname] the hostname to which the certificate is associated @param certificate [Certificate] the Certificate instance associated with the file @param obj [X509/PKey] The object that will be dump to the file @param password [str] OPTIONNAL : an optionnal passphrase to use for encrypt the output (if available) """ path = (self.__new_cert_directory + str(user.id) + "/" + str(hostname.id) + "/") self.makePath(path) bytes_ = None if isinstance(obj, OpenSSL.crypto.X509): bytes_ = crypto.dump_certificate(crypto.FILETYPE_PEM, obj) path += str(certificate.id) + ".crt" if isinstance(obj, OpenSSL.crypto.X509Req): bytes_ = crypto.dump_certificate_request(crypto.FILETYPE_PEM, obj) path += str(certificate.id) + ".csr" elif isinstance(obj, OpenSSL.crypto.PKey): if isinstance(password, str): bytes_ = 
crypto.dump_privatekey(crypto.FILETYPE_PEM, obj, self.__cipher, password.encode()) else: bytes_ = crypto.dump_privatekey(crypto.FILETYPE_PEM, obj) path += str(certificate.id) + ".key" assert bytes_ is not None self.storeBytesToFile(bytes_, path)
gpl-3.0
RNAcentral/rnacentral-import-pipeline
rnacentral_pipeline/databases/pirbase/fetch.py
1
1316
# -*- coding: utf-8 -*-

"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import urllib
from pathlib import Path
import typing as ty

from furl import furl
import requests
from bs4 import BeautifulSoup


def base_url(url: furl) -> furl:
    """Return *url* with its last path segment removed.

    This is the directory URL that relative links in the listing page
    are resolved against.
    """
    base = furl(url)
    base.path.segments = base.path.segments[:-1]
    return base


def extract_urls(base: furl, document: str) -> ty.List[furl]:
    """Extract all ``*.json.gz`` links from an HTML listing page.

    :param base: the URL that relative hrefs are resolved against
    :param document: the raw HTML of the listing page
    :return: the list of absolute URLs to the JSON dumps
    """
    # Name the parser explicitly: a bare BeautifulSoup(document) guesses
    # the parser, warns, and can behave differently across machines.
    soup = BeautifulSoup(document, "html.parser")
    urls = []
    links = soup.find("table").find_all("a")
    for link in links:
        href = link.get("href")
        # anchors without an href attribute yield None; skip them
        # instead of crashing on None.endswith
        if href and href.endswith("json.gz"):
            urls.append(base / href)
    return urls


def find_urls(url: furl) -> ty.List[furl]:
    """Fetch the listing page at *url* and return all JSON dump URLs.

    :raises requests.HTTPError: if the listing page cannot be fetched
    """
    response = requests.get(url.url)
    response.raise_for_status()
    return extract_urls(base_url(url), response.text)
apache-2.0
ATIX-AG/ansible
lib/ansible/modules/network/vyos/vyos_vlan.py
22
9218
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2017, Ansible by Red Hat, inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = """ --- module: vyos_vlan version_added: "2.5" author: "Trishna Guha (@trishnaguha)" short_description: Manage VLANs on VyOS network devices description: - This module provides declarative management of VLANs on VyOS network devices. notes: - Tested against VYOS 1.1.7 options: name: description: - Name of the VLAN. address: description: - Configure Virtual interface address. vlan_id: description: - ID of the VLAN. Range 0-4094. required: true interfaces: description: - List of interfaces that should be associated to the VLAN. required: true associated_interfaces: description: - This is a intent option and checks the operational state of the for given vlan C(name) for associated interfaces. If the value in the C(associated_interfaces) does not match with the operational state of vlan on device it will result in failure. version_added: "2.5" delay: description: - Delay the play should wait to check for declarative intent params values. default: 10 aggregate: description: List of VLANs definitions. purge: description: - Purge VLANs not defined in the I(aggregate) parameter. default: no state: description: - State of the VLAN configuration. 
default: present choices: ['present', 'absent'] extends_documentation_fragment: vyos """ EXAMPLES = """ - name: Create vlan vyos_vlan: vlan_id: 100 name: vlan-100 interfaces: eth1 state: present - name: Add interfaces to VLAN vyos_vlan: vlan_id: 100 interfaces: - eth1 - eth2 - name: Configure virtual interface address vyos_vlan: vlan_id: 100 interfaces: eth1 address: 172.26.100.37/24 - name: vlan interface config + intent vyos_vlan: vlan_id: 100 interfaces: eth0 associated_interfaces: - eth0 - name: vlan intent check vyos_vlan: vlan_id: 100 associated_interfaces: - eth3 - eth4 - name: Delete vlan vyos_vlan: vlan_id: 100 interfaces: eth1 state: absent """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always type: list sample: - set interfaces ethernet eth1 vif 100 description VLAN 100 - set interfaces ethernet eth1 vif 100 address 172.26.100.37/24 - delete interfaces ethernet eth1 vif 100 """ import re import time from copy import deepcopy from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.common.utils import remove_default_spec from ansible.module_utils.network.vyos.vyos import load_config, run_commands from ansible.module_utils.network.vyos.vyos import vyos_argument_spec def search_obj_in_list(vlan_id, lst): obj = list() for o in lst: if o['vlan_id'] == vlan_id: obj.append(o) return obj def map_obj_to_commands(updates, module): commands = list() want, have = updates purge = module.params['purge'] for w in want: vlan_id = w['vlan_id'] name = w['name'] address = w['address'] state = w['state'] interfaces = w['interfaces'] obj_in_have = search_obj_in_list(vlan_id, have) if state == 'absent': if obj_in_have: for obj in obj_in_have: for i in obj['interfaces']: commands.append('delete interfaces ethernet {0} vif {1}'.format(i, vlan_id)) elif state == 'present': if not obj_in_have: if w['interfaces'] and w['vlan_id']: for i in w['interfaces']: cmd = 'set interfaces ethernet 
{0} vif {1}'.format(i, vlan_id) if w['name']: commands.append(cmd + ' description {}'.format(name)) elif w['address']: commands.append(cmd + ' address {}'.format(address)) else: commands.append(cmd) if purge: for h in have: obj_in_want = search_obj_in_list(h['vlan_id'], want) if not obj_in_want: for i in h['interfaces']: commands.append('delete interfaces ethernet {0} vif {1}'.format(i, h['vlan_id'])) return commands def map_params_to_obj(module): obj = [] aggregate = module.params.get('aggregate') if aggregate: for item in aggregate: for key in item: if item.get(key) is None: item[key] = module.params[key] d = item.copy() d['vlan_id'] = str(d['vlan_id']) module._check_required_one_of(module.required_one_of, item) obj.append(d) else: obj.append({ 'vlan_id': str(module.params['vlan_id']), 'name': module.params['name'], 'address': module.params['address'], 'state': module.params['state'], 'interfaces': module.params['interfaces'], 'associated_interfaces': module.params['associated_interfaces'] }) return obj def map_config_to_obj(module): objs = [] interfaces = list() output = run_commands(module, 'show interfaces') lines = output[0].strip().splitlines()[3:] for l in lines: splitted_line = re.split(r'\s{2,}', l.strip()) obj = {} eth = splitted_line[0].strip("'") if eth.startswith('eth'): obj['interfaces'] = [] if '.' 
in eth: interface = eth.split('.')[0] obj['interfaces'].append(interface) obj['vlan_id'] = eth.split('.')[-1] else: obj['interfaces'].append(eth) obj['vlan_id'] = None if splitted_line[1].strip("'") != '-': obj['address'] = splitted_line[1].strip("'") if len(splitted_line) > 3: obj['name'] = splitted_line[3].strip("'") obj['state'] = 'present' objs.append(obj) return objs def check_declarative_intent_params(want, module, result): have = None obj_interface = list() is_delay = False for w in want: if w.get('associated_interfaces') is None: continue if result['changed'] and not is_delay: time.sleep(module.params['delay']) is_delay = True if have is None: have = map_config_to_obj(module) obj_in_have = search_obj_in_list(w['vlan_id'], have) if obj_in_have: for obj in obj_in_have: obj_interface.extend(obj['interfaces']) for w in want: if w.get('associated_interfaces') is None: continue for i in w['associated_interfaces']: if (set(obj_interface) - set(w['associated_interfaces'])) != set([]): module.fail_json(msg='Interface {0} not configured on vlan {1}'.format(i, w['vlan_id'])) def main(): """ main entry point for module execution """ element_spec = dict( vlan_id=dict(type='int', required=True), name=dict(), address=dict(), interfaces=dict(type='list'), associated_interfaces=dict(type='list'), delay=dict(default=10, type='int'), state=dict(default='present', choices=['present', 'absent']) ) aggregate_spec = deepcopy(element_spec) # remove default in aggregate spec, to handle common arguments remove_default_spec(aggregate_spec) argument_spec = dict( aggregate=dict(type='list', elements='dict', options=aggregate_spec), purge=dict(default=False, type='bool') ) argument_spec.update(element_spec) argument_spec.update(vyos_argument_spec) required_one_of = [['vlan_id', 'aggregate'], ['interfaces', 'associated_interfaces']] mutually_exclusive = [['vlan_id', 'aggregate']] module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, 
required_one_of=required_one_of, mutually_exclusive=mutually_exclusive) warnings = list() result = {'changed': False} if warnings: result['warnings'] = warnings want = map_params_to_obj(module) have = map_config_to_obj(module) commands = map_obj_to_commands((want, have), module) result['commands'] = commands if commands: commit = not module.check_mode load_config(module, commands, commit=commit) result['changed'] = True check_declarative_intent_params(want, module, result) module.exit_json(**result) if __name__ == '__main__': main()
gpl-3.0
xombiemp/CouchPotatoServer
libs/guessit/transfo/guess_bonus_features.py
150
2155
#!/usr/bin/env python # -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2012 Nicolas Wack <wackou@gmail.com> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import unicode_literals from guessit.transfo import found_property import logging log = logging.getLogger(__name__) def process(mtree): def previous_group(g): for leaf in mtree.unidentified_leaves()[::-1]: if leaf.node_idx < g.node_idx: return leaf def next_group(g): for leaf in mtree.unidentified_leaves(): if leaf.node_idx > g.node_idx: return leaf def same_group(g1, g2): return g1.node_idx[:2] == g2.node_idx[:2] bonus = [ node for node in mtree.leaves() if 'bonusNumber' in node.guess ] if bonus: bonusTitle = next_group(bonus[0]) if same_group(bonusTitle, bonus[0]): found_property(bonusTitle, 'bonusTitle', 0.8) filmNumber = [ node for node in mtree.leaves() if 'filmNumber' in node.guess ] if filmNumber: filmSeries = previous_group(filmNumber[0]) found_property(filmSeries, 'filmSeries', 0.9) title = next_group(filmNumber[0]) found_property(title, 'title', 0.9) season = [ node for node in mtree.leaves() if 'season' in node.guess ] if season and 'bonusNumber' in mtree.info: series = previous_group(season[0]) if same_group(series, season[0]): found_property(series, 'series', 0.9)
gpl-3.0
zhenv5/scikit-learn
examples/classification/plot_lda.py
70
2413
""" ==================================================================== Normal and Shrinkage Linear Discriminant Analysis for classification ==================================================================== Shows how shrinkage improves classification. """ from __future__ import division import numpy as np import matplotlib.pyplot as plt from sklearn.datasets import make_blobs from sklearn.discriminant_analysis import LinearDiscriminantAnalysis n_train = 20 # samples for training n_test = 200 # samples for testing n_averages = 50 # how often to repeat classification n_features_max = 75 # maximum number of features step = 4 # step size for the calculation def generate_data(n_samples, n_features): """Generate random blob-ish data with noisy features. This returns an array of input data with shape `(n_samples, n_features)` and an array of `n_samples` target labels. Only one feature contains discriminative information, the other features contain only noise. """ X, y = make_blobs(n_samples=n_samples, n_features=1, centers=[[-2], [2]]) # add non-discriminative features if n_features > 1: X = np.hstack([X, np.random.randn(n_samples, n_features - 1)]) return X, y acc_clf1, acc_clf2 = [], [] n_features_range = range(1, n_features_max + 1, step) for n_features in n_features_range: score_clf1, score_clf2 = 0, 0 for _ in range(n_averages): X, y = generate_data(n_train, n_features) clf1 = LinearDiscriminantAnalysis(solver='lsqr', shrinkage='auto').fit(X, y) clf2 = LinearDiscriminantAnalysis(solver='lsqr', shrinkage=None).fit(X, y) X, y = generate_data(n_test, n_features) score_clf1 += clf1.score(X, y) score_clf2 += clf2.score(X, y) acc_clf1.append(score_clf1 / n_averages) acc_clf2.append(score_clf2 / n_averages) features_samples_ratio = np.array(n_features_range) / n_train plt.plot(features_samples_ratio, acc_clf1, linewidth=2, label="Linear Discriminant Analysis with shrinkage", color='r') plt.plot(features_samples_ratio, acc_clf2, linewidth=2, label="Linear Discriminant 
Analysis", color='g') plt.xlabel('n_features / n_samples') plt.ylabel('Classification accuracy') plt.legend(loc=1, prop={'size': 12}) plt.suptitle('Linear Discriminant Analysis vs. \ shrinkage Linear Discriminant Analysis (1 discriminative feature)') plt.show()
bsd-3-clause
40223249-1/-w16b_test
static/Brython3.1.1-20150328-091302/Lib/xml/dom/xmlbuilder.py
873
12377
"""Implementation of the DOM Level 3 'LS-Load' feature.""" import copy import xml.dom from xml.dom.NodeFilter import NodeFilter __all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] class Options: """Features object that has variables set for each DOMBuilder feature. The DOMBuilder class uses an instance of this class to pass settings to the ExpatBuilder class. """ # Note that the DOMBuilder class in LoadSave constrains which of these # values can be set using the DOM Level 3 LoadSave feature. namespaces = 1 namespace_declarations = True validation = False external_parameter_entities = True external_general_entities = True external_dtd_subset = True validate_if_schema = False validate = False datatype_normalization = False create_entity_ref_nodes = True entities = True whitespace_in_element_content = True cdata_sections = True comments = True charset_overrides_xml_encoding = True infoset = False supported_mediatypes_only = False errorHandler = None filter = None class DOMBuilder: entityResolver = None errorHandler = None filter = None ACTION_REPLACE = 1 ACTION_APPEND_AS_CHILDREN = 2 ACTION_INSERT_AFTER = 3 ACTION_INSERT_BEFORE = 4 _legal_actions = (ACTION_REPLACE, ACTION_APPEND_AS_CHILDREN, ACTION_INSERT_AFTER, ACTION_INSERT_BEFORE) def __init__(self): self._options = Options() def _get_entityResolver(self): return self.entityResolver def _set_entityResolver(self, entityResolver): self.entityResolver = entityResolver def _get_errorHandler(self): return self.errorHandler def _set_errorHandler(self, errorHandler): self.errorHandler = errorHandler def _get_filter(self): return self.filter def _set_filter(self, filter): self.filter = filter def setFeature(self, name, state): if self.supportsFeature(name): state = state and 1 or 0 try: settings = self._settings[(_name_xform(name), state)] except KeyError: raise xml.dom.NotSupportedErr( "unsupported feature: %r" % (name,)) else: for name, value in settings: setattr(self._options, name, value) else: raise 
xml.dom.NotFoundErr("unknown feature: " + repr(name)) def supportsFeature(self, name): return hasattr(self._options, _name_xform(name)) def canSetFeature(self, name, state): key = (_name_xform(name), state and 1 or 0) return key in self._settings # This dictionary maps from (feature,value) to a list of # (option,value) pairs that should be set on the Options object. # If a (feature,value) setting is not in this dictionary, it is # not supported by the DOMBuilder. # _settings = { ("namespace_declarations", 0): [ ("namespace_declarations", 0)], ("namespace_declarations", 1): [ ("namespace_declarations", 1)], ("validation", 0): [ ("validation", 0)], ("external_general_entities", 0): [ ("external_general_entities", 0)], ("external_general_entities", 1): [ ("external_general_entities", 1)], ("external_parameter_entities", 0): [ ("external_parameter_entities", 0)], ("external_parameter_entities", 1): [ ("external_parameter_entities", 1)], ("validate_if_schema", 0): [ ("validate_if_schema", 0)], ("create_entity_ref_nodes", 0): [ ("create_entity_ref_nodes", 0)], ("create_entity_ref_nodes", 1): [ ("create_entity_ref_nodes", 1)], ("entities", 0): [ ("create_entity_ref_nodes", 0), ("entities", 0)], ("entities", 1): [ ("entities", 1)], ("whitespace_in_element_content", 0): [ ("whitespace_in_element_content", 0)], ("whitespace_in_element_content", 1): [ ("whitespace_in_element_content", 1)], ("cdata_sections", 0): [ ("cdata_sections", 0)], ("cdata_sections", 1): [ ("cdata_sections", 1)], ("comments", 0): [ ("comments", 0)], ("comments", 1): [ ("comments", 1)], ("charset_overrides_xml_encoding", 0): [ ("charset_overrides_xml_encoding", 0)], ("charset_overrides_xml_encoding", 1): [ ("charset_overrides_xml_encoding", 1)], ("infoset", 0): [], ("infoset", 1): [ ("namespace_declarations", 0), ("validate_if_schema", 0), ("create_entity_ref_nodes", 0), ("entities", 0), ("cdata_sections", 0), ("datatype_normalization", 1), ("whitespace_in_element_content", 1), ("comments", 1), 
("charset_overrides_xml_encoding", 1)], ("supported_mediatypes_only", 0): [ ("supported_mediatypes_only", 0)], ("namespaces", 0): [ ("namespaces", 0)], ("namespaces", 1): [ ("namespaces", 1)], } def getFeature(self, name): xname = _name_xform(name) try: return getattr(self._options, xname) except AttributeError: if name == "infoset": options = self._options return (options.datatype_normalization and options.whitespace_in_element_content and options.comments and options.charset_overrides_xml_encoding and not (options.namespace_declarations or options.validate_if_schema or options.create_entity_ref_nodes or options.entities or options.cdata_sections)) raise xml.dom.NotFoundErr("feature %s not known" % repr(name)) def parseURI(self, uri): if self.entityResolver: input = self.entityResolver.resolveEntity(None, uri) else: input = DOMEntityResolver().resolveEntity(None, uri) return self.parse(input) def parse(self, input): options = copy.copy(self._options) options.filter = self.filter options.errorHandler = self.errorHandler fp = input.byteStream if fp is None and options.systemId: import urllib.request fp = urllib.request.urlopen(input.systemId) return self._parse_bytestream(fp, options) def parseWithContext(self, input, cnode, action): if action not in self._legal_actions: raise ValueError("not a legal action") raise NotImplementedError("Haven't written this yet...") def _parse_bytestream(self, stream, options): import xml.dom.expatbuilder builder = xml.dom.expatbuilder.makeBuilder(options) return builder.parseFile(stream) def _name_xform(name): return name.lower().replace('-', '_') class DOMEntityResolver(object): __slots__ = '_opener', def resolveEntity(self, publicId, systemId): assert systemId is not None source = DOMInputSource() source.publicId = publicId source.systemId = systemId source.byteStream = self._get_opener().open(systemId) # determine the encoding if the transport provided it source.encoding = self._guess_media_encoding(source) # determine the base 
URI is we can import posixpath, urllib.parse parts = urllib.parse.urlparse(systemId) scheme, netloc, path, params, query, fragment = parts # XXX should we check the scheme here as well? if path and not path.endswith("/"): path = posixpath.dirname(path) + "/" parts = scheme, netloc, path, params, query, fragment source.baseURI = urllib.parse.urlunparse(parts) return source def _get_opener(self): try: return self._opener except AttributeError: self._opener = self._create_opener() return self._opener def _create_opener(self): import urllib.request return urllib.request.build_opener() def _guess_media_encoding(self, source): info = source.byteStream.info() if "Content-Type" in info: for param in info.getplist(): if param.startswith("charset="): return param.split("=", 1)[1].lower() class DOMInputSource(object): __slots__ = ('byteStream', 'characterStream', 'stringData', 'encoding', 'publicId', 'systemId', 'baseURI') def __init__(self): self.byteStream = None self.characterStream = None self.stringData = None self.encoding = None self.publicId = None self.systemId = None self.baseURI = None def _get_byteStream(self): return self.byteStream def _set_byteStream(self, byteStream): self.byteStream = byteStream def _get_characterStream(self): return self.characterStream def _set_characterStream(self, characterStream): self.characterStream = characterStream def _get_stringData(self): return self.stringData def _set_stringData(self, data): self.stringData = data def _get_encoding(self): return self.encoding def _set_encoding(self, encoding): self.encoding = encoding def _get_publicId(self): return self.publicId def _set_publicId(self, publicId): self.publicId = publicId def _get_systemId(self): return self.systemId def _set_systemId(self, systemId): self.systemId = systemId def _get_baseURI(self): return self.baseURI def _set_baseURI(self, uri): self.baseURI = uri class DOMBuilderFilter: """Element filter which can be used to tailor construction of a DOM instance. 
""" # There's really no need for this class; concrete implementations # should just implement the endElement() and startElement() # methods as appropriate. Using this makes it easy to only # implement one of them. FILTER_ACCEPT = 1 FILTER_REJECT = 2 FILTER_SKIP = 3 FILTER_INTERRUPT = 4 whatToShow = NodeFilter.SHOW_ALL def _get_whatToShow(self): return self.whatToShow def acceptNode(self, element): return self.FILTER_ACCEPT def startContainer(self, element): return self.FILTER_ACCEPT del NodeFilter class DocumentLS: """Mixin to create documents that conform to the load/save spec.""" async = False def _get_async(self): return False def _set_async(self, async): if async: raise xml.dom.NotSupportedErr( "asynchronous document loading is not supported") def abort(self): # What does it mean to "clear" a document? Does the # documentElement disappear? raise NotImplementedError( "haven't figured out what this means yet") def load(self, uri): raise NotImplementedError("haven't written this yet") def loadXML(self, source): raise NotImplementedError("haven't written this yet") def saveXML(self, snode): if snode is None: snode = self elif snode.ownerDocument is not self: raise xml.dom.WrongDocumentErr() return snode.toxml() class DOMImplementationLS: MODE_SYNCHRONOUS = 1 MODE_ASYNCHRONOUS = 2 def createDOMBuilder(self, mode, schemaType): if schemaType is not None: raise xml.dom.NotSupportedErr( "schemaType not yet supported") if mode == self.MODE_SYNCHRONOUS: return DOMBuilder() if mode == self.MODE_ASYNCHRONOUS: raise xml.dom.NotSupportedErr( "asynchronous builders are not supported") raise ValueError("unknown value for mode") def createDOMWriter(self): raise NotImplementedError( "the writer interface hasn't been written yet!") def createDOMInputSource(self): return DOMInputSource()
agpl-3.0
endolith/scipy
scipy/linalg/tests/test_special_matrices.py
12
26711
import pytest import numpy as np from numpy import arange, add, array, eye, copy, sqrt from numpy.testing import (assert_equal, assert_array_equal, assert_array_almost_equal, assert_allclose) from pytest import raises as assert_raises from scipy.fft import fft from scipy.special import comb from scipy.linalg import (toeplitz, hankel, circulant, hadamard, leslie, dft, companion, tri, triu, tril, kron, block_diag, helmert, hilbert, invhilbert, pascal, invpascal, fiedler, fiedler_companion, eigvals, convolution_matrix) from numpy.linalg import cond def get_mat(n): data = arange(n) data = add.outer(data, data) return data class TestTri: def test_basic(self): assert_equal(tri(4), array([[1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0], [1, 1, 1, 1]])) assert_equal(tri(4, dtype='f'), array([[1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0], [1, 1, 1, 1]], 'f')) def test_diag(self): assert_equal(tri(4, k=1), array([[1, 1, 0, 0], [1, 1, 1, 0], [1, 1, 1, 1], [1, 1, 1, 1]])) assert_equal(tri(4, k=-1), array([[0, 0, 0, 0], [1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0]])) def test_2d(self): assert_equal(tri(4, 3), array([[1, 0, 0], [1, 1, 0], [1, 1, 1], [1, 1, 1]])) assert_equal(tri(3, 4), array([[1, 0, 0, 0], [1, 1, 0, 0], [1, 1, 1, 0]])) def test_diag2d(self): assert_equal(tri(3, 4, k=2), array([[1, 1, 1, 0], [1, 1, 1, 1], [1, 1, 1, 1]])) assert_equal(tri(4, 3, k=-2), array([[0, 0, 0], [0, 0, 0], [1, 0, 0], [1, 1, 0]])) class TestTril: def test_basic(self): a = (100*get_mat(5)).astype('l') b = a.copy() for k in range(5): for l in range(k+1, 5): b[k, l] = 0 assert_equal(tril(a), b) def test_diag(self): a = (100*get_mat(5)).astype('f') b = a.copy() for k in range(5): for l in range(k+3, 5): b[k, l] = 0 assert_equal(tril(a, k=2), b) b = a.copy() for k in range(5): for l in range(max((k-1, 0)), 5): b[k, l] = 0 assert_equal(tril(a, k=-2), b) class TestTriu: def test_basic(self): a = (100*get_mat(5)).astype('l') b = a.copy() for k in range(5): for l in range(k+1, 5): b[l, k] = 0 assert_equal(triu(a), 
b) def test_diag(self): a = (100*get_mat(5)).astype('f') b = a.copy() for k in range(5): for l in range(max((k-1, 0)), 5): b[l, k] = 0 assert_equal(triu(a, k=2), b) b = a.copy() for k in range(5): for l in range(k+3, 5): b[l, k] = 0 assert_equal(triu(a, k=-2), b) class TestToeplitz: def test_basic(self): y = toeplitz([1, 2, 3]) assert_array_equal(y, [[1, 2, 3], [2, 1, 2], [3, 2, 1]]) y = toeplitz([1, 2, 3], [1, 4, 5]) assert_array_equal(y, [[1, 4, 5], [2, 1, 4], [3, 2, 1]]) def test_complex_01(self): data = (1.0 + arange(3.0)) * (1.0 + 1.0j) x = copy(data) t = toeplitz(x) # Calling toeplitz should not change x. assert_array_equal(x, data) # According to the docstring, x should be the first column of t. col0 = t[:, 0] assert_array_equal(col0, data) assert_array_equal(t[0, 1:], data[1:].conj()) def test_scalar_00(self): """Scalar arguments still produce a 2D array.""" t = toeplitz(10) assert_array_equal(t, [[10]]) t = toeplitz(10, 20) assert_array_equal(t, [[10]]) def test_scalar_01(self): c = array([1, 2, 3]) t = toeplitz(c, 1) assert_array_equal(t, [[1], [2], [3]]) def test_scalar_02(self): c = array([1, 2, 3]) t = toeplitz(c, array(1)) assert_array_equal(t, [[1], [2], [3]]) def test_scalar_03(self): c = array([1, 2, 3]) t = toeplitz(c, array([1])) assert_array_equal(t, [[1], [2], [3]]) def test_scalar_04(self): r = array([10, 2, 3]) t = toeplitz(1, r) assert_array_equal(t, [[1, 2, 3]]) class TestHankel: def test_basic(self): y = hankel([1, 2, 3]) assert_array_equal(y, [[1, 2, 3], [2, 3, 0], [3, 0, 0]]) y = hankel([1, 2, 3], [3, 4, 5]) assert_array_equal(y, [[1, 2, 3], [2, 3, 4], [3, 4, 5]]) class TestCirculant: def test_basic(self): y = circulant([1, 2, 3]) assert_array_equal(y, [[1, 3, 2], [2, 1, 3], [3, 2, 1]]) class TestHadamard: def test_basic(self): y = hadamard(1) assert_array_equal(y, [[1]]) y = hadamard(2, dtype=float) assert_array_equal(y, [[1.0, 1.0], [1.0, -1.0]]) y = hadamard(4) assert_array_equal(y, [[1, 1, 1, 1], [1, -1, 1, -1], [1, 1, -1, -1], [1, 
-1, -1, 1]]) assert_raises(ValueError, hadamard, 0) assert_raises(ValueError, hadamard, 5) class TestLeslie: def test_bad_shapes(self): assert_raises(ValueError, leslie, [[1, 1], [2, 2]], [3, 4, 5]) assert_raises(ValueError, leslie, [3, 4, 5], [[1, 1], [2, 2]]) assert_raises(ValueError, leslie, [1, 2], [1, 2]) assert_raises(ValueError, leslie, [1], []) def test_basic(self): a = leslie([1, 2, 3], [0.25, 0.5]) expected = array([[1.0, 2.0, 3.0], [0.25, 0.0, 0.0], [0.0, 0.5, 0.0]]) assert_array_equal(a, expected) class TestCompanion: def test_bad_shapes(self): assert_raises(ValueError, companion, [[1, 1], [2, 2]]) assert_raises(ValueError, companion, [0, 4, 5]) assert_raises(ValueError, companion, [1]) assert_raises(ValueError, companion, []) def test_basic(self): c = companion([1, 2, 3]) expected = array([ [-2.0, -3.0], [1.0, 0.0]]) assert_array_equal(c, expected) c = companion([2.0, 5.0, -10.0]) expected = array([ [-2.5, 5.0], [1.0, 0.0]]) assert_array_equal(c, expected) class TestBlockDiag: def test_basic(self): x = block_diag(eye(2), [[1, 2], [3, 4], [5, 6]], [[1, 2, 3]]) assert_array_equal(x, [[1, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0], [0, 0, 1, 2, 0, 0, 0], [0, 0, 3, 4, 0, 0, 0], [0, 0, 5, 6, 0, 0, 0], [0, 0, 0, 0, 1, 2, 3]]) def test_dtype(self): x = block_diag([[1.5]]) assert_equal(x.dtype, float) x = block_diag([[True]]) assert_equal(x.dtype, bool) def test_mixed_dtypes(self): actual = block_diag([[1]], [[1j]]) desired = np.array([[1, 0], [0, 1j]]) assert_array_equal(actual, desired) def test_scalar_and_1d_args(self): a = block_diag(1) assert_equal(a.shape, (1, 1)) assert_array_equal(a, [[1]]) a = block_diag([2, 3], 4) assert_array_equal(a, [[2, 3, 0], [0, 0, 4]]) def test_bad_arg(self): assert_raises(ValueError, block_diag, [[[1]]]) def test_no_args(self): a = block_diag() assert_equal(a.ndim, 2) assert_equal(a.nbytes, 0) def test_empty_matrix_arg(self): # regression test for gh-4596: check the shape of the result # for empty matrix inputs. 
Empty matrices are no longer ignored # (gh-4908) it is viewed as a shape (1, 0) matrix. a = block_diag([[1, 0], [0, 1]], [], [[2, 3], [4, 5], [6, 7]]) assert_array_equal(a, [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 2, 3], [0, 0, 4, 5], [0, 0, 6, 7]]) def test_zerosized_matrix_arg(self): # test for gh-4908: check the shape of the result for # zero-sized matrix inputs, i.e. matrices with shape (0,n) or (n,0). # note that [[]] takes shape (1,0) a = block_diag([[1, 0], [0, 1]], [[]], [[2, 3], [4, 5], [6, 7]], np.zeros([0, 2], dtype='int32')) assert_array_equal(a, [[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 2, 3, 0, 0], [0, 0, 4, 5, 0, 0], [0, 0, 6, 7, 0, 0]]) class TestKron: def test_basic(self): a = kron(array([[1, 2], [3, 4]]), array([[1, 1, 1]])) assert_array_equal(a, array([[1, 1, 1, 2, 2, 2], [3, 3, 3, 4, 4, 4]])) m1 = array([[1, 2], [3, 4]]) m2 = array([[10], [11]]) a = kron(m1, m2) expected = array([[10, 20], [11, 22], [30, 40], [33, 44]]) assert_array_equal(a, expected) class TestHelmert: def test_orthogonality(self): for n in range(1, 7): H = helmert(n, full=True) Id = np.eye(n) assert_allclose(H.dot(H.T), Id, atol=1e-12) assert_allclose(H.T.dot(H), Id, atol=1e-12) def test_subspace(self): for n in range(2, 7): H_full = helmert(n, full=True) H_partial = helmert(n) for U in H_full[1:, :].T, H_partial.T: C = np.eye(n) - np.full((n, n), 1 / n) assert_allclose(U.dot(U.T), C) assert_allclose(U.T.dot(U), np.eye(n-1), atol=1e-12) class TestHilbert: def test_basic(self): h3 = array([[1.0, 1/2., 1/3.], [1/2., 1/3., 1/4.], [1/3., 1/4., 1/5.]]) assert_array_almost_equal(hilbert(3), h3) assert_array_equal(hilbert(1), [[1.0]]) h0 = hilbert(0) assert_equal(h0.shape, (0, 0)) class TestInvHilbert: def test_basic(self): invh1 = array([[1]]) assert_array_equal(invhilbert(1, exact=True), invh1) assert_array_equal(invhilbert(1), invh1) invh2 = array([[4, -6], [-6, 12]]) assert_array_equal(invhilbert(2, exact=True), invh2) 
assert_array_almost_equal(invhilbert(2), invh2) invh3 = array([[9, -36, 30], [-36, 192, -180], [30, -180, 180]]) assert_array_equal(invhilbert(3, exact=True), invh3) assert_array_almost_equal(invhilbert(3), invh3) invh4 = array([[16, -120, 240, -140], [-120, 1200, -2700, 1680], [240, -2700, 6480, -4200], [-140, 1680, -4200, 2800]]) assert_array_equal(invhilbert(4, exact=True), invh4) assert_array_almost_equal(invhilbert(4), invh4) invh5 = array([[25, -300, 1050, -1400, 630], [-300, 4800, -18900, 26880, -12600], [1050, -18900, 79380, -117600, 56700], [-1400, 26880, -117600, 179200, -88200], [630, -12600, 56700, -88200, 44100]]) assert_array_equal(invhilbert(5, exact=True), invh5) assert_array_almost_equal(invhilbert(5), invh5) invh17 = array([ [289, -41616, 1976760, -46124400, 629598060, -5540462928, 33374693352, -143034400080, 446982500250, -1033026222800, 1774926873720, -2258997839280, 2099709530100, -1384423866000, 613101997800, -163493866080, 19835652870], [-41616, 7990272, -426980160, 10627061760, -151103534400, 1367702848512, -8410422724704, 36616806420480, -115857864064800, 270465047424000, -468580694662080, 600545887119360, -561522320049600, 372133135180800, -165537539406000, 44316454993920, -5395297580640], [1976760, -426980160, 24337869120, -630981792000, 9228108708000, -85267724461920, 532660105897920, -2348052711713280, 7504429831470000, -17664748409880000, 30818191841236800, -39732544853164800, 37341234283298400, -24857330514030000, 11100752642520000, -2982128117299200, 364182586693200], [-46124400, 10627061760, -630981792000, 16826181120000, -251209625940000, 2358021022156800, -14914482965141760, 66409571644416000, -214015221119700000, 507295338950400000, -890303319857952000, 1153715376477081600, -1089119333262870000, 727848632044800000, -326170262829600000, 87894302404608000, -10763618673376800], [629598060, -151103534400, 9228108708000, -251209625940000, 3810012660090000, -36210360321495360, 231343968720664800, -1038687206500944000, 
3370739732635275000, -8037460526495400000, 14178080368737885600, -18454939322943942000, 17489975175339030000, -11728977435138600000, 5272370630081100000, -1424711708039692800, 174908803442373000], [-5540462928, 1367702848512, -85267724461920, 2358021022156800, -36210360321495360, 347619459086355456, -2239409617216035264, 10124803292907663360, -33052510749726468000, 79217210949138662400, -140362995650505067440, 183420385176741672960, -174433352415381259200, 117339159519533952000, -52892422160973595200, 14328529177999196160, -1763080738699119840], [33374693352, -8410422724704, 532660105897920, -14914482965141760, 231343968720664800, -2239409617216035264, 14527452132196331328, -66072377044391477760, 216799987176909536400, -521925895055522958000, 928414062734059661760, -1217424500995626443520, 1161358898976091015200, -783401860847777371200, 354015418167362952000, -96120549902411274240, 11851820521255194480], [-143034400080, 36616806420480, -2348052711713280, 66409571644416000, -1038687206500944000, 10124803292907663360, -66072377044391477760, 302045152202932469760, -995510145200094810000, 2405996923185123840000, -4294704507885446054400, 5649058909023744614400, -5403874060541811254400, 3654352703663101440000, -1655137020003255360000, 450325202737117593600, -55630994283442749600], [446982500250, -115857864064800, 7504429831470000, -214015221119700000, 3370739732635275000, -33052510749726468000, 216799987176909536400, -995510145200094810000, 3293967392206196062500, -7988661659013106500000, 14303908928401362270000, -18866974090684772052000, 18093328327706957325000, -12263364009096700500000, 5565847995255512250000, -1517208935002984080000, 187754605706619279900], [-1033026222800, 270465047424000, -17664748409880000, 507295338950400000, -8037460526495400000, 79217210949138662400, -521925895055522958000, 2405996923185123840000, -7988661659013106500000, 19434404971634224000000, -34894474126569249192000, 46141453390504792320000, -44349976506971935800000, 
30121928988527376000000, -13697025107665828500000, 3740200989399948902400, -463591619028689580000], [1774926873720, -468580694662080, 30818191841236800, -890303319857952000, 14178080368737885600, -140362995650505067440, 928414062734059661760, -4294704507885446054400, 14303908928401362270000, -34894474126569249192000, 62810053427824648545600, -83243376594051600326400, 80177044485212743068000, -54558343880470209780000, 24851882355348879230400, -6797096028813368678400, 843736746632215035600], [-2258997839280, 600545887119360, -39732544853164800, 1153715376477081600, -18454939322943942000, 183420385176741672960, -1217424500995626443520, 5649058909023744614400, -18866974090684772052000, 46141453390504792320000, -83243376594051600326400, 110552468520163390156800, -106681852579497947388000, 72720410752415168870400, -33177973900974346080000, 9087761081682520473600, -1129631016152221783200], [2099709530100, -561522320049600, 37341234283298400, -1089119333262870000, 17489975175339030000, -174433352415381259200, 1161358898976091015200, -5403874060541811254400, 18093328327706957325000, -44349976506971935800000, 80177044485212743068000, -106681852579497947388000, 103125790826848015808400, -70409051543137015800000, 32171029219823375700000, -8824053728865840192000, 1098252376814660067000], [-1384423866000, 372133135180800, -24857330514030000, 727848632044800000, -11728977435138600000, 117339159519533952000, -783401860847777371200, 3654352703663101440000, -12263364009096700500000, 30121928988527376000000, -54558343880470209780000, 72720410752415168870400, -70409051543137015800000, 48142941226076592000000, -22027500987368499000000, 6049545098753157120000, -753830033789944188000], [613101997800, -165537539406000, 11100752642520000, -326170262829600000, 5272370630081100000, -52892422160973595200, 354015418167362952000, -1655137020003255360000, 5565847995255512250000, -13697025107665828500000, 24851882355348879230400, -33177973900974346080000, 32171029219823375700000, 
-22027500987368499000000, 10091416708498869000000, -2774765838662800128000, 346146444087219270000], [-163493866080, 44316454993920, -2982128117299200, 87894302404608000, -1424711708039692800, 14328529177999196160, -96120549902411274240, 450325202737117593600, -1517208935002984080000, 3740200989399948902400, -6797096028813368678400, 9087761081682520473600, -8824053728865840192000, 6049545098753157120000, -2774765838662800128000, 763806510427609497600, -95382575704033754400], [19835652870, -5395297580640, 364182586693200, -10763618673376800, 174908803442373000, -1763080738699119840, 11851820521255194480, -55630994283442749600, 187754605706619279900, -463591619028689580000, 843736746632215035600, -1129631016152221783200, 1098252376814660067000, -753830033789944188000, 346146444087219270000, -95382575704033754400, 11922821963004219300] ]) assert_array_equal(invhilbert(17, exact=True), invh17) assert_allclose(invhilbert(17), invh17.astype(float), rtol=1e-12) def test_inverse(self): for n in range(1, 10): a = hilbert(n) b = invhilbert(n) # The Hilbert matrix is increasingly badly conditioned, # so take that into account in the test c = cond(a) assert_allclose(a.dot(b), eye(n), atol=1e-15*c, rtol=1e-15*c) class TestPascal: cases = [ (1, array([[1]]), array([[1]])), (2, array([[1, 1], [1, 2]]), array([[1, 0], [1, 1]])), (3, array([[1, 1, 1], [1, 2, 3], [1, 3, 6]]), array([[1, 0, 0], [1, 1, 0], [1, 2, 1]])), (4, array([[1, 1, 1, 1], [1, 2, 3, 4], [1, 3, 6, 10], [1, 4, 10, 20]]), array([[1, 0, 0, 0], [1, 1, 0, 0], [1, 2, 1, 0], [1, 3, 3, 1]])), ] def check_case(self, n, sym, low): assert_array_equal(pascal(n), sym) assert_array_equal(pascal(n, kind='lower'), low) assert_array_equal(pascal(n, kind='upper'), low.T) assert_array_almost_equal(pascal(n, exact=False), sym) assert_array_almost_equal(pascal(n, exact=False, kind='lower'), low) assert_array_almost_equal(pascal(n, exact=False, kind='upper'), low.T) def test_cases(self): for n, sym, low in self.cases: self.check_case(n, 
sym, low) def test_big(self): p = pascal(50) assert_equal(p[-1, -1], comb(98, 49, exact=True)) def test_threshold(self): # Regression test. An early version of `pascal` returned an # array of type np.uint64 for n=35, but that data type is too small # to hold p[-1, -1]. The second assert_equal below would fail # because p[-1, -1] overflowed. p = pascal(34) assert_equal(2*p.item(-1, -2), p.item(-1, -1), err_msg="n = 34") p = pascal(35) assert_equal(2*p.item(-1, -2), p.item(-1, -1), err_msg="n = 35") def test_invpascal(): def check_invpascal(n, kind, exact): ip = invpascal(n, kind=kind, exact=exact) p = pascal(n, kind=kind, exact=exact) # Matrix-multiply ip and p, and check that we get the identity matrix. # We can't use the simple expression e = ip.dot(p), because when # n < 35 and exact is True, p.dtype is np.uint64 and ip.dtype is # np.int64. The product of those dtypes is np.float64, which loses # precision when n is greater than 18. Instead we'll cast both to # object arrays, and then multiply. 
e = ip.astype(object).dot(p.astype(object)) assert_array_equal(e, eye(n), err_msg="n=%d kind=%r exact=%r" % (n, kind, exact)) kinds = ['symmetric', 'lower', 'upper'] ns = [1, 2, 5, 18] for n in ns: for kind in kinds: for exact in [True, False]: check_invpascal(n, kind, exact) ns = [19, 34, 35, 50] for n in ns: for kind in kinds: check_invpascal(n, kind, True) def test_dft(): m = dft(2) expected = array([[1.0, 1.0], [1.0, -1.0]]) assert_array_almost_equal(m, expected) m = dft(2, scale='n') assert_array_almost_equal(m, expected/2.0) m = dft(2, scale='sqrtn') assert_array_almost_equal(m, expected/sqrt(2.0)) x = array([0, 1, 2, 3, 4, 5, 0, 1]) m = dft(8) mx = m.dot(x) fx = fft(x) assert_array_almost_equal(mx, fx) def test_fiedler(): f = fiedler([]) assert_equal(f.size, 0) f = fiedler([123.]) assert_array_equal(f, np.array([[0.]])) f = fiedler(np.arange(1, 7)) des = np.array([[0, 1, 2, 3, 4, 5], [1, 0, 1, 2, 3, 4], [2, 1, 0, 1, 2, 3], [3, 2, 1, 0, 1, 2], [4, 3, 2, 1, 0, 1], [5, 4, 3, 2, 1, 0]]) assert_array_equal(f, des) def test_fiedler_companion(): fc = fiedler_companion([]) assert_equal(fc.size, 0) fc = fiedler_companion([1.]) assert_equal(fc.size, 0) fc = fiedler_companion([1., 2.]) assert_array_equal(fc, np.array([[-2.]])) fc = fiedler_companion([1e-12, 2., 3.]) assert_array_almost_equal(fc, companion([1e-12, 2., 3.])) with assert_raises(ValueError): fiedler_companion([0, 1, 2]) fc = fiedler_companion([1., -16., 86., -176., 105.]) assert_array_almost_equal(eigvals(fc), np.array([7., 5., 3., 1.])) class TestConvolutionMatrix: """ Test convolution_matrix vs. numpy.convolve for various parameters. 
""" def create_vector(self, n, cpx): """Make a complex or real test vector of length n.""" x = np.linspace(-2.5, 2.2, n) if cpx: x = x + 1j*np.linspace(-1.5, 3.1, n) return x def test_bad_n(self): # n must be a positive integer with pytest.raises(ValueError, match='n must be a positive integer'): convolution_matrix([1, 2, 3], 0) def test_bad_first_arg(self): # first arg must be a 1d array, otherwise ValueError with pytest.raises(ValueError, match='one-dimensional'): convolution_matrix(1, 4) def test_empty_first_arg(self): # first arg must have at least one value with pytest.raises(ValueError, match=r'len\(a\)'): convolution_matrix([], 4) def test_bad_mode(self): # mode must be in ('full', 'valid', 'same') with pytest.raises(ValueError, match='mode.*must be one of'): convolution_matrix((1, 1), 4, mode='invalid argument') @pytest.mark.parametrize('cpx', [False, True]) @pytest.mark.parametrize('na', [1, 2, 9]) @pytest.mark.parametrize('nv', [1, 2, 9]) @pytest.mark.parametrize('mode', [None, 'full', 'valid', 'same']) def test_against_numpy_convolve(self, cpx, na, nv, mode): a = self.create_vector(na, cpx) v = self.create_vector(nv, cpx) if mode is None: y1 = np.convolve(v, a) A = convolution_matrix(a, nv) else: y1 = np.convolve(v, a, mode) A = convolution_matrix(a, nv, mode) y2 = A @ v assert_array_almost_equal(y1, y2)
bsd-3-clause
dcroc16/skunk_works
google_appengine/lib/django-1.3/tests/regressiontests/serializers_regress/models.py
49
7687
""" A test spanning all the capabilities of all the serializers. This class sets up a model for each model field type (except for image types, because of the PIL dependency). """ from django.db import models from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.contrib.localflavor.us.models import USStateField, PhoneNumberField # The following classes are for testing basic data # marshalling, including NULL values, where allowed. class BooleanData(models.Model): data = models.BooleanField() class CharData(models.Model): data = models.CharField(max_length=30, null=True) class DateData(models.Model): data = models.DateField(null=True) class DateTimeData(models.Model): data = models.DateTimeField(null=True) class DecimalData(models.Model): data = models.DecimalField(null=True, decimal_places=3, max_digits=5) class EmailData(models.Model): data = models.EmailField(null=True) class FileData(models.Model): data = models.FileField(null=True, upload_to='/foo/bar') class FilePathData(models.Model): data = models.FilePathField(null=True) class FloatData(models.Model): data = models.FloatField(null=True) class IntegerData(models.Model): data = models.IntegerField(null=True) class BigIntegerData(models.Model): data = models.BigIntegerField(null=True) # class ImageData(models.Model): # data = models.ImageField(null=True) class IPAddressData(models.Model): data = models.IPAddressField(null=True) class NullBooleanData(models.Model): data = models.NullBooleanField(null=True) class PhoneData(models.Model): data = PhoneNumberField(null=True) class PositiveIntegerData(models.Model): data = models.PositiveIntegerField(null=True) class PositiveSmallIntegerData(models.Model): data = models.PositiveSmallIntegerField(null=True) class SlugData(models.Model): data = models.SlugField(null=True) class SmallData(models.Model): data = models.SmallIntegerField(null=True) class TextData(models.Model): data = 
models.TextField(null=True) class TimeData(models.Model): data = models.TimeField(null=True) class USStateData(models.Model): data = USStateField(null=True) class Tag(models.Model): """A tag on an item.""" data = models.SlugField() content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() content_object = generic.GenericForeignKey() class Meta: ordering = ["data"] class GenericData(models.Model): data = models.CharField(max_length=30) tags = generic.GenericRelation(Tag) # The following test classes are all for validation # of related objects; in particular, forward, backward, # and self references. class Anchor(models.Model): """This is a model that can be used as something for other models to point at""" data = models.CharField(max_length=30) class Meta: ordering = ('id',) class UniqueAnchor(models.Model): """This is a model that can be used as something for other models to point at""" data = models.CharField(unique=True, max_length=30) class FKData(models.Model): data = models.ForeignKey(Anchor, null=True) class M2MData(models.Model): data = models.ManyToManyField(Anchor, null=True) class O2OData(models.Model): # One to one field can't be null here, since it is a PK. 
data = models.OneToOneField(Anchor, primary_key=True) class FKSelfData(models.Model): data = models.ForeignKey('self', null=True) class M2MSelfData(models.Model): data = models.ManyToManyField('self', null=True, symmetrical=False) class FKDataToField(models.Model): data = models.ForeignKey(UniqueAnchor, null=True, to_field='data') class FKDataToO2O(models.Model): data = models.ForeignKey(O2OData, null=True) class M2MIntermediateData(models.Model): data = models.ManyToManyField(Anchor, null=True, through='Intermediate') class Intermediate(models.Model): left = models.ForeignKey(M2MIntermediateData) right = models.ForeignKey(Anchor) extra = models.CharField(max_length=30, blank=True, default="doesn't matter") # The following test classes are for validating the # deserialization of objects that use a user-defined # field as the primary key. # Some of these data types have been commented out # because they can't be used as a primary key on one # or all database backends. class BooleanPKData(models.Model): data = models.BooleanField(primary_key=True) class CharPKData(models.Model): data = models.CharField(max_length=30, primary_key=True) # class DatePKData(models.Model): # data = models.DateField(primary_key=True) # class DateTimePKData(models.Model): # data = models.DateTimeField(primary_key=True) class DecimalPKData(models.Model): data = models.DecimalField(primary_key=True, decimal_places=3, max_digits=5) class EmailPKData(models.Model): data = models.EmailField(primary_key=True) # class FilePKData(models.Model): # data = models.FileField(primary_key=True, upload_to='/foo/bar') class FilePathPKData(models.Model): data = models.FilePathField(primary_key=True) class FloatPKData(models.Model): data = models.FloatField(primary_key=True) class IntegerPKData(models.Model): data = models.IntegerField(primary_key=True) # class ImagePKData(models.Model): # data = models.ImageField(primary_key=True) class IPAddressPKData(models.Model): data = 
models.IPAddressField(primary_key=True) # This is just a Boolean field with null=True, and we can't test a PK value of NULL. # class NullBooleanPKData(models.Model): # data = models.NullBooleanField(primary_key=True) class PhonePKData(models.Model): data = PhoneNumberField(primary_key=True) class PositiveIntegerPKData(models.Model): data = models.PositiveIntegerField(primary_key=True) class PositiveSmallIntegerPKData(models.Model): data = models.PositiveSmallIntegerField(primary_key=True) class SlugPKData(models.Model): data = models.SlugField(primary_key=True) class SmallPKData(models.Model): data = models.SmallIntegerField(primary_key=True) # class TextPKData(models.Model): # data = models.TextField(primary_key=True) # class TimePKData(models.Model): # data = models.TimeField(primary_key=True) class USStatePKData(models.Model): data = USStateField(primary_key=True) class ComplexModel(models.Model): field1 = models.CharField(max_length=10) field2 = models.CharField(max_length=10) field3 = models.CharField(max_length=10) # Tests for handling fields with pre_save functions, or # models with save functions that modify data class AutoNowDateTimeData(models.Model): data = models.DateTimeField(null=True, auto_now=True) class ModifyingSaveData(models.Model): data = models.IntegerField(null=True) def save(self): "A save method that modifies the data in the object" self.data = 666 super(ModifyingSaveData, self).save(raw) # Tests for serialization of models using inheritance. 
# Regression for #7202, #7350 class AbstractBaseModel(models.Model): parent_data = models.IntegerField() class Meta: abstract = True class InheritAbstractModel(AbstractBaseModel): child_data = models.IntegerField() class BaseModel(models.Model): parent_data = models.IntegerField() class InheritBaseModel(BaseModel): child_data = models.IntegerField() class ExplicitInheritBaseModel(BaseModel): parent = models.OneToOneField(BaseModel) child_data = models.IntegerField() class LengthModel(models.Model): data = models.IntegerField() def __len__(self): return self.data
mit
ogrisel/numpy
numpy/oldnumeric/precision.py
13
4323
""" Lifted from Precision.py. This is for compatibility only. The character strings are still for "new" NumPy which is the only Incompatibility with Numeric """ from __future__ import division, absolute_import, print_function __all__ = ['Character', 'Complex', 'Float', 'PrecisionError', 'PyObject', 'Int', 'UInt', 'UnsignedInt', 'UnsignedInteger', 'string', 'typecodes', 'zeros'] from .functions import zeros import string # for backwards compatibility typecodes = {'Character':'c', 'Integer':'bhil', 'UnsignedInteger':'BHIL', 'Float':'fd', 'Complex':'FD'} def _get_precisions(typecodes): lst = [] for t in typecodes: lst.append( (zeros( (1,), t ).itemsize*8, t) ) return lst def _fill_table(typecodes, table={}): for key, value in typecodes.items(): table[key] = _get_precisions(value) return table _code_table = _fill_table(typecodes) class PrecisionError(Exception): pass def _lookup(table, key, required_bits): lst = table[key] for bits, typecode in lst: if bits >= required_bits: return typecode raise PrecisionError(key + " of " + str(required_bits) + " bits not available on this system") Character = 'c' try: UnsignedInt8 = _lookup(_code_table, "UnsignedInteger", 8) UInt8 = UnsignedInt8 __all__.extend(['UnsignedInt8', 'UInt8']) except(PrecisionError): pass try: UnsignedInt16 = _lookup(_code_table, "UnsignedInteger", 16) UInt16 = UnsignedInt16 __all__.extend(['UnsignedInt16', 'UInt16']) except(PrecisionError): pass try: UnsignedInt32 = _lookup(_code_table, "UnsignedInteger", 32) UInt32 = UnsignedInt32 __all__.extend(['UnsignedInt32', 'UInt32']) except(PrecisionError): pass try: UnsignedInt64 = _lookup(_code_table, "UnsignedInteger", 64) UInt64 = UnsignedInt64 __all__.extend(['UnsignedInt64', 'UInt64']) except(PrecisionError): pass try: UnsignedInt128 = _lookup(_code_table, "UnsignedInteger", 128) UInt128 = UnsignedInt128 __all__.extend(['UnsignedInt128', 'UInt128']) except(PrecisionError): pass UInt = UnsignedInt = UnsignedInteger = 'u' try: Int0 = _lookup(_code_table, 
'Integer', 0) __all__.append('Int0') except(PrecisionError): pass try: Int8 = _lookup(_code_table, 'Integer', 8) __all__.append('Int8') except(PrecisionError): pass try: Int16 = _lookup(_code_table, 'Integer', 16) __all__.append('Int16') except(PrecisionError): pass try: Int32 = _lookup(_code_table, 'Integer', 32) __all__.append('Int32') except(PrecisionError): pass try: Int64 = _lookup(_code_table, 'Integer', 64) __all__.append('Int64') except(PrecisionError): pass try: Int128 = _lookup(_code_table, 'Integer', 128) __all__.append('Int128') except(PrecisionError): pass Int = 'l' try: Float0 = _lookup(_code_table, 'Float', 0) __all__.append('Float0') except(PrecisionError): pass try: Float8 = _lookup(_code_table, 'Float', 8) __all__.append('Float8') except(PrecisionError): pass try: Float16 = _lookup(_code_table, 'Float', 16) __all__.append('Float16') except(PrecisionError): pass try: Float32 = _lookup(_code_table, 'Float', 32) __all__.append('Float32') except(PrecisionError): pass try: Float64 = _lookup(_code_table, 'Float', 64) __all__.append('Float64') except(PrecisionError): pass try: Float128 = _lookup(_code_table, 'Float', 128) __all__.append('Float128') except(PrecisionError): pass Float = 'd' try: Complex0 = _lookup(_code_table, 'Complex', 0) __all__.append('Complex0') except(PrecisionError): pass try: Complex8 = _lookup(_code_table, 'Complex', 16) __all__.append('Complex8') except(PrecisionError): pass try: Complex16 = _lookup(_code_table, 'Complex', 32) __all__.append('Complex16') except(PrecisionError): pass try: Complex32 = _lookup(_code_table, 'Complex', 64) __all__.append('Complex32') except(PrecisionError): pass try: Complex64 = _lookup(_code_table, 'Complex', 128) __all__.append('Complex64') except(PrecisionError): pass try: Complex128 = _lookup(_code_table, 'Complex', 256) __all__.append('Complex128') except(PrecisionError): pass Complex = 'D' PyObject = 'O'
bsd-3-clause
Perferom/android_external_chromium_org
third_party/protobuf/python/google/protobuf/internal/cpp_message.py
223
23539
# Protocol Buffers - Google's data interchange format # Copyright 2008 Google Inc. All rights reserved. # http://code.google.com/p/protobuf/ # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Contains helper functions used to create protocol message classes from Descriptor objects at runtime backed by the protocol buffer C++ API. 
""" __author__ = 'petar@google.com (Petar Petrov)' import copy_reg import operator from google.protobuf.internal import _net_proto2___python from google.protobuf.internal import enum_type_wrapper from google.protobuf import message _LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED _LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL _CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE _TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE def GetDescriptorPool(): """Creates a new DescriptorPool C++ object.""" return _net_proto2___python.NewCDescriptorPool() _pool = GetDescriptorPool() def GetFieldDescriptor(full_field_name): """Searches for a field descriptor given a full field name.""" return _pool.FindFieldByName(full_field_name) def BuildFile(content): """Registers a new proto file in the underlying C++ descriptor pool.""" _net_proto2___python.BuildFile(content) def GetExtensionDescriptor(full_extension_name): """Searches for extension descriptor given a full field name.""" return _pool.FindExtensionByName(full_extension_name) def NewCMessage(full_message_name): """Creates a new C++ protocol message by its name.""" return _net_proto2___python.NewCMessage(full_message_name) def ScalarProperty(cdescriptor): """Returns a scalar property for the given descriptor.""" def Getter(self): return self._cmsg.GetScalar(cdescriptor) def Setter(self, value): self._cmsg.SetScalar(cdescriptor, value) return property(Getter, Setter) def CompositeProperty(cdescriptor, message_type): """Returns a Python property the given composite field.""" def Getter(self): sub_message = self._composite_fields.get(cdescriptor.name, None) if sub_message is None: cmessage = self._cmsg.NewSubMessage(cdescriptor) sub_message = message_type._concrete_class(__cmessage=cmessage) self._composite_fields[cdescriptor.name] = sub_message return sub_message return property(Getter) class RepeatedScalarContainer(object): """Container for repeated scalar fields.""" __slots__ = ['_message', 
'_cfield_descriptor', '_cmsg'] def __init__(self, msg, cfield_descriptor): self._message = msg self._cmsg = msg._cmsg self._cfield_descriptor = cfield_descriptor def append(self, value): self._cmsg.AddRepeatedScalar( self._cfield_descriptor, value) def extend(self, sequence): for element in sequence: self.append(element) def insert(self, key, value): values = self[slice(None, None, None)] values.insert(key, value) self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def remove(self, value): values = self[slice(None, None, None)] values.remove(value) self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def __setitem__(self, key, value): values = self[slice(None, None, None)] values[key] = value self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) def __getitem__(self, key): return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key) def __delitem__(self, key): self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key) def __len__(self): return len(self[slice(None, None, None)]) def __eq__(self, other): if self is other: return True if not operator.isSequenceType(other): raise TypeError( 'Can only compare repeated scalar fields against sequences.') # We are presumably comparing against some other sequence type. return other == self[slice(None, None, None)] def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def sort(self, *args, **kwargs): # Maintain compatibility with the previous interface. 
if 'sort_function' in kwargs: kwargs['cmp'] = kwargs.pop('sort_function') self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, sorted(self, *args, **kwargs)) def RepeatedScalarProperty(cdescriptor): """Returns a Python property the given repeated scalar field.""" def Getter(self): container = self._composite_fields.get(cdescriptor.name, None) if container is None: container = RepeatedScalarContainer(self, cdescriptor) self._composite_fields[cdescriptor.name] = container return container def Setter(self, new_value): raise AttributeError('Assignment not allowed to repeated field ' '"%s" in protocol message object.' % cdescriptor.name) doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name return property(Getter, Setter, doc=doc) class RepeatedCompositeContainer(object): """Container for repeated composite fields.""" __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg'] def __init__(self, msg, cfield_descriptor, subclass): self._message = msg self._cmsg = msg._cmsg self._subclass = subclass self._cfield_descriptor = cfield_descriptor def add(self, **kwargs): cmessage = self._cmsg.AddMessage(self._cfield_descriptor) return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs) def extend(self, elem_seq): """Extends by appending the given sequence of elements of the same type as this one, copying each individual message. """ for message in elem_seq: self.add().MergeFrom(message) def remove(self, value): # TODO(protocol-devel): This is inefficient as it needs to generate a # message pointer for each message only to do index(). Move this to a C++ # extension function. 
self.__delitem__(self[slice(None, None, None)].index(value)) def MergeFrom(self, other): for message in other[:]: self.add().MergeFrom(message) def __getitem__(self, key): cmessages = self._cmsg.GetRepeatedMessage( self._cfield_descriptor, key) subclass = self._subclass if not isinstance(cmessages, list): return subclass(__cmessage=cmessages, __owner=self._message) return [subclass(__cmessage=m, __owner=self._message) for m in cmessages] def __delitem__(self, key): self._cmsg.DeleteRepeatedField( self._cfield_descriptor, key) def __len__(self): return self._cmsg.FieldLength(self._cfield_descriptor) def __eq__(self, other): """Compares the current instance with another one.""" if self is other: return True if not isinstance(other, self.__class__): raise TypeError('Can only compare repeated composite fields against ' 'other repeated composite fields.') messages = self[slice(None, None, None)] other_messages = other[slice(None, None, None)] return messages == other_messages def __hash__(self): raise TypeError('unhashable object') def sort(self, cmp=None, key=None, reverse=False, **kwargs): # Maintain compatibility with the old interface. if cmp is None and 'sort_function' in kwargs: cmp = kwargs.pop('sort_function') # The cmp function, if provided, is passed the results of the key function, # so we only need to wrap one of them. if key is None: index_key = self.__getitem__ else: index_key = lambda i: key(self[i]) # Sort the list of current indexes by the underlying object. indexes = range(len(self)) indexes.sort(cmp=cmp, key=index_key, reverse=reverse) # Apply the transposition. for dest, src in enumerate(indexes): if dest == src: continue self._cmsg.SwapRepeatedFieldElements(self._cfield_descriptor, dest, src) # Don't swap the same value twice. 
indexes[src] = src def RepeatedCompositeProperty(cdescriptor, message_type): """Returns a Python property for the given repeated composite field.""" def Getter(self): container = self._composite_fields.get(cdescriptor.name, None) if container is None: container = RepeatedCompositeContainer( self, cdescriptor, message_type._concrete_class) self._composite_fields[cdescriptor.name] = container return container def Setter(self, new_value): raise AttributeError('Assignment not allowed to repeated field ' '"%s" in protocol message object.' % cdescriptor.name) doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name return property(Getter, Setter, doc=doc) class ExtensionDict(object): """Extension dictionary added to each protocol message.""" def __init__(self, msg): self._message = msg self._cmsg = msg._cmsg self._values = {} def __setitem__(self, extension, value): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_OPTIONAL or cdescriptor.cpp_type == _CPPTYPE_MESSAGE): raise TypeError('Extension %r is repeated and/or a composite type.' % ( extension.full_name,)) self._cmsg.SetScalar(cdescriptor, value) self._values[extension] = value def __getitem__(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_REPEATED and cdescriptor.cpp_type != _CPPTYPE_MESSAGE): return self._cmsg.GetScalar(cdescriptor) ext = self._values.get(extension, None) if ext is not None: return ext ext = self._CreateNewHandle(extension) self._values[extension] = ext return ext def ClearExtension(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' 
% (extension,)) self._cmsg.ClearFieldByDescriptor(extension._cdescriptor) if extension in self._values: del self._values[extension] def HasExtension(self, extension): from google.protobuf import descriptor if not isinstance(extension, descriptor.FieldDescriptor): raise KeyError('Bad extension %r.' % (extension,)) return self._cmsg.HasFieldByDescriptor(extension._cdescriptor) def _FindExtensionByName(self, name): """Tries to find a known extension with the specified name. Args: name: Extension full name. Returns: Extension field descriptor. """ return self._message._extensions_by_name.get(name, None) def _CreateNewHandle(self, extension): cdescriptor = extension._cdescriptor if (cdescriptor.label != _LABEL_REPEATED and cdescriptor.cpp_type == _CPPTYPE_MESSAGE): cmessage = self._cmsg.NewSubMessage(cdescriptor) return extension.message_type._concrete_class(__cmessage=cmessage) if cdescriptor.label == _LABEL_REPEATED: if cdescriptor.cpp_type == _CPPTYPE_MESSAGE: return RepeatedCompositeContainer( self._message, cdescriptor, extension.message_type._concrete_class) else: return RepeatedScalarContainer(self._message, cdescriptor) # This shouldn't happen! assert False return None def NewMessage(bases, message_descriptor, dictionary): """Creates a new protocol message *class*.""" _AddClassAttributesForNestedExtensions(message_descriptor, dictionary) _AddEnumValues(message_descriptor, dictionary) _AddDescriptors(message_descriptor, dictionary) return bases def InitMessage(message_descriptor, cls): """Constructs a new message instance (called before instance's __init__).""" cls._extensions_by_name = {} _AddInitMethod(message_descriptor, cls) _AddMessageMethods(message_descriptor, cls) _AddPropertiesForExtensions(message_descriptor, cls) copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__())) def _AddDescriptors(message_descriptor, dictionary): """Sets up a new protocol message class dictionary. 
Args: message_descriptor: A Descriptor instance describing this message type. dictionary: Class dictionary to which we'll add a '__slots__' entry. """ dictionary['__descriptors'] = {} for field in message_descriptor.fields: dictionary['__descriptors'][field.name] = GetFieldDescriptor( field.full_name) dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [ '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS'] def _AddEnumValues(message_descriptor, dictionary): """Sets class-level attributes for all enum fields defined in this message. Args: message_descriptor: Descriptor object for this message type. dictionary: Class dictionary that should be populated. """ for enum_type in message_descriptor.enum_types: dictionary[enum_type.name] = enum_type_wrapper.EnumTypeWrapper(enum_type) for enum_value in enum_type.values: dictionary[enum_value.name] = enum_value.number def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary): """Adds class attributes for the nested extensions.""" extension_dict = message_descriptor.extensions_by_name for extension_name, extension_field in extension_dict.iteritems(): assert extension_name not in dictionary dictionary[extension_name] = extension_field def _AddInitMethod(message_descriptor, cls): """Adds an __init__ method to cls.""" # Create and attach message field properties to the message class. # This can be done just once per message class, since property setters and # getters are passed the message instance. # This makes message instantiation extremely fast, and at the same time it # doesn't require the creation of property objects for each message instance, # which saves a lot of memory. 
for field in message_descriptor.fields: field_cdescriptor = cls.__descriptors[field.name] if field.label == _LABEL_REPEATED: if field.cpp_type == _CPPTYPE_MESSAGE: value = RepeatedCompositeProperty(field_cdescriptor, field.message_type) else: value = RepeatedScalarProperty(field_cdescriptor) elif field.cpp_type == _CPPTYPE_MESSAGE: value = CompositeProperty(field_cdescriptor, field.message_type) else: value = ScalarProperty(field_cdescriptor) setattr(cls, field.name, value) # Attach a constant with the field number. constant_name = field.name.upper() + '_FIELD_NUMBER' setattr(cls, constant_name, field.number) def Init(self, **kwargs): """Message constructor.""" cmessage = kwargs.pop('__cmessage', None) if cmessage: self._cmsg = cmessage else: self._cmsg = NewCMessage(message_descriptor.full_name) # Keep a reference to the owner, as the owner keeps a reference to the # underlying protocol buffer message. owner = kwargs.pop('__owner', None) if owner: self._owner = owner if message_descriptor.is_extendable: self.Extensions = ExtensionDict(self) else: # Reference counting in the C++ code is broken and depends on # the Extensions reference to keep this object alive during unit # tests (see b/4856052). Remove this once b/4945904 is fixed. self._HACK_REFCOUNTS = self self._composite_fields = {} for field_name, field_value in kwargs.iteritems(): field_cdescriptor = self.__descriptors.get(field_name, None) if not field_cdescriptor: raise ValueError('Protocol message has no "%s" field.' 
% field_name) if field_cdescriptor.label == _LABEL_REPEATED: if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: field_name = getattr(self, field_name) for val in field_value: field_name.add().MergeFrom(val) else: getattr(self, field_name).extend(field_value) elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: getattr(self, field_name).MergeFrom(field_value) else: setattr(self, field_name, field_value) Init.__module__ = None Init.__doc__ = None cls.__init__ = Init def _IsMessageSetExtension(field): """Checks if a field is a message set extension.""" return (field.is_extension and field.containing_type.has_options and field.containing_type.GetOptions().message_set_wire_format and field.type == _TYPE_MESSAGE and field.message_type == field.extension_scope and field.label == _LABEL_OPTIONAL) def _AddMessageMethods(message_descriptor, cls): """Adds the methods to a protocol message class.""" if message_descriptor.is_extendable: def ClearExtension(self, extension): self.Extensions.ClearExtension(extension) def HasExtension(self, extension): return self.Extensions.HasExtension(extension) def HasField(self, field_name): return self._cmsg.HasField(field_name) def ClearField(self, field_name): child_cmessage = None if field_name in self._composite_fields: child_field = self._composite_fields[field_name] del self._composite_fields[field_name] child_cdescriptor = self.__descriptors[field_name] # TODO(anuraag): Support clearing repeated message fields as well. if (child_cdescriptor.label != _LABEL_REPEATED and child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): child_field._owner = None child_cmessage = child_field._cmsg if child_cmessage is not None: self._cmsg.ClearField(field_name, child_cmessage) else: self._cmsg.ClearField(field_name) def Clear(self): cmessages_to_release = [] for field_name, child_field in self._composite_fields.iteritems(): child_cdescriptor = self.__descriptors[field_name] # TODO(anuraag): Support clearing repeated message fields as well. 
if (child_cdescriptor.label != _LABEL_REPEATED and child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): child_field._owner = None cmessages_to_release.append((child_cdescriptor, child_field._cmsg)) self._composite_fields.clear() self._cmsg.Clear(cmessages_to_release) def IsInitialized(self, errors=None): if self._cmsg.IsInitialized(): return True if errors is not None: errors.extend(self.FindInitializationErrors()); return False def SerializeToString(self): if not self.IsInitialized(): raise message.EncodeError( 'Message %s is missing required fields: %s' % ( self._cmsg.full_name, ','.join(self.FindInitializationErrors()))) return self._cmsg.SerializeToString() def SerializePartialToString(self): return self._cmsg.SerializePartialToString() def ParseFromString(self, serialized): self.Clear() self.MergeFromString(serialized) def MergeFromString(self, serialized): byte_size = self._cmsg.MergeFromString(serialized) if byte_size < 0: raise message.DecodeError('Unable to merge from string.') return byte_size def MergeFrom(self, msg): if not isinstance(msg, cls): raise TypeError( "Parameter to MergeFrom() must be instance of same class: " "expected %s got %s." 
% (cls.__name__, type(msg).__name__)) self._cmsg.MergeFrom(msg._cmsg) def CopyFrom(self, msg): self._cmsg.CopyFrom(msg._cmsg) def ByteSize(self): return self._cmsg.ByteSize() def SetInParent(self): return self._cmsg.SetInParent() def ListFields(self): all_fields = [] field_list = self._cmsg.ListFields() fields_by_name = cls.DESCRIPTOR.fields_by_name for is_extension, field_name in field_list: if is_extension: extension = cls._extensions_by_name[field_name] all_fields.append((extension, self.Extensions[extension])) else: field_descriptor = fields_by_name[field_name] all_fields.append( (field_descriptor, getattr(self, field_name))) all_fields.sort(key=lambda item: item[0].number) return all_fields def FindInitializationErrors(self): return self._cmsg.FindInitializationErrors() def __str__(self): return self._cmsg.DebugString() def __eq__(self, other): if self is other: return True if not isinstance(other, self.__class__): return False return self.ListFields() == other.ListFields() def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def __unicode__(self): # Lazy import to prevent circular import when text_format imports this file. from google.protobuf import text_format return text_format.MessageToString(self, as_utf8=True).decode('utf-8') # Attach the local methods to the message class. for key, value in locals().copy().iteritems(): if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): setattr(cls, key, value) # Static methods: def RegisterExtension(extension_handle): extension_handle.containing_type = cls.DESCRIPTOR cls._extensions_by_name[extension_handle.full_name] = extension_handle if _IsMessageSetExtension(extension_handle): # MessageSet extension. Also register under type name. 
cls._extensions_by_name[ extension_handle.message_type.full_name] = extension_handle cls.RegisterExtension = staticmethod(RegisterExtension) def FromString(string): msg = cls() msg.MergeFromString(string) return msg cls.FromString = staticmethod(FromString) def _AddPropertiesForExtensions(message_descriptor, cls): """Adds properties for all fields in this protocol message type.""" extension_dict = message_descriptor.extensions_by_name for extension_name, extension_field in extension_dict.iteritems(): constant_name = extension_name.upper() + '_FIELD_NUMBER' setattr(cls, constant_name, extension_field.number)
bsd-3-clause
veger/ansible
lib/ansible/plugins/action/sros_config.py
27
4241
# # Copyright 2016 Peter Sprygada <psprygada@ansible.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import re import time import glob from ansible.plugins.action.sros import ActionModule as _ActionModule from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.parse import urlsplit from ansible.utils.vars import merge_hash PRIVATE_KEYS_RE = re.compile('__.+__') class ActionModule(_ActionModule): def run(self, tmp=None, task_vars=None): if self._task.args.get('src'): try: self._handle_template() except ValueError as exc: return dict(failed=True, msg=to_text(exc)) result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect if self._task.args.get('backup') and result.get('__backup__'): # User requested backup and no error occurred in module. # NOTE: If there is a parameter error, _backup key may not be in results. 
filepath = self._write_backup(task_vars['inventory_hostname'], result['__backup__']) result['backup_path'] = filepath # strip out any keys that have two leading and two trailing # underscore characters for key in list(result): if PRIVATE_KEYS_RE.match(key): del result[key] return result def _get_working_path(self): cwd = self._loader.get_basedir() if self._task._role is not None: cwd = self._task._role._role_path return cwd def _write_backup(self, host, contents): backup_path = self._get_working_path() + '/backup' if not os.path.exists(backup_path): os.mkdir(backup_path) for fn in glob.glob('%s/%s*' % (backup_path, host)): os.remove(fn) tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time())) filename = '%s/%s_config.%s' % (backup_path, host, tstamp) open(filename, 'w').write(contents) return filename def _handle_template(self): src = self._task.args.get('src') working_path = self._get_working_path() if os.path.isabs(src) or urlsplit('src').scheme: source = src else: source = self._loader.path_dwim_relative(working_path, 'templates', src) if not source: source = self._loader.path_dwim_relative(working_path, src) if not os.path.exists(source): raise ValueError('path specified in src not found') try: with open(source, 'r') as f: template_data = to_text(f.read()) except IOError: return dict(failed=True, msg='unable to load src file') # Create a template search path in the following order: # [working_path, self_role_path, dependent_role_paths, dirname(source)] searchpath = [working_path] if self._task._role is not None: searchpath.append(self._task._role._role_path) if hasattr(self._task, "_block:"): dep_chain = self._task._block.get_dep_chain() if dep_chain is not None: for role in dep_chain: searchpath.append(role._role_path) searchpath.append(os.path.dirname(source)) self._templar.environment.loader.searchpath = searchpath self._task.args['src'] = self._templar.template(template_data)
gpl-3.0
taedori81/saleor
saleor/dashboard/product/views.py
7
10930
from __future__ import unicode_literals from django.contrib import messages from django.shortcuts import get_object_or_404, redirect from django.template.response import TemplateResponse from django.utils.http import is_safe_url from django.utils.translation import ugettext_lazy as _ from django.views.decorators.http import require_http_methods from ...product.models import Product, ProductImage, Stock, ProductAttribute, \ ProductVariant from ..utils import paginate from ..views import staff_member_required from . import forms @staff_member_required def product_list(request): products = Product.objects.prefetch_related('images').select_subclasses() form = forms.ProductClassForm(request.POST or None) if form.is_valid(): return redirect('dashboard:product-add') products, paginator = paginate(products, 30, request.GET.get('page')) ctx = {'form': form, 'products': products, 'paginator': paginator} return TemplateResponse(request, 'dashboard/product/list.html', ctx) @staff_member_required def product_create(request): product = Product() form = forms.ProductForm(request.POST or None, instance=product) if form.is_valid(): product = form.save() msg = _('Added product %s') % product messages.success(request, msg) return redirect('dashboard:variant-add', product_pk=product.pk) ctx = {'product_form': form, 'product': product} return TemplateResponse(request, 'dashboard/product/product_form.html', ctx) @staff_member_required def product_edit(request, pk): product = get_object_or_404( Product.objects.select_subclasses().prefetch_related('images', 'variants'), pk=pk) attributes = product.attributes.prefetch_related('values') images = product.images.all() variants = product.variants.select_subclasses() stock_items = Stock.objects.filter(variant__in=variants) form = forms.ProductForm(request.POST or None, instance=product) variants_delete_form = forms.VariantBulkDeleteForm() stock_delete_form = forms.StockBulkDeleteForm() if form.is_valid(): product = form.save() msg = _('Updated 
product %s') % product messages.success(request, msg) return redirect('dashboard:product-update', pk=product.pk) ctx = {'attributes': attributes, 'images': images, 'product_form': form, 'product': product, 'stock_delete_form': stock_delete_form, 'stock_items': stock_items, 'variants': variants, 'variants_delete_form': variants_delete_form} return TemplateResponse(request, 'dashboard/product/product_form.html', ctx) @staff_member_required def product_delete(request, pk): product = get_object_or_404(Product, pk=pk) if request.method == 'POST': product.delete() messages.success(request, _('Deleted product %s') % product) return redirect('dashboard:product-list') return TemplateResponse( request, 'dashboard/product/modal_product_confirm_delete.html', {'product': product}) @staff_member_required def stock_edit(request, product_pk, stock_pk=None): product = get_object_or_404(Product, pk=product_pk) if stock_pk: stock = get_object_or_404(Stock, pk=stock_pk) else: stock = Stock() form = forms.StockForm(request.POST or None, instance=stock, product=product) if form.is_valid(): form.save() messages.success(request, _('Saved stock')) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'form': form, 'product': product, 'stock': stock} return TemplateResponse(request, 'dashboard/product/stock_form.html', ctx) @staff_member_required def stock_delete(request, product_pk, stock_pk): product = get_object_or_404(Product, pk=product_pk) stock = get_object_or_404(Stock, pk=stock_pk) if request.method == 'POST': stock.delete() messages.success(request, _('Deleted stock')) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'product': product, 'stock': stock} return TemplateResponse( request, 'dashboard/product/stock_confirm_delete.html', ctx) @staff_member_required @require_http_methods(['POST']) def stock_bulk_delete(request, 
product_pk): product = get_object_or_404(Product, pk=product_pk) form = forms.StockBulkDeleteForm(request.POST) if form.is_valid(): form.delete() success_url = request.POST['success_url'] messages.success(request, _('Deleted stock')) if is_safe_url(success_url, request.get_host()): return redirect(success_url) return redirect('dashboard:product-update', pk=product.pk) @staff_member_required def product_image_edit(request, product_pk, img_pk=None): product = get_object_or_404(Product, pk=product_pk) if img_pk: product_image = get_object_or_404(product.images, pk=img_pk) else: product_image = ProductImage(product=product) form = forms.ProductImageForm(request.POST or None, request.FILES or None, instance=product_image) if form.is_valid(): product_image = form.save() if img_pk: msg = _('Updated image %s') % product_image.image.name else: msg = _('Added image %s') % product_image.image.name messages.success(request, msg) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'form': form, 'product': product, 'product_image': product_image} return TemplateResponse( request, 'dashboard/product/product_image_form.html', ctx) @staff_member_required def product_image_delete(request, product_pk, img_pk): product = get_object_or_404(Product, pk=product_pk) product_image = get_object_or_404(product.images, pk=img_pk) if request.method == 'POST': product_image.delete() messages.success( request, _('Deleted image %s') % product_image.image.name) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'product': product, 'product_image': product_image} return TemplateResponse( request, 'dashboard/product/modal_product_image_confirm_delete.html', ctx) @staff_member_required def variant_edit(request, product_pk, variant_pk=None): product = get_object_or_404(Product.objects.select_subclasses(), pk=product_pk) form_initial = {} if variant_pk: 
variant = get_object_or_404(product.variants.select_subclasses(), pk=variant_pk) else: variant = ProductVariant(product=product) form = forms.ProductVariantForm(request.POST or None, instance=variant, initial=form_initial) attribute_form = forms.VariantAttributeForm(request.POST or None, instance=variant) if all([form.is_valid(), attribute_form.is_valid()]): form.save() attribute_form.save() if variant_pk: msg = _('Updated variant %s') % variant.name else: msg = _('Added variant %s') % variant.name messages.success(request, msg) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'attribute_form': attribute_form, 'form': form, 'product': product, 'variant': variant} return TemplateResponse(request, 'dashboard/product/variant_form.html', ctx) @staff_member_required def variant_delete(request, product_pk, variant_pk): product = get_object_or_404(Product, pk=product_pk) variant = get_object_or_404(product.variants, pk=variant_pk) is_only_variant = product.variants.count() == 1 if request.method == 'POST': variant.delete() messages.success(request, _('Deleted variant %s') % variant.name) success_url = request.POST['success_url'] if is_safe_url(success_url, request.get_host()): return redirect(success_url) ctx = {'is_only_variant': is_only_variant, 'product': product, 'variant': variant} return TemplateResponse( request, 'dashboard/product/modal_product_variant_confirm_delete.html', ctx) @staff_member_required @require_http_methods(['POST']) def variants_bulk_delete(request, product_pk): product = get_object_or_404(Product, pk=product_pk) form = forms.VariantBulkDeleteForm(request.POST) if form.is_valid(): form.delete() success_url = request.POST['success_url'] messages.success(request, _('Deleted variants')) if is_safe_url(success_url, request.get_host()): return redirect(success_url) return redirect('dashboard:product-update', pk=product.pk) @staff_member_required def attribute_list(request): 
attributes = ProductAttribute.objects.prefetch_related('values') ctx = {'attributes': attributes} return TemplateResponse(request, 'dashboard/product/attributes/list.html', ctx) @staff_member_required def attribute_edit(request, pk=None): if pk: attribute = get_object_or_404(ProductAttribute, pk=pk) else: attribute = ProductAttribute() form = forms.ProductAttributeForm(request.POST or None, instance=attribute) formset = forms.AttributeChoiceValueFormset(request.POST or None, request.FILES or None, instance=attribute) if all([form.is_valid(), formset.is_valid()]): attribute = form.save() formset.save() msg = _('Updated attribute') if pk else _('Added attribute') messages.success(request, msg) return redirect('dashboard:product-attribute-update', pk=attribute.pk) ctx = {'attribute': attribute, 'form': form, 'formset': formset} return TemplateResponse(request, 'dashboard/product/attributes/form.html', ctx) @staff_member_required def attribute_delete(request, pk): attribute = get_object_or_404(ProductAttribute, pk=pk) if request.method == 'POST': attribute.delete() messages.success(request, _('Deleted attribute %s' % attribute.display)) return redirect('dashboard:product-attributes') ctx = {'attribute': attribute} return TemplateResponse( request, 'dashboard/product/attributes/modal_confirm_delete.html', ctx)
bsd-3-clause
anas-taji/purchase-workflow
__unported__/purchase_multi_picking/__openerp__.py
13
1765
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2012-2013 Agile Business Group sagl # (<http://www.agilebg.com>) # Copyright (C) 2012 Domsense srl (<http://www.domsense.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "Purchase multi picking", 'version': '0.2', 'category': 'Purchase Management', 'summary': "Multi Pickings from Purchase Orders", 'description': """ This module allows to generate several pickings from the same purchase order. You just have to indicate which order lines have to be grouped in the same picking. When confirming the order, for each group a picking is generated. """, 'author': "Agile Business Group,Odoo Community Association (OCA)", 'website': 'http://www.agilebg.com', 'license': 'AGPL-3', "depends": ['purchase', 'stock'], "data": [ 'purchase_view.xml', 'security/ir.model.access.csv', ], "demo": [], "active": False, "installable": False }
agpl-3.0
jcpowermac/ansible
lib/ansible/modules/packaging/os/dnf.py
23
17836
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright 2015 Cristian van Ee <cristian at cvee.org> # Copyright 2015 Igor Gnatenko <i.gnatenko.brain@gmail.com> # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'core'} DOCUMENTATION = ''' --- module: dnf version_added: 1.9 short_description: Manages packages with the I(dnf) package manager description: - Installs, upgrade, removes, and lists packages and groups with the I(dnf) package manager. options: name: description: - "A list of package names, or package specifier with version, like C(name-1.0) When using state=latest, this can be '*' which means run: dnf -y update. You can also pass a url or a local path to a rpm file." required: true default: null aliases: [] list: description: - Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples. required: false default: null state: description: - Whether to install (C(present), C(latest)), or remove (C(absent)) a package. required: false choices: [ "present", "latest", "absent" ] default: "present" enablerepo: description: - I(Repoid) of repositories to enable for the install/update operation. These repos will not persist beyond the transaction. When specifying multiple repos, separate them with a ",". required: false default: null aliases: [] disablerepo: description: - I(Repoid) of repositories to disable for the install/update operation. These repos will not persist beyond the transaction. When specifying multiple repos, separate them with a ",". required: false default: null aliases: [] conf_file: description: - The remote dnf configuration file to use for the transaction. 
required: false default: null aliases: [] disable_gpg_check: description: - Whether to disable the GPG checking of signatures of packages being installed. Has an effect only if state is I(present) or I(latest). required: false default: "no" choices: ["yes", "no"] aliases: [] installroot: description: - Specifies an alternative installroot, relative to which all packages will be installed. required: false version_added: "2.3" default: "/" autoremove: description: - If C(yes), removes all "leaf" packages from the system that were originally installed as dependencies of user-installed packages but which are no longer required by any such package. Should be used alone or when state is I(absent) required: false choices: [ "yes", "no" ] version_added: "2.4" notes: - When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option. requirements: - "python >= 2.6" - python-dnf - for the autoremove option you need dnf >= 2.0.1" author: - '"Igor Gnatenko (@ignatenkobrain)" <i.gnatenko.brain@gmail.com>' - '"Cristian van Ee (@DJMuggs)" <cristian at cvee.org>' - "Berend De Schouwer (github.com/berenddeschouwer)" ''' EXAMPLES = ''' - name: install the latest version of Apache dnf: name: httpd state: latest - name: remove the Apache package dnf: name: httpd state: absent - name: install the latest version of Apache from the testing repo dnf: name: httpd enablerepo: testing state: present - name: upgrade all packages dnf: name: "*" state: latest - name: install the nginx rpm from a remote repo dnf: name: 'http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm' state: present - name: install nginx rpm from a local file dnf: name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm state: present - name: install the 'Development tools' package group dnf: name: '@Development tools' state: present - name: Autoremove unneeded packages installed as dependencies dnf: 
autoremove: yes - name: Uninstall httpd but keep its dependencies dnf: name: httpd state: absent autoremove: no ''' import os try: import dnf import dnf.cli import dnf.const import dnf.exceptions import dnf.subject import dnf.util HAS_DNF = True except ImportError: HAS_DNF = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native from ansible.module_utils.six import PY2 from distutils.version import LooseVersion def _ensure_dnf(module): if not HAS_DNF: if PY2: package = 'python2-dnf' else: package = 'python3-dnf' if module.check_mode: module.fail_json(msg="`{0}` is not installed, but it is required" "for the Ansible dnf module.".format(package)) module.run_command(['dnf', 'install', '-y', package], check_rc=True) global dnf try: import dnf import dnf.cli import dnf.const import dnf.exceptions import dnf.subject import dnf.util except ImportError: module.fail_json(msg="Could not import the dnf python module. " "Please install `{0}` package.".format(package)) def _configure_base(module, base, conf_file, disable_gpg_check, installroot='/'): """Configure the dnf Base object.""" conf = base.conf # Turn off debug messages in the output conf.debuglevel = 0 # Set whether to check gpg signatures conf.gpgcheck = not disable_gpg_check # Don't prompt for user confirmations conf.assumeyes = True # Set installroot conf.installroot = installroot # Change the configuration file path if provided if conf_file: # Fail if we can't read the configuration file. 
if not os.access(conf_file, os.R_OK): module.fail_json( msg="cannot read configuration file", conf_file=conf_file) else: conf.config_file_path = conf_file # Read the configuration file conf.read() def _specify_repositories(base, disablerepo, enablerepo): """Enable and disable repositories matching the provided patterns.""" base.read_all_repos() repos = base.repos # Disable repositories for repo_pattern in disablerepo: for repo in repos.get_matching(repo_pattern): repo.disable() # Enable repositories for repo_pattern in enablerepo: for repo in repos.get_matching(repo_pattern): repo.enable() def _base(module, conf_file, disable_gpg_check, disablerepo, enablerepo, installroot): """Return a fully configured dnf Base object.""" base = dnf.Base() _configure_base(module, base, conf_file, disable_gpg_check, installroot) _specify_repositories(base, disablerepo, enablerepo) base.fill_sack(load_system_repo='auto') return base def _package_dict(package): """Return a dictionary of information for the package.""" # NOTE: This no longer contains the 'dnfstate' field because it is # already known based on the query type. 
result = { 'name': package.name, 'arch': package.arch, 'epoch': str(package.epoch), 'release': package.release, 'version': package.version, 'repo': package.repoid} result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format( **result) return result def list_items(module, base, command): """List package info based on the command.""" # Rename updates to upgrades if command == 'updates': command = 'upgrades' # Return the corresponding packages if command in ['installed', 'upgrades', 'available']: results = [ _package_dict(package) for package in getattr(base.sack.query(), command)()] # Return the enabled repository ids elif command in ['repos', 'repositories']: results = [ {'repoid': repo.id, 'state': 'enabled'} for repo in base.repos.iter_enabled()] # Return any matching packages else: packages = dnf.subject.Subject(command).get_best_query(base.sack) results = [_package_dict(package) for package in packages] module.exit_json(results=results) def _mark_package_install(module, base, pkg_spec): """Mark the package for install.""" try: base.install(pkg_spec) except dnf.exceptions.MarkingError: module.fail_json(msg="No package {0} available.".format(pkg_spec)) def _parse_spec_group_file(names): pkg_specs, grp_specs, filenames = [], [], [] for name in names: if name.endswith(".rpm"): filenames.append(name) elif name.startswith("@"): grp_specs.append(name[1:]) else: pkg_specs.append(name) return pkg_specs, grp_specs, filenames def _install_remote_rpms(base, filenames): if int(dnf.__version__.split(".")[0]) >= 2: pkgs = list(sorted(base.add_remote_rpms(list(filenames)), reverse=True)) else: pkgs = [] for filename in filenames: pkgs.append(base.add_remote_rpm(filename)) for pkg in pkgs: base.package_install(pkg) def ensure(module, base, state, names, autoremove): # Accumulate failures. Package management modules install what they can # and fail with a message about what they can't. 
failures = [] allow_erasing = False # Autoremove is called alone # Jump to remove path where base.autoremove() is run if not names and autoremove is not None: names = [] state = 'absent' if names == ['*'] and state == 'latest': base.upgrade_all() else: pkg_specs, group_specs, filenames = _parse_spec_group_file(names) if group_specs: base.read_comps() pkg_specs = [p.strip() for p in pkg_specs] filenames = [f.strip() for f in filenames] groups = [] environments = [] for group_spec in (g.strip() for g in group_specs): group = base.comps.group_by_pattern(group_spec) if group: groups.append(group.id) else: environment = base.comps.environment_by_pattern(group_spec) if environment: environments.append(environment.id) else: module.fail_json( msg="No group {0} available.".format(group_spec)) if state in ['installed', 'present']: # Install files. _install_remote_rpms(base, filenames) # Install groups. for group in groups: try: base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES) except dnf.exceptions.Error as e: # In dnf 2.0 if all the mandatory packages in a group do # not install, an error is raised. We want to capture # this but still install as much as possible. failures.append((group, to_native(e))) for environment in environments: try: base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES) except dnf.exceptions.Error as e: failures.append((environment, to_native(e))) # Install packages. for pkg_spec in pkg_specs: _mark_package_install(module, base, pkg_spec) elif state == 'latest': # "latest" is same as "installed" for filenames. _install_remote_rpms(base, filenames) for group in groups: try: try: base.group_upgrade(group) except dnf.exceptions.CompsError: # If not already installed, try to install. 
base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES) except dnf.exceptions.Error as e: failures.append((group, to_native(e))) for environment in environments: try: try: base.environment_upgrade(environment) except dnf.exceptions.CompsError: # If not already installed, try to install. base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES) except dnf.exceptions.Error as e: failures.append((environment, to_native(e))) for pkg_spec in pkg_specs: # best effort causes to install the latest package # even if not previously installed base.conf.best = True base.install(pkg_spec) else: # state == absent if autoremove is not None: base.conf.clean_requirements_on_remove = autoremove if filenames: module.fail_json( msg="Cannot remove paths -- please specify package name.") for group in groups: try: base.group_remove(group) except dnf.exceptions.CompsError: # Group is already uninstalled. pass for environment in environments: try: base.environment_remove(environment) except dnf.exceptions.CompsError: # Environment is already uninstalled. 
pass installed = base.sack.query().installed() for pkg_spec in pkg_specs: if installed.filter(name=pkg_spec): base.remove(pkg_spec) # Like the dnf CLI we want to allow recursive removal of dependent # packages allow_erasing = True if autoremove: base.autoremove() if not base.resolve(allow_erasing=allow_erasing): if failures: module.fail_json(msg='Failed to install some of the ' 'specified packages', failures=failures) module.exit_json(msg="Nothing to do") else: if module.check_mode: if failures: module.fail_json(msg='Failed to install some of the ' 'specified packages', failures=failures) module.exit_json(changed=True) base.download_packages(base.transaction.install_set) base.do_transaction() response = {'changed': True, 'results': []} for package in base.transaction.install_set: response['results'].append("Installed: {0}".format(package)) for package in base.transaction.remove_set: response['results'].append("Removed: {0}".format(package)) if failures: module.fail_json(msg='Failed to install some of the ' 'specified packages', failures=failures) module.exit_json(**response) def main(): """The main function.""" module = AnsibleModule( argument_spec=dict( name=dict(aliases=['pkg'], type='list'), state=dict( choices=[ 'absent', 'present', 'installed', 'removed', 'latest']), enablerepo=dict(type='list', default=[]), disablerepo=dict(type='list', default=[]), list=dict(), conf_file=dict(default=None, type='path'), disable_gpg_check=dict(default=False, type='bool'), installroot=dict(default='/', type='path'), autoremove=dict(type='bool'), ), required_one_of=[['name', 'list', 'autoremove']], mutually_exclusive=[['name', 'list'], ['autoremove', 'list']], supports_check_mode=True) params = module.params _ensure_dnf(module) # Check if autoremove is called correctly if params['autoremove'] is not None: if LooseVersion(dnf.__version__) < LooseVersion('2.0.1'): module.fail_json(msg="Autoremove requires dnf>=2.0.1. 
Current dnf version is %s" % dnf.__version__) if params['state'] not in ["absent", None]: module.fail_json(msg="Autoremove should be used alone or with state=absent") # Set state as installed by default # This is not set in AnsibleModule() because the following shouldn't happend # - dnf: autoremove=yes state=installed if params['state'] is None: params['state'] = 'installed' if params['list']: base = _base( module, params['conf_file'], params['disable_gpg_check'], params['disablerepo'], params['enablerepo'], params['installroot']) list_items(module, base, params['list']) else: # Note: base takes a long time to run so we want to check for failure # before running it. if not dnf.util.am_i_root(): module.fail_json(msg="This command has to be run under the root user.") base = _base( module, params['conf_file'], params['disable_gpg_check'], params['disablerepo'], params['enablerepo'], params['installroot']) ensure(module, base, params['state'], params['name'], params['autoremove']) if __name__ == '__main__': main()
gpl-3.0
germanovm/vdsm
client/vdsClientGluster.py
2
54610
# Copyright 2012 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Refer to the README and COPYING files for full details of the license # import hashlib import base64 import pprint as pp from vdsClient import service class GlusterService(service): def __init__(self): service.__init__(self) def do_glusterVolumeCreate(self, args): params = self._eqSplit(args) brickList = params.get('bricks', '').split(',') volumeName = params.get('volumeName', '') replicaCount = params.get('replica', '') stripeCount = params.get('stripe', '') transport = params.get('transport', '') transportList = transport.strip().split(',') if transport else [] force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeCreate(volumeName, brickList, replicaCount, stripeCount, transportList, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumesList(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteServer = params.get('remoteServer', '') status = self.s.glusterVolumesList(volumeName, remoteServer) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeStart(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') force = (params.get('force', 'no').upper() 
== 'YES') status = self.s.glusterVolumeStart(volumeName, force) return status['status']['code'], status['status']['message'] def do_glusterVolumeStop(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeStop(volumeName, force) return status['status']['code'], status['status']['message'] def do_glusterVolumeBrickAdd(self, args): params = self._eqSplit(args) brickList = params.get('bricks', '').split(',') volumeName = params.get('volumeName', '') replicaCount = params.get('replica', '') stripeCount = params.get('stripe', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeBrickAdd(volumeName, brickList, replicaCount, stripeCount, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeSet(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') option = params.get('option', '') value = params.get('value', '') status = self.s.glusterVolumeSet(volumeName, option, value) return status['status']['code'], status['status']['message'] def do_glusterVolumeSetOptionsList(self, args): status = self.s.glusterVolumeSetOptionsList() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeReset(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') option = params.get('option', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeReset(volumeName, option, force) return status['status']['code'], status['status']['message'] def do_glusterHostAdd(self, args): params = self._eqSplit(args) hostName = params.get('hostName', '') status = self.s.glusterHostAdd(hostName) return status['status']['code'], status['status']['message'] def do_glusterVolumeRebalanceStart(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') rebalanceType = 
params.get('rebalanceType', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeRebalanceStart(volumeName, rebalanceType, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeRebalanceStop(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeRebalanceStop(volumeName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeRebalanceStatus(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeRebalanceStatus(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeDelete(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeDelete(volumeName) return status['status']['code'], status['status']['message'] def do_glusterHostRemove(self, args): params = self._eqSplit(args) hostName = params.get('hostName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterHostRemove(hostName, force) return status['status']['code'], status['status']['message'] def do_glusterHostRemoveByUuid(self, args): params = self._eqSplit(args) hostUuid = params.get('hostUuid', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterHostRemoveByUuid(hostUuid, force) return status['status']['code'], status['status']['message'] def do_glusterVolumeReplaceBrickCommitForce(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') existingBrick = params.get('existingBrick', '') newBrick = params.get('newBrick', '') status = self.s.glusterVolumeReplaceBrickCommitForce(volumeName, existingBrick, newBrick) return status['status']['code'], status['status']['message'] def do_glusterVolumeRemoveBrickStart(self, args): 
params = self._eqSplit(args) volumeName = params.get('volumeName', '') brickList = params('bricks', '').split(',') replicaCount = params.get('replica', '') status = self.s.glusterVolumeRemoveBrickStart(volumeName, brickList, replicaCount) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeRemoveBrickStop(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') brickList = params.get('bricks', '').split(',') replicaCount = params.get('replica', '') status = self.s.glusterVolumeRemoveBrickStop(volumeName, brickList, replicaCount) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeRemoveBrickStatus(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') brickList = params.get('bricks', '').split(',') replicaCount = params.get('replica', '') status = self.s.glusterVolumeRemoveBrickStatus(volumeName, brickList, replicaCount) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeRemoveBrickCommit(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') brickList = params.get('bricks', '').split(',') replicaCount = params.get('replica', '') status = self.s.glusterVolumeRemoveBrickCommit(volumeName, brickList, replicaCount) return status['status']['code'], status['status']['message'] def do_glusterVolumeRemoveBrickForce(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') brickList = params.get('bricks', '').split(',') replicaCount = params.get('replica', '') status = self.s.glusterVolumeRemoveBrickForce(volumeName, brickList, replicaCount) return status['status']['code'], status['status']['message'] def do_glusterVolumeStatus(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') brick = params.get('brick', '') option = params.get('option', '') status = self.s.glusterVolumeStatus(volumeName, 
brick, option) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHostsList(self, args): status = self.s.glusterHostsList() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeProfileStart(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeProfileStart(volumeName) return status['status']['code'], status['status']['message'] def do_glusterVolumeProfileStop(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeProfileStop(volumeName) return status['status']['code'], status['status']['message'] def do_glusterVolumeProfileInfo(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') nfs = (params.get('nfs', 'no').upper() == 'YES') status = self.s.glusterVolumeProfileInfo(volumeName, nfs) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHooksList(self, args): status = self.s.glusterHooksList() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHookEnable(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') status = self.s.glusterHookEnable(glusterCmd, level, hookName) return status['status']['code'], status['status']['message'] def do_glusterHookDisable(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') status = self.s.glusterHookDisable(glusterCmd, level, hookName) return status['status']['code'], status['status']['message'] def do_glusterHookRead(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') status = self.s.glusterHookRead(glusterCmd, level, hookName) 
pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHookUpdate(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') hookFile = params.get('hookFile', '') with open(hookFile, 'r') as f: hookData = f.read() content = base64.b64encode(hookData) md5sum = hashlib.md5(hookData).hexdigest() status = self.s.glusterHookUpdate(glusterCmd, level, hookName, content, md5sum) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHookAdd(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') hookFile = params.get('hookFile', '') hookEnable = False if params.get('enable', '').upper() == 'TRUE': hookEnable = True with open(hookFile, 'r') as f: hookData = f.read() md5sum = hashlib.md5(hookData).hexdigest() content = base64.b64encode(hookData) status = self.s.glusterHookAdd(glusterCmd, level, hookName, content, md5sum, hookEnable) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHookRemove(self, args): params = self._eqSplit(args) glusterCmd = params.get('command', '') level = params.get('level', '') hookName = params.get('hookName', '') status = self.s.glusterHookRemove(glusterCmd, level, hookName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterHostUUIDGet(self, args): status = self.s.glusterHostUUIDGet() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterServicesAction(self, args): params = self._eqSplit(args) serviceNames = params.get('serviceNames', '').split(',') action = params.get('action', '') status = self.s.glusterServicesAction(serviceNames, action) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterServicesGet(self, args): params = 
self._eqSplit(args) serviceNames = params.get('serviceNames', '').split(',') status = self.s.glusterServicesGet(serviceNames) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterTasksList(self, args): params = self._eqSplit(args) taskIds = params.get('taskIds', '') if taskIds: taskIds = taskIds.split(",") else: taskIds = [] status = self.s.glusterTasksList(taskIds) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeStatsInfoGet(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeStatsInfoGet(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterStorageDevicesList(self, args): status = self.s.glusterStorageDevicesList() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionStart(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteUserName = params.get('remoteUserName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeGeoRepSessionStart(volumeName, remoteHost, remoteVolumeName, remoteUserName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionStop(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteUserName = params.get('remoteUserName', '') remoteVolumeName = params.get('remoteVolumeName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeGeoRepSessionStop(volumeName, remoteHost, remoteVolumeName, remoteUserName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionList(self, args): params = 
self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteUserName = params.get('remoteUserName', '') remoteVolumeName = params.get('remoteVolumeName', '') status = self.s.glusterVolumeGeoRepSessionList(volumeName, remoteHost, remoteVolumeName, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionStatus(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteUserName = params.get('remoteUserName', '') remoteVolumeName = params.get('remoteVolumeName', '') status = self.s.glusterVolumeGeoRepSessionStatus(volumeName, remoteHost, remoteVolumeName, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionPause(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') remoteUserName = params.get('remoteUserName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeGeoRepSessionPause(volumeName, remoteHost, remoteVolumeName, remoteUserName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionResume(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') remoteUserName = params.get('remoteUserName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeGeoRepSessionResume(volumeName, remoteHost, remoteVolumeName, remoteUserName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepConfigList(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') 
remoteHost = params.get('remoteHost', '') remoteUserName = params.get('remoteUserName', '') remoteVolumeName = params.get('remoteVolumeName', '') status = self.s.glusterVolumeGeoRepConfigList(volumeName, remoteHost, remoteVolumeName, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepConfigSet(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') remoteUserName = params.get('remoteUserName', '') optionName = params.get('optionName', '') optionValue = params.get('optionValue', '') status = self.s.glusterVolumeGeoRepConfigSet(volumeName, remoteHost, remoteVolumeName, optionName, optionValue, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepConfigReset(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') remoteUserName = params.get('remoteUserName', '') optionName = params.get('optionName', '') status = self.s.glusterVolumeGeoRepConfigReset(volumeName, remoteHost, remoteVolumeName, optionName, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeSnapshotCreate(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') snapName = params.get('snapName', '') snapDescription = params.get('snapDescription', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeSnapshotCreate(volumeName, snapName, snapDescription, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotDelete(self, args): params = self._eqSplit(args) snapName = params.get('snapName', '') status = self.s.glusterSnapshotDelete(snapName) pp.pprint(status) 
return status['status']['code'], status['status']['message'] def do_glusterVolumeSnapshotDeleteAll(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeSnapshotDeleteAll(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotActivate(self, args): params = self._eqSplit(args) snapName = params.get('snapName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterSnapshotActivate(snapName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotDeactivate(self, args): params = self._eqSplit(args) snapName = params.get('snapName', '') status = self.s.glusterSnapshotDeactivate(snapName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotRestore(self, args): params = self._eqSplit(args) snapName = params.get('snapName', '') status = self.s.glusterSnapshotRestore(snapName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeSnapshotConfigList(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeSnapshotConfigList(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotConfigList(self, args): status = self.s.glusterSnapshotConfigList() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeSnapshotConfigSet(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') optionName = params.get('optionName', '') optionValue = params.get('optionValue', '') status = self.s.glusterVolumeSnapshotConfigSet(volumeName, optionName, optionValue) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotConfigSet(self, args): params = self._eqSplit(args) optionName = 
params.get('optionName', '') optionValue = params.get('optionValue', '') status = self.s.glusterSnapshotConfigSet(optionName, optionValue) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeSnapshotList(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeSnapshotList(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterCreateBrick(self, args): params = self._eqSplit(args) devList = params.get('devices', '').split(',') brickName = params.get('brickName', '') mountPoint = params.get('mountPoint', '') fsType = params.get('fsType', '') raidType = params.get('raidType', '') raidParams = {} if raidType: raidParams['type'] = raidType.upper() raidParams['stripeSize'] = int(params.get('stripeSize', 0)) raidParams['pdCount'] = int(params.get('pdCount', 0)) status = self.s.glusterCreateBrick(brickName, mountPoint, devList, fsType, raidParams) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterGeoRepKeysGet(self, args): status = self.s.glusterGeoRepKeysGet() pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterGeoRepKeysUpdate(self, args): params = self._eqSplit(args) userName = params.get('userName', '') geoRepPubKeys = params.get('geoRepPubKeys', '') status = self.s.glusterGeoRepKeysUpdate(userName, geoRepPubKeys.split('\\n')) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterGeoRepMountBrokerSetup(self, args): params = self._eqSplit(args) remoteUserName = params.get('remoteUserName', '') remoteVolumeName = params.get('remoteVolumeName', '') remoteGroupName = params.get('remoteGroupName', '') partial = (params.get('partial', 'no').upper() == 'YES') status = self.s.glusterGeoRepMountBrokerSetup(remoteUserName, remoteGroupName, remoteVolumeName, partial) pp.pprint(status) return 
status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionCreate(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteUserName = params.get('remoteUserName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterVolumeGeoRepSessionCreate(volumeName, remoteHost, remoteVolumeName, remoteUserName, force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeGeoRepSessionDelete(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') remoteUserName = params.get('remoteUserName', '') remoteHost = params.get('remoteHost', '') remoteVolumeName = params.get('remoteVolumeName', '') status = self.s.glusterVolumeGeoRepSessionDelete(volumeName, remoteHost, remoteVolumeName, remoteUserName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterVolumeEmptyCheck(self, args): params = self._eqSplit(args) volumeName = params.get('volumeName', '') status = self.s.glusterVolumeEmptyCheck(volumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterMetaVolumeMount(self, args): params = self._eqSplit(args) metaVolumeName = params.get('metaVolumeName', '') status = self.s.glusterMetaVolumeMount(metaVolumeName) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotScheduleOverride(self, args): params = self._eqSplit(args) force = (params.get('force', 'no').upper() == 'YES') status = self.s.glusterSnapshotScheduleOverride(force) pp.pprint(status) return status['status']['code'], status['status']['message'] def do_glusterSnapshotScheduleReset(self, args): status = self.s.glusterSnapshotScheduleReset() pp.pprint(status) return status['status']['code'], status['status']['message'] def getGlusterCmdDict(serv): 
return \ {'glusterVolumeCreate': ( serv.do_glusterVolumeCreate, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>] [stripe=<count>] [transport={tcp|rdma}] ' '[force={yes|no}]\n\t' '<volume_name> is name of new volume', '<brick[,brick, ...]> is brick(s) which will be used to ' 'create volume', 'create gluster volume' )), 'glusterVolumesList': ( serv.do_glusterVolumesList, ('[volumeName=<volume_name>]\n\t' '[remoteServer=<remote_server]\n\t' '<volume_name> is existing volume name ' '<remote_server> is a remote host name ', 'list all or given gluster volume details' )), 'glusterVolumeStart': ( serv.do_glusterVolumeStart, ('volumeName=<volume_name> [force={yes|no}]\n\t' '<volume_name> is existing volume name', 'start gluster volume' )), 'glusterVolumeStop': ( serv.do_glusterVolumeStop, ('volumeName=<volume_name> [force={yes|no}]\n\t' '<volume_name> is existing volume name', 'stop gluster volume' )), 'glusterVolumeBrickAdd': ( serv.do_glusterVolumeBrickAdd, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>] [stripe=<count>] [force={yes|no}]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is new brick(s) which will be added to ' 'the volume', 'add bricks to gluster volume' )), 'glusterVolumeSet': ( serv.do_glusterVolumeSet, ('volumeName=<volume_name> option=<option> value=<value>\n\t' '<volume_name> is existing volume name\n\t' '<option> is volume option\n\t' '<value> is value to volume option', 'set gluster volume option' )), 'glusterVolumeSetOptionsList': ( serv.do_glusterVolumeSetOptionsList, ('', 'list gluster volume set options' )), 'glusterVolumeReset': ( serv.do_glusterVolumeReset, ('volumeName=<volume_name> [option=<option>] [force={yes|no}]\n\t' '<volume_name> is existing volume name', 'reset gluster volume or volume option' )), 'glusterHostAdd': ( serv.do_glusterHostAdd, ('hostName=<host>\n\t' '<host> is hostname or ip address of new server', 'add new server to gluster cluster' )), 
'glusterVolumeRebalanceStart': ( serv.do_glusterVolumeRebalanceStart, ('volumeName=<volume_name> [rebalanceType=fix-layout] ' '[force={yes|no}]\n\t' '<volume_name> is existing volume name', 'start volume rebalance' )), 'glusterVolumeRebalanceStop': ( serv.do_glusterVolumeRebalanceStop, ('volumeName=<volume_name> [force={yes|no}]\n\t' '<volume_name> is existing volume name', 'stop volume rebalance' )), 'glusterVolumeRebalanceStatus': ( serv.do_glusterVolumeRebalanceStatus, ('volumeName=<volume_name>\n\t' '<volume_name> is existing volume name', 'get volume rebalance status' )), 'glusterVolumeDelete': ( serv.do_glusterVolumeDelete, ('volumeName=<volume_name> \n\t<volume_name> is existing ' 'volume name', 'delete gluster volume' )), 'glusterHostRemove': ( serv.do_glusterHostRemove, ('hostName=<host> [force={yes|no}]\n\t' '<host> is hostname or ip address of a server in ' 'gluster cluster', 'remove server from gluster cluster' )), 'glusterHostRemoveByUuid': ( serv.do_glusterHostRemoveByUuid, ('hostUuid=<hostUuid> [force={yes|no}]\n\t' '<hostUuid> is UUID of the host in ' 'gluster cluster', 'remove server from gluster cluster' )), 'glusterVolumeReplaceBrickCommitForce': ( serv.do_glusterVolumeReplaceBrickCommitForce, ('volumeName=<volume_name> existingBrick=<existing_brick> ' 'newBrick=<new_brick> \n\t' '<volume_name> is existing volume name\n\t' '<existing_brick> is existing brick\n\t' '<new_brick> is new brick', 'commit volume replace brick' )), 'glusterVolumeRemoveBrickStart': ( serv.do_glusterVolumeRemoveBrickStart, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is existing brick(s)', 'start volume remove bricks' )), 'glusterVolumeRemoveBrickStop': ( serv.do_glusterVolumeRemoveBrickStop, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is existing brick(s)', 'stop volume 
remove bricks' )), 'glusterVolumeRemoveBrickStatus': ( serv.do_glusterVolumeRemoveBrickStatus, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is existing brick(s)', 'get volume remove bricks status' )), 'glusterVolumeRemoveBrickCommit': ( serv.do_glusterVolumeRemoveBrickCommit, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is existing brick(s)', 'commit volume remove bricks' )), 'glusterVolumeRemoveBrickForce': ( serv.do_glusterVolumeRemoveBrickForce, ('volumeName=<volume_name> bricks=<brick[,brick, ...]> ' '[replica=<count>]\n\t' '<volume_name> is existing volume name\n\t' '<brick[,brick, ...]> is existing brick(s)', 'force volume remove bricks' )), 'glusterVolumeStatus': ( serv.do_glusterVolumeStatus, ('volumeName=<volume_name> [brick=<existing_brick>] ' '[option={detail | clients | mem}]\n\t' '<volume_name> is existing volume name\n\t' 'option=detail gives brick detailed status\n\t' 'option=clients gives clients status\n\t' 'option=mem gives memory status\n\t', 'get volume status of given volume with its all brick or ' 'specified brick' )), 'glusterHostsList': ( serv.do_glusterHostsList, ('', 'list host info' )), 'glusterVolumeProfileStart': ( serv.do_glusterVolumeProfileStart, ('volumeName=<volume_name>\n\t' '<volume_name> is existing volume name', 'start gluster volume profile' )), 'glusterVolumeProfileStop': ( serv.do_glusterVolumeProfileStop, ('volumeName=<volume_name>\n\t' '<volume_name> is existing volume name', 'stop gluster volume profile' )), 'glusterVolumeProfileInfo': ( serv.do_glusterVolumeProfileInfo, ('volumeName=<volume_name> [nfs={yes|no}]\n\t' '<volume_name> is existing volume name', 'get gluster volume profile info' )), 'glusterHooksList': ( serv.do_glusterHooksList, ('', 'list hooks info' )), 'glusterHookEnable': ( serv.do_glusterHookEnable, 
('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name>\n\t' '<hook_name> is an existing hook name', 'Enable hook script' )), 'glusterHookDisable': ( serv.do_glusterHookDisable, ('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name>\n\t' '<hook_name> is an existing hook name', 'Disable hook script' )), 'glusterHookRead': ( serv.do_glusterHookRead, ('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name>\n\t' '<hook_name> is an existing hook name', 'Read hook script' )), 'glusterHookUpdate': ( serv.do_glusterHookUpdate, ('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name> hookFile=<hook_file>\n\t' '<hook_name> is an existing hook name', '<hook_file> is the input hook file name contains hook data', 'Update hook script' )), 'glusterHookAdd': ( serv.do_glusterHookAdd, ('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name> hookFile=<hook_file> ' ' enable={true|false}\n\t' '<hook_name> is a new hook name', '<hook_file> is the input hook file name contains hook data', 'Add hook script' )), 'glusterHookRemove': ( serv.do_glusterHookRemove, ('command=<gluster_command> level={pre|post} ' 'hookName=<hook_name>\n\t' '<hook_name> is an existing hook name', 'Remove hook script' )), 'glusterHostUUIDGet': ( serv.do_glusterHostUUIDGet, ('', 'get gluster UUID of the host' )), 'glusterServicesAction': ( serv.do_glusterServicesAction, ('serviceNames=<service1[,service2,..]> action=<action>\n\t', 'serviceNames - list of services on which action needs ' 'to be performed', 'action can be start/stop or restart', 'Performs start/stop/restart of gluster services' )), 'glusterServicesGet': ( serv.do_glusterServicesGet, ('serviceNames=<service1[,service2,..]>', 'Returns status of all gluster services if serviceName is ' 'not set' '(swift, glusterd, smb, memcached)' )), 'glusterTasksList': ( serv.do_glusterTasksList, ('[taskIds=<task_id1,task_id2,..>]', 'list all or given gluster tasks' )), 'glusterVolumeStatsInfoGet': ( 
serv.do_glusterVolumeStatsInfoGet, ('volumeName=<volume name>', 'Returns total, free and used space(bytes) of gluster volume' )), 'glusterStorageDevicesList': ( serv.do_glusterStorageDevicesList, ('', 'list all disk info of the host' )), 'glusterVolumeGeoRepSessionStart': ( serv.do_glusterVolumeGeoRepSessionStart, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '[force={yes|no}]\n\t' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'start the geo-replication session' )), 'glusterVolumeGeoRepSessionStop': ( serv.do_glusterVolumeGeoRepSessionStop, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '[force={yes|no}]\n\t' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'stop the geo-replication session' )), 'glusterVolumeGeoRepSessionList': ( serv.do_glusterVolumeGeoRepSessionList, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'list the geo-replication sessions' )), 'glusterVolumeGeoRepSessionStatus': ( serv.do_glusterVolumeGeoRepSessionStatus, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '<remote_host> is IP/dns name of host in remote Gluster cluster.' 
'<remote_volume_name> volume name in remote gluster cluster.', 'get the geo-replication session status' )), 'glusterVolumeGeoRepSessionPause': ( serv.do_glusterVolumeGeoRepSessionPause, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '[force={yes|no}]\n\t' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'pause the geo-replication session' )), 'glusterVolumeGeoRepSessionResume': ( serv.do_glusterVolumeGeoRepSessionResume, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '[force={yes|no}]\n\t' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'resume the geo-replication session' )), 'glusterVolumeGeoRepConfigList': ( serv.do_glusterVolumeGeoRepConfigList, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'remoteUserName=<remote_user_name>' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'Get the list of geo-replication configuration for a session' )), 'glusterVolumeGeoRepConfigSet': ( serv.do_glusterVolumeGeoRepConfigSet, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'optionName=<option_name>' 'optionValue=<option_value>' 'remoteUserName=<remote_user_name>' '<remote_host> is IP/dns name of host in remote Gluster cluster.' 
'<remote_volume_name> volume name in remote gluster cluster.', 'set a geo-replication configuration for a session' )), 'glusterVolumeGeoRepConfigReset': ( serv.do_glusterVolumeGeoRepConfigReset, ('volumeName=<volume_name> ' 'remoteHost=<remote_host> ' 'remoteVolumeName=<remote_volume_name> ' 'optionName=<option_name>' 'remoteUserName=<remote_user_name>' '<remote_host> is IP/dns name of host in remote Gluster cluster.' '<remote_volume_name> volume name in remote gluster cluster.', 'Reset a geo-replication configuration to its default value' )), 'glusterVolumeSnapshotCreate': ( serv.do_glusterVolumeSnapshotCreate, ('volumeName=<volume_name> snapName=<snap_name> ' '[snapDescription=<description of snapshot>] ' '[force={yes|no}]', 'create gluster volume snapshot' )), 'glusterVolumeSnapshotDeleteAll': ( serv.do_glusterVolumeSnapshotDeleteAll, ('volumeName=<volume name>', 'delete all snapshots for given volume' )), 'glusterVolumeSnapshotConfigList': ( serv.do_glusterVolumeSnapshotConfigList, ('volumeName=<volume_name>', 'get gluster volume snapshot configuration' )), 'glusterVolumeSnapshotConfigSet': ( serv.do_glusterVolumeSnapshotConfigSet, ('volumeName=<volume_name>' 'optionName=<option_name>' 'optionValue=<option_value>', 'Set gluster snapshot configuration at volume leval' )), 'glusterSnapshotDelete': ( serv.do_glusterSnapshotDelete, ('snapName=<snap_name>', 'delete gluster volume snapshot' )), 'glusterSnapshotActivate': ( serv.do_glusterSnapshotActivate, ('snapName=<snap_name> ' '[force={yes|no}]', 'activate snapshot' )), 'glusterSnapshotDeactivate': ( serv.do_glusterSnapshotDeactivate, ('snapName=<snap_name>', 'de-activate snapshot' )), 'glusterSnapshotRestore': ( serv.do_glusterSnapshotRestore, ('snapName=snap_name', 'restore snapshot' )), 'glusterSnapshotConfigList': ( serv.do_glusterSnapshotConfigList, ('', 'get gluster volume snapshot configuration' )), 'glusterSnapshotConfigSet': ( serv.do_glusterSnapshotConfigSet, ('optionName=<option_name>' 
'optionValue=<option_value>', 'Set gluster snapshot configuration at cluster leval' )), 'glusterVolumeSnapshotList': ( serv.do_glusterVolumeSnapshotList, ('[volumeName=<volume_name>]', 'snapshot list for given volume' )), 'glusterCreateBrick': ( serv.do_glusterCreateBrick, ('brickName=<brick_name> mountPoint=<mountPoint> ' 'devices=<device[,device, ...]> ' '[raidType=<raid_type>] [stripeSize=<stripe_size>] ' '[fsType=<fs_type>] [pdCount=<pd_count>] \n\n' '<brick_name> is the name of the brick\n' '<mountPoint> device mount point\n' '<device[,device, ...]> is the list of device name(s)\n' '<fsType> is the file system type of the brick \n' '<raid_type> is the type of raid like 6 or 10 or 0\n' '<stripe_size> is the stripe unit size\n' '<pd_count> is the total number of physical ' 'disks used in the raid\n' '<raid_type>, <stripe_size> and <pd_count> ' 'are the optional parameters\n', 'This will create a brick using given input devices' )), 'glusterGeoRepKeysGet': ( serv.do_glusterGeoRepKeysGet, ('', 'get geo replication public keys for all nodes in cluster' )), 'glusterGeoRepKeysUpdate': ( serv.do_glusterGeoRepKeysUpdate, ('userName=user_name' 'geoRepPubKeys=geo_replication_pub_keys', 'update geo replication public keys to authorized' ' keys file of user' )), 'glusterGeoRepMountBrokerSetup': ( serv.do_glusterGeoRepMountBrokerSetup, ('remoteUserName=remote_user_name' 'remoteVolumeName=remote_volume_name' 'remoteGroupName=remote_group_name', '[partial={yes|no}]' 'setup mount broker for geo replication' )), 'glusterVolumeGeoRepSessionCreate': ( serv.do_glusterVolumeGeoRepSessionCreate, ('volumeName=<master_volume_name> ' 'remoteUserName=<remote_user_name>' 'remoteHost=<slave_host_name> ' 'remoteVolumeName=<slave_volume_name> ' '[force={yes|no}]\n\t' '<master_volume_name>existing volume name in the master node\n\t' '<slave_host_name>is remote slave host name or ip\n\t' '<slave_volume_name>existing volume name in the slave node', 'Create the geo-replication session' )), 
'glusterVolumeGeoRepSessionDelete': ( serv.do_glusterVolumeGeoRepSessionDelete, ('volumeName=<master_volume_name> ' 'remoteUserName=<remote_user_name>' 'remoteHost=<slave_host_name> ' 'remoteVolumeName=<slave_volume_name> ' '<master_volume_name>existing volume name in the master node\n\t' '<slave_host_name>is remote slave host name or ip\n\t' '<slave_volume_name>existing volume name in the slave node', 'Delete the geo-replication session' )), 'glusterVolumeEmptyCheck': ( serv.do_glusterVolumeEmptyCheck, ('volumeName=<volume name>', 'Check if the given volume is empty or not' )), 'glusterMetaVolumeMount': ( serv.do_glusterMetaVolumeMount, ('[volumeName=<volume name>]', 'mount the meta-volume' )), 'glusterSnapshotScheduleOverride': ( serv.do_glusterSnapshotScheduleOverride, ('[force={yes|no}]\n\t', 'override gluster snapshot scheduling' )), 'glusterSnapshotScheduleReset': ( serv.do_glusterSnapshotScheduleReset, ('', 'Reset gluster snapshot scheduling' )) }
gpl-2.0
rahuldan/sympy
sympy/strategies/branch/tests/test_core.py
40
2366
from sympy.strategies.branch.core import (exhaust, debug, multiplex, condition, notempty, chain, onaction, sfilter, yieldify, do_one, identity) from sympy.core.compatibility import get_function_name, range def posdec(x): if x > 0: yield x-1 else: yield x def branch5(x): if 0 < x < 5: yield x-1 elif 5 < x < 10: yield x+1 elif x == 5: yield x+1 yield x-1 else: yield x even = lambda x: x%2 == 0 def inc(x): yield x + 1 def one_to_n(n): for i in range(n): yield i def test_exhaust(): brl = exhaust(branch5) assert set(brl(3)) == {0} assert set(brl(7)) == {10} assert set(brl(5)) == {0, 10} def test_debug(): from sympy.core.compatibility import StringIO file = StringIO() rl = debug(posdec, file) list(rl(5)) log = file.getvalue() file.close() assert get_function_name(posdec) in log assert '5' in log assert '4' in log def test_multiplex(): brl = multiplex(posdec, branch5) assert set(brl(3)) == {2} assert set(brl(7)) == {6, 8} assert set(brl(5)) == {4, 6} def test_condition(): brl = condition(even, branch5) assert set(brl(4)) == set(branch5(4)) assert set(brl(5)) == set([]) def test_sfilter(): brl = sfilter(even, one_to_n) assert set(brl(10)) == {0, 2, 4, 6, 8} def test_notempty(): def ident_if_even(x): if even(x): yield x brl = notempty(ident_if_even) assert set(brl(4)) == {4} assert set(brl(5)) == {5} def test_chain(): assert list(chain()(2)) == [2] # identity assert list(chain(inc, inc)(2)) == [4] assert list(chain(branch5, inc)(4)) == [4] assert set(chain(branch5, inc)(5)) == {5, 7} assert list(chain(inc, branch5)(5)) == [7] def test_onaction(): L = [] def record(fn, input, output): L.append((input, output)) list(onaction(inc, record)(2)) assert L == [(2, 3)] list(onaction(identity, record)(2)) assert L == [(2, 3)] def test_yieldify(): inc = lambda x: x + 1 yinc = yieldify(inc) assert list(yinc(3)) == [4] def test_do_one(): def bad(expr): raise ValueError() yield False assert list(do_one(inc)(3)) == [4] assert list(do_one(inc, bad)(3)) == [4] assert list(do_one(inc, 
posdec)(3)) == [4]
bsd-3-clause
shrimpboyho/git.js
emscript/python/2.7.5.1_32bit/Lib/unittest/test/test_setups.py
152
16460
import sys from cStringIO import StringIO import unittest def resultFactory(*_): return unittest.TestResult() class TestSetups(unittest.TestCase): def getRunner(self): return unittest.TextTestRunner(resultclass=resultFactory, stream=StringIO()) def runTests(self, *cases): suite = unittest.TestSuite() for case in cases: tests = unittest.defaultTestLoader.loadTestsFromTestCase(case) suite.addTests(tests) runner = self.getRunner() # creating a nested suite exposes some potential bugs realSuite = unittest.TestSuite() realSuite.addTest(suite) # adding empty suites to the end exposes potential bugs suite.addTest(unittest.TestSuite()) realSuite.addTest(unittest.TestSuite()) return runner.run(realSuite) def test_setup_class(self): class Test(unittest.TestCase): setUpCalled = 0 @classmethod def setUpClass(cls): Test.setUpCalled += 1 unittest.TestCase.setUpClass() def test_one(self): pass def test_two(self): pass result = self.runTests(Test) self.assertEqual(Test.setUpCalled, 1) self.assertEqual(result.testsRun, 2) self.assertEqual(len(result.errors), 0) def test_teardown_class(self): class Test(unittest.TestCase): tearDownCalled = 0 @classmethod def tearDownClass(cls): Test.tearDownCalled += 1 unittest.TestCase.tearDownClass() def test_one(self): pass def test_two(self): pass result = self.runTests(Test) self.assertEqual(Test.tearDownCalled, 1) self.assertEqual(result.testsRun, 2) self.assertEqual(len(result.errors), 0) def test_teardown_class_two_classes(self): class Test(unittest.TestCase): tearDownCalled = 0 @classmethod def tearDownClass(cls): Test.tearDownCalled += 1 unittest.TestCase.tearDownClass() def test_one(self): pass def test_two(self): pass class Test2(unittest.TestCase): tearDownCalled = 0 @classmethod def tearDownClass(cls): Test2.tearDownCalled += 1 unittest.TestCase.tearDownClass() def test_one(self): pass def test_two(self): pass result = self.runTests(Test, Test2) self.assertEqual(Test.tearDownCalled, 1) self.assertEqual(Test2.tearDownCalled, 1) 
self.assertEqual(result.testsRun, 4) self.assertEqual(len(result.errors), 0) def test_error_in_setupclass(self): class BrokenTest(unittest.TestCase): @classmethod def setUpClass(cls): raise TypeError('foo') def test_one(self): pass def test_two(self): pass result = self.runTests(BrokenTest) self.assertEqual(result.testsRun, 0) self.assertEqual(len(result.errors), 1) error, _ = result.errors[0] self.assertEqual(str(error), 'setUpClass (%s.BrokenTest)' % __name__) def test_error_in_teardown_class(self): class Test(unittest.TestCase): tornDown = 0 @classmethod def tearDownClass(cls): Test.tornDown += 1 raise TypeError('foo') def test_one(self): pass def test_two(self): pass class Test2(unittest.TestCase): tornDown = 0 @classmethod def tearDownClass(cls): Test2.tornDown += 1 raise TypeError('foo') def test_one(self): pass def test_two(self): pass result = self.runTests(Test, Test2) self.assertEqual(result.testsRun, 4) self.assertEqual(len(result.errors), 2) self.assertEqual(Test.tornDown, 1) self.assertEqual(Test2.tornDown, 1) error, _ = result.errors[0] self.assertEqual(str(error), 'tearDownClass (%s.Test)' % __name__) def test_class_not_torndown_when_setup_fails(self): class Test(unittest.TestCase): tornDown = False @classmethod def setUpClass(cls): raise TypeError @classmethod def tearDownClass(cls): Test.tornDown = True raise TypeError('foo') def test_one(self): pass self.runTests(Test) self.assertFalse(Test.tornDown) def test_class_not_setup_or_torndown_when_skipped(self): class Test(unittest.TestCase): classSetUp = False tornDown = False @classmethod def setUpClass(cls): Test.classSetUp = True @classmethod def tearDownClass(cls): Test.tornDown = True def test_one(self): pass Test = unittest.skip("hop")(Test) self.runTests(Test) self.assertFalse(Test.classSetUp) self.assertFalse(Test.tornDown) def test_setup_teardown_order_with_pathological_suite(self): results = [] class Module1(object): @staticmethod def setUpModule(): results.append('Module1.setUpModule') 
@staticmethod def tearDownModule(): results.append('Module1.tearDownModule') class Module2(object): @staticmethod def setUpModule(): results.append('Module2.setUpModule') @staticmethod def tearDownModule(): results.append('Module2.tearDownModule') class Test1(unittest.TestCase): @classmethod def setUpClass(cls): results.append('setup 1') @classmethod def tearDownClass(cls): results.append('teardown 1') def testOne(self): results.append('Test1.testOne') def testTwo(self): results.append('Test1.testTwo') class Test2(unittest.TestCase): @classmethod def setUpClass(cls): results.append('setup 2') @classmethod def tearDownClass(cls): results.append('teardown 2') def testOne(self): results.append('Test2.testOne') def testTwo(self): results.append('Test2.testTwo') class Test3(unittest.TestCase): @classmethod def setUpClass(cls): results.append('setup 3') @classmethod def tearDownClass(cls): results.append('teardown 3') def testOne(self): results.append('Test3.testOne') def testTwo(self): results.append('Test3.testTwo') Test1.__module__ = Test2.__module__ = 'Module' Test3.__module__ = 'Module2' sys.modules['Module'] = Module1 sys.modules['Module2'] = Module2 first = unittest.TestSuite((Test1('testOne'),)) second = unittest.TestSuite((Test1('testTwo'),)) third = unittest.TestSuite((Test2('testOne'),)) fourth = unittest.TestSuite((Test2('testTwo'),)) fifth = unittest.TestSuite((Test3('testOne'),)) sixth = unittest.TestSuite((Test3('testTwo'),)) suite = unittest.TestSuite((first, second, third, fourth, fifth, sixth)) runner = self.getRunner() result = runner.run(suite) self.assertEqual(result.testsRun, 6) self.assertEqual(len(result.errors), 0) self.assertEqual(results, ['Module1.setUpModule', 'setup 1', 'Test1.testOne', 'Test1.testTwo', 'teardown 1', 'setup 2', 'Test2.testOne', 'Test2.testTwo', 'teardown 2', 'Module1.tearDownModule', 'Module2.setUpModule', 'setup 3', 'Test3.testOne', 'Test3.testTwo', 'teardown 3', 'Module2.tearDownModule']) def test_setup_module(self): class 
Module(object): moduleSetup = 0 @staticmethod def setUpModule(): Module.moduleSetup += 1 class Test(unittest.TestCase): def test_one(self): pass def test_two(self): pass Test.__module__ = 'Module' sys.modules['Module'] = Module result = self.runTests(Test) self.assertEqual(Module.moduleSetup, 1) self.assertEqual(result.testsRun, 2) self.assertEqual(len(result.errors), 0) def test_error_in_setup_module(self): class Module(object): moduleSetup = 0 moduleTornDown = 0 @staticmethod def setUpModule(): Module.moduleSetup += 1 raise TypeError('foo') @staticmethod def tearDownModule(): Module.moduleTornDown += 1 class Test(unittest.TestCase): classSetUp = False classTornDown = False @classmethod def setUpClass(cls): Test.classSetUp = True @classmethod def tearDownClass(cls): Test.classTornDown = True def test_one(self): pass def test_two(self): pass class Test2(unittest.TestCase): def test_one(self): pass def test_two(self): pass Test.__module__ = 'Module' Test2.__module__ = 'Module' sys.modules['Module'] = Module result = self.runTests(Test, Test2) self.assertEqual(Module.moduleSetup, 1) self.assertEqual(Module.moduleTornDown, 0) self.assertEqual(result.testsRun, 0) self.assertFalse(Test.classSetUp) self.assertFalse(Test.classTornDown) self.assertEqual(len(result.errors), 1) error, _ = result.errors[0] self.assertEqual(str(error), 'setUpModule (Module)') def test_testcase_with_missing_module(self): class Test(unittest.TestCase): def test_one(self): pass def test_two(self): pass Test.__module__ = 'Module' sys.modules.pop('Module', None) result = self.runTests(Test) self.assertEqual(result.testsRun, 2) def test_teardown_module(self): class Module(object): moduleTornDown = 0 @staticmethod def tearDownModule(): Module.moduleTornDown += 1 class Test(unittest.TestCase): def test_one(self): pass def test_two(self): pass Test.__module__ = 'Module' sys.modules['Module'] = Module result = self.runTests(Test) self.assertEqual(Module.moduleTornDown, 1) 
self.assertEqual(result.testsRun, 2) self.assertEqual(len(result.errors), 0) def test_error_in_teardown_module(self): class Module(object): moduleTornDown = 0 @staticmethod def tearDownModule(): Module.moduleTornDown += 1 raise TypeError('foo') class Test(unittest.TestCase): classSetUp = False classTornDown = False @classmethod def setUpClass(cls): Test.classSetUp = True @classmethod def tearDownClass(cls): Test.classTornDown = True def test_one(self): pass def test_two(self): pass class Test2(unittest.TestCase): def test_one(self): pass def test_two(self): pass Test.__module__ = 'Module' Test2.__module__ = 'Module' sys.modules['Module'] = Module result = self.runTests(Test, Test2) self.assertEqual(Module.moduleTornDown, 1) self.assertEqual(result.testsRun, 4) self.assertTrue(Test.classSetUp) self.assertTrue(Test.classTornDown) self.assertEqual(len(result.errors), 1) error, _ = result.errors[0] self.assertEqual(str(error), 'tearDownModule (Module)') def test_skiptest_in_setupclass(self): class Test(unittest.TestCase): @classmethod def setUpClass(cls): raise unittest.SkipTest('foo') def test_one(self): pass def test_two(self): pass result = self.runTests(Test) self.assertEqual(result.testsRun, 0) self.assertEqual(len(result.errors), 0) self.assertEqual(len(result.skipped), 1) skipped = result.skipped[0][0] self.assertEqual(str(skipped), 'setUpClass (%s.Test)' % __name__) def test_skiptest_in_setupmodule(self): class Test(unittest.TestCase): def test_one(self): pass def test_two(self): pass class Module(object): @staticmethod def setUpModule(): raise unittest.SkipTest('foo') Test.__module__ = 'Module' sys.modules['Module'] = Module result = self.runTests(Test) self.assertEqual(result.testsRun, 0) self.assertEqual(len(result.errors), 0) self.assertEqual(len(result.skipped), 1) skipped = result.skipped[0][0] self.assertEqual(str(skipped), 'setUpModule (Module)') def test_suite_debug_executes_setups_and_teardowns(self): ordering = [] class Module(object): @staticmethod 
def setUpModule(): ordering.append('setUpModule') @staticmethod def tearDownModule(): ordering.append('tearDownModule') class Test(unittest.TestCase): @classmethod def setUpClass(cls): ordering.append('setUpClass') @classmethod def tearDownClass(cls): ordering.append('tearDownClass') def test_something(self): ordering.append('test_something') Test.__module__ = 'Module' sys.modules['Module'] = Module suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test) suite.debug() expectedOrder = ['setUpModule', 'setUpClass', 'test_something', 'tearDownClass', 'tearDownModule'] self.assertEqual(ordering, expectedOrder) def test_suite_debug_propagates_exceptions(self): class Module(object): @staticmethod def setUpModule(): if phase == 0: raise Exception('setUpModule') @staticmethod def tearDownModule(): if phase == 1: raise Exception('tearDownModule') class Test(unittest.TestCase): @classmethod def setUpClass(cls): if phase == 2: raise Exception('setUpClass') @classmethod def tearDownClass(cls): if phase == 3: raise Exception('tearDownClass') def test_something(self): if phase == 4: raise Exception('test_something') Test.__module__ = 'Module' sys.modules['Module'] = Module _suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test) suite = unittest.TestSuite() suite.addTest(_suite) messages = ('setUpModule', 'tearDownModule', 'setUpClass', 'tearDownClass', 'test_something') for phase, msg in enumerate(messages): with self.assertRaisesRegexp(Exception, msg): suite.debug() if __name__ == '__main__': unittest.main()
gpl-2.0
jnewland/home-assistant
homeassistant/components/clicksend/notify.py
7
3014
"""Clicksend platform for notify component.""" import json import logging from aiohttp.hdrs import CONTENT_TYPE import requests import voluptuous as vol from homeassistant.const import ( CONF_API_KEY, CONF_RECIPIENT, CONF_SENDER, CONF_USERNAME, CONTENT_TYPE_JSON) import homeassistant.helpers.config_validation as cv from homeassistant.components.notify import (PLATFORM_SCHEMA, BaseNotificationService) _LOGGER = logging.getLogger(__name__) BASE_API_URL = 'https://rest.clicksend.com/v3' DEFAULT_SENDER = 'hass' TIMEOUT = 5 HEADERS = {CONTENT_TYPE: CONTENT_TYPE_JSON} PLATFORM_SCHEMA = vol.Schema( vol.All(PLATFORM_SCHEMA.extend({ vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_RECIPIENT, default=[]): vol.All(cv.ensure_list, [cv.string]), vol.Optional(CONF_SENDER, default=DEFAULT_SENDER): cv.string, }),)) def get_service(hass, config, discovery_info=None): """Get the ClickSend notification service.""" if not _authenticate(config): _LOGGER.error("You are not authorized to access ClickSend") return None return ClicksendNotificationService(config) class ClicksendNotificationService(BaseNotificationService): """Implementation of a notification service for the ClickSend service.""" def __init__(self, config): """Initialize the service.""" self.username = config[CONF_USERNAME] self.api_key = config[CONF_API_KEY] self.recipients = config[CONF_RECIPIENT] self.sender = config[CONF_SENDER] def send_message(self, message="", **kwargs): """Send a message to a user.""" data = {"messages": []} for recipient in self.recipients: data["messages"].append({ 'source': 'hass.notify', 'from': self.sender, 'to': recipient, 'body': message, }) api_url = "{}/sms/send".format(BASE_API_URL) resp = requests.post(api_url, data=json.dumps(data), headers=HEADERS, auth=(self.username, self.api_key), timeout=TIMEOUT) if resp.status_code == 200: return obj = json.loads(resp.text) response_msg = obj.get('response_msg') response_code = 
obj.get('response_code') _LOGGER.error("Error %s : %s (Code %s)", resp.status_code, response_msg, response_code) def _authenticate(config): """Authenticate with ClickSend.""" api_url = '{}/account'.format(BASE_API_URL) resp = requests.get(api_url, headers=HEADERS, auth=(config[CONF_USERNAME], config[CONF_API_KEY]), timeout=TIMEOUT) if resp.status_code != 200: return False return True
apache-2.0
acutesoftware/worldbuild
scripts/minecraft/go_minecraft.py
1
2260
# go_minecraft.py import sys import time import aikif.toolbox.interface_windows_tools as mod_tool players = ['DynamiteBuilder', 'craftandstore'] #server = '1.9' #server = '1.10' server = '1.11.2' seed = 0 if server == '1.11.2': seed = -7560993781265470572 locations = [ {'name':'home', 'loc':'61 64 239'}, {'name':'Woodlands Mansion', 'loc':'4473 66 5773'}, {'name':'Stronghold', 'loc':'-184 67 1736'}, {'name':'Village', 'loc':'-710 87 548'}, ] elif server == '1.10': seed = 8239770600742919613 locations = [ {'name':'home', 'loc':'248 66 -61'}, {'name':'farm', 'loc':'960 77 -260' }, {'name':'floating-garden', 'loc':'685 107 -588' }, {'name':'floating-castle', 'loc':'-202 105 -655' }, {'name':'stronghold', 'loc':'415 72 -2198' }, {'name':'village', 'loc':'121 77 -2019' }, {'name':'overhang-lookout/evil storm and zoo / garage', 'loc':'-449 110 -1830' }, {'name':'rock-island / harbour', 'loc':'154 98 384' }, {'name':'enchanted-village','loc':'1082 87 -1297' }, {'name':'flower-garden','loc':'1254 105 -1807' }, ] else: seed = 2677023417700615710 locations = [ {'name':'v1-home', 'loc':'151 103 736'}, {'name':'v1-treehouse', 'loc':'120 72 662' }, {'name':'v1-castle', 'loc':'-132 68 388' }, {'name':'v1-village', 'loc':'-298 82 946' }, {'name':'v1-stables', 'loc':'-602 82 951' }, {'name':'v1-desert', 'loc':'-1524 97 1580' }, ] print('Minecraft Teleport Service for players ' + str(players)) print('(server version=' + server + ', seed = ' + str(seed) + ' )') for num, l in enumerate(locations): print(str(num+1) + ' = ' + l['name']) loc = locations[int(input('Enter Location ')) - 1] mod_tool.app_activate('Minecraft server') for p in players: print('Teleporting ' + p + ' to ' + loc['name'] + ' (' + loc['loc'] + ')') mod_tool.send_keys('/tp ' + p + ' ' + loc['loc']) mod_tool.send_keys("{ENTER}") # needs Enter key time.sleep(0.1)
gpl-2.0
egoid/baytree
lib/python2.7/site-packages/debug_toolbar/panels/sql/utils.py
11
1987
from __future__ import absolute_import, unicode_literals import re import sqlparse from django.utils.html import escape from sqlparse import tokens as T class BoldKeywordFilter: """sqlparse filter to bold SQL keywords""" def process(self, stream): """Process the token stream""" for token_type, value in stream: is_keyword = token_type in T.Keyword if is_keyword: yield T.Text, '<strong>' yield token_type, escape(value) if is_keyword: yield T.Text, '</strong>' def reformat_sql(sql): stack = sqlparse.engine.FilterStack() stack.preprocess.append(BoldKeywordFilter()) # add our custom filter stack.postprocess.append(sqlparse.filters.SerializerUnicode()) # tokens -> strings return swap_fields(''.join(stack.run(sql))) def swap_fields(sql): expr = r'SELECT</strong> (...........*?) <strong>FROM' subs = (r'SELECT</strong> ' r'<a class="djDebugUncollapsed djDebugToggle" href="#">&#8226;&#8226;&#8226;</a> ' r'<a class="djDebugCollapsed djDebugToggle" href="#">\1</a> ' r'<strong>FROM') return re.sub(expr, subs, sql) def contrasting_color_generator(): """ Generate constrasting colors by varying most significant bit of RGB first, and then vary subsequent bits systematically. """ def rgb_to_hex(rgb): return '#%02x%02x%02x' % tuple(rgb) triples = [(1, 0, 0), (0, 1, 0), (0, 0, 1), (1, 1, 0), (0, 1, 1), (1, 0, 1), (1, 1, 1)] n = 1 << 7 so_far = [[0, 0, 0]] while True: if n == 0: # This happens after 2**24 colours; presumably, never yield "#000000" # black copy_so_far = list(so_far) for triple in triples: for previous in copy_so_far: rgb = [n * triple[i] + previous[i] for i in range(3)] so_far.append(rgb) yield rgb_to_hex(rgb) n >>= 1
mit
georgemarshall/django
tests/custom_managers/models.py
57
6822
""" Giving models a custom manager You can use a custom ``Manager`` in a particular model by extending the base ``Manager`` class and instantiating your custom ``Manager`` in your model. There are two reasons you might want to customize a ``Manager``: to add extra ``Manager`` methods, and/or to modify the initial ``QuerySet`` the ``Manager`` returns. """ from django.contrib.contenttypes.fields import ( GenericForeignKey, GenericRelation, ) from django.db import models class PersonManager(models.Manager): def get_fun_people(self): return self.filter(fun=True) class PublishedBookManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(is_published=True) class AnnotatedBookManager(models.Manager): def get_queryset(self): return super().get_queryset().annotate( favorite_avg=models.Avg('favorite_books__favorite_thing_id') ) class CustomQuerySet(models.QuerySet): def filter(self, *args, **kwargs): queryset = super().filter(fun=True) queryset._filter_CustomQuerySet = True return queryset def public_method(self, *args, **kwargs): return self.all() def _private_method(self, *args, **kwargs): return self.all() def optout_public_method(self, *args, **kwargs): return self.all() optout_public_method.queryset_only = True def _optin_private_method(self, *args, **kwargs): return self.all() _optin_private_method.queryset_only = False class BaseCustomManager(models.Manager): def __init__(self, arg): super().__init__() self.init_arg = arg def filter(self, *args, **kwargs): queryset = super().filter(fun=True) queryset._filter_CustomManager = True return queryset def manager_only(self): return self.all() CustomManager = BaseCustomManager.from_queryset(CustomQuerySet) class CustomInitQuerySet(models.QuerySet): # QuerySet with an __init__() method that takes an additional argument. 
def __init__(self, custom_optional_arg=None, model=None, query=None, using=None, hints=None): super().__init__(model=model, query=query, using=using, hints=hints) class DeconstructibleCustomManager(BaseCustomManager.from_queryset(CustomQuerySet)): def __init__(self, a, b, c=1, d=2): super().__init__(a) class FunPeopleManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(fun=True) class BoringPeopleManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(fun=False) class Person(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) fun = models.BooleanField(default=False) favorite_book = models.ForeignKey('Book', models.SET_NULL, null=True, related_name='favorite_books') favorite_thing_type = models.ForeignKey('contenttypes.ContentType', models.SET_NULL, null=True) favorite_thing_id = models.IntegerField(null=True) favorite_thing = GenericForeignKey('favorite_thing_type', 'favorite_thing_id') objects = PersonManager() fun_people = FunPeopleManager() boring_people = BoringPeopleManager() custom_queryset_default_manager = CustomQuerySet.as_manager() custom_queryset_custom_manager = CustomManager('hello') custom_init_queryset_manager = CustomInitQuerySet.as_manager() def __str__(self): return "%s %s" % (self.first_name, self.last_name) class FunPerson(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) fun = models.BooleanField(default=True) favorite_book = models.ForeignKey( 'Book', models.SET_NULL, null=True, related_name='fun_people_favorite_books', ) favorite_thing_type = models.ForeignKey('contenttypes.ContentType', models.SET_NULL, null=True) favorite_thing_id = models.IntegerField(null=True) favorite_thing = GenericForeignKey('favorite_thing_type', 'favorite_thing_id') objects = FunPeopleManager() def __str__(self): return "%s %s" % (self.first_name, self.last_name) class Book(models.Model): title = 
models.CharField(max_length=50) author = models.CharField(max_length=30) is_published = models.BooleanField(default=False) authors = models.ManyToManyField(Person, related_name='books') fun_authors = models.ManyToManyField(FunPerson, related_name='books') favorite_things = GenericRelation( Person, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', ) fun_people_favorite_things = GenericRelation( FunPerson, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', ) published_objects = PublishedBookManager() annotated_objects = AnnotatedBookManager() class Meta: base_manager_name = 'annotated_objects' def __str__(self): return self.title class FastCarManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(top_speed__gt=150) class Car(models.Model): name = models.CharField(max_length=10) mileage = models.IntegerField() top_speed = models.IntegerField(help_text="In miles per hour.") cars = models.Manager() fast_cars = FastCarManager() def __str__(self): return self.name class FastCarAsBase(Car): class Meta: proxy = True base_manager_name = 'fast_cars' class FastCarAsDefault(Car): class Meta: proxy = True default_manager_name = 'fast_cars' class RestrictedManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(is_public=True) class RelatedModel(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name class RestrictedModel(models.Model): name = models.CharField(max_length=50) is_public = models.BooleanField(default=False) related = models.ForeignKey(RelatedModel, models.CASCADE) objects = RestrictedManager() plain_manager = models.Manager() def __str__(self): return self.name class OneToOneRestrictedModel(models.Model): name = models.CharField(max_length=50) is_public = models.BooleanField(default=False) related = models.OneToOneField(RelatedModel, models.CASCADE) objects = RestrictedManager() plain_manager = models.Manager() def 
__str__(self): return self.name class AbstractPerson(models.Model): abstract_persons = models.Manager() objects = models.CharField(max_length=30) class Meta: abstract = True class PersonFromAbstract(AbstractPerson): pass
bsd-3-clause
keenondrums/sovrin-node
sovrin_client/agent/walleted.py
1
42903
import asyncio import collections import inspect import json import time from datetime import datetime from typing import Dict, List, Union from base58 import b58decode from common.serializers.serialization import serialize_msg_for_signing from stp_core.common.log import getlogger from plenum.common.signer_did import DidSigner from plenum.common.constants import TYPE, DATA, NONCE, IDENTIFIER, NAME, VERSION, \ TARGET_NYM, ATTRIBUTES, VERKEY, VERIFIABLE_ATTRIBUTES, PREDICATES from plenum.common.types import f from plenum.common.util import getTimeBasedId, getCryptonym, \ isMaxCheckTimeExpired, convertTimeBasedReqIdToMillis, friendlyToRaw from plenum.common.verifier import DidVerifier from anoncreds.protocol.issuer import Issuer from anoncreds.protocol.prover import Prover from anoncreds.protocol.verifier import Verifier from anoncreds.protocol.globals import TYPE_CL from anoncreds.protocol.types import AttribDef, ID, ProofRequest, AvailableClaim from plenum.common.exceptions import NotConnectedToAny from sovrin_client.agent.agent_issuer import AgentIssuer from sovrin_client.agent.backend import BackendSystem from sovrin_client.agent.agent_prover import AgentProver from sovrin_client.agent.agent_verifier import AgentVerifier from sovrin_client.agent.constants import ALREADY_ACCEPTED_FIELD, CLAIMS_LIST_FIELD, \ REQ_MSG, PING, ERROR, EVENT, EVENT_NAME, EVENT_NOTIFY_MSG, \ EVENT_POST_ACCEPT_INVITE, PONG, EVENT_NOT_CONNECTED_TO_ANY_ENV from sovrin_client.agent.exception import NonceNotFound, SignatureRejected from sovrin_client.agent.helper import friendlyVerkeyToPubkey, rawVerkeyToPubkey from sovrin_client.agent.msg_constants import ACCEPT_INVITE, CLAIM_REQUEST, \ PROOF, AVAIL_CLAIM_LIST, CLAIM, PROOF_STATUS, NEW_AVAILABLE_CLAIMS, \ REF_REQUEST_ID, REQ_AVAIL_CLAIMS, INVITE_ACCEPTED, PROOF_REQUEST from sovrin_client.client.wallet.attribute import Attribute, LedgerStore from sovrin_client.client.wallet.connection import Connection, constant from 
sovrin_client.client.wallet.wallet import Wallet from sovrin_common.exceptions import ConnectionNotFound, ConnectionAlreadyExists, \ NotConnectedToNetwork, LinkNotReady, VerkeyNotFound, RemoteEndpointNotFound from sovrin_common.identity import Identity from sovrin_common.constants import ENDPOINT from sovrin_common.util import ensureReqCompleted from sovrin_common.config import agentLoggingLevel from sovrin_common.exceptions import InvalidConnectionException from plenum.common.constants import PUBKEY from sovrin_common.util import getNonceForProof logger = getlogger() logger.setLevel(agentLoggingLevel) class Walleted(AgentIssuer, AgentProver, AgentVerifier): """ An agent with a self-contained wallet. Normally, other logic acts upon a remote agent. That other logic holds keys and signs messages and transactions that the Agent then forwards. In this case, the agent holds a wallet. """ def __init__(self, issuer: Issuer = None, prover: Prover = None, verifier: Verifier = None): AgentIssuer.__init__(self, issuer) AgentProver.__init__(self, prover) AgentVerifier.__init__(self, verifier) # TODO Why are we syncing the client here? 
if self.client: self.syncClient() self.rcvdMsgStore = {} # type: Dict[reqId, [reqMsg]] self.msgHandlers = { ERROR: self._handleError, EVENT: self._eventHandler, PING: self._handlePing, ACCEPT_INVITE: self._handleAcceptance, REQ_AVAIL_CLAIMS: self.processReqAvailClaims, CLAIM_REQUEST: self.processReqClaim, CLAIM: self.handleReqClaimResponse, PROOF: self.verifyProof, PROOF_STATUS: self.handleProofStatusResponse, PROOF_REQUEST: self.handleProofRequest, PONG: self._handlePong, INVITE_ACCEPTED: self._handleAcceptInviteResponse, AVAIL_CLAIM_LIST: self._handleAvailableClaimsResponse, NEW_AVAILABLE_CLAIMS: self._handleNewAvailableClaimsDataResponse } self.logger = logger self.issuer_backend = None self._invites = {} # type: Dict[Nonce, Tuple(InternalId, str)] self._attribDefs = {} # type: Dict[str, AttribDef] self.defined_claims = [] # type: List[Dict[str, Any] # dict for proof request schema Dict[str, Dict[str, any]] self._proofRequestsSchema = {} def syncClient(self): obs = self._wallet.handleIncomingReply if not self.client.hasObserver(obs): self.client.registerObserver(obs) self._wallet.pendSyncRequests() prepared = self._wallet.preparePending() self.client.submitReqs(*prepared) @property def wallet(self) -> Wallet: return self._wallet @wallet.setter def wallet(self, wallet): self._wallet = wallet @property def lockedMsgs(self): # Msgs for which signature verification is required return ACCEPT_INVITE, CLAIM_REQUEST, PROOF, \ CLAIM, AVAIL_CLAIM_LIST, EVENT, PONG, REQ_AVAIL_CLAIMS async def postProofVerif(self, claimName, link, frm): raise NotImplementedError def is_claim_available(self, link, claim_name): return any( ac[NAME] == claim_name for ac in self._get_available_claim_list_by_internal_id( link.internalId)) async def _postProofVerif(self, claimName, link, frm): link.verifiedClaimProofs.append(claimName) await self.postProofVerif(claimName, link, frm) async def _set_available_claim_by_internal_id(self, internal_id, schema_id): sd = await 
self.schema_dict_from_id(schema_id) try: if not any( d == sd for d in self.issuer.wallet.availableClaimsByInternalId[internal_id]): self.issuer.wallet.availableClaimsByInternalId[internal_id].append( sd) except KeyError: self.issuer.wallet.availableClaimsByInternalId[internal_id] = [sd] def _get_available_claim_list_by_internal_id(self, internal_id): return self.issuer.wallet.availableClaimsByInternalId.get( internal_id, set()) def get_available_claim_list(self, link): li = self.wallet.getConnectionBy(remote=link.remoteIdentifier) # TODO: Need to return set instead of list, but if we return set, # stack communication fails as set is not json serializable, # need to work on that. if li is None: return list() return list( self._get_available_claim_list_by_internal_id(li.internalId)) def getErrorResponse(self, reqBody, errorMsg="Error"): invalidSigResp = { TYPE: ERROR, DATA: errorMsg, REQ_MSG: reqBody, } return invalidSigResp def logAndSendErrorResp(self, to, reqBody, respMsg, logMsg): logger.warning(logMsg) self.signAndSend(msg=self.getErrorResponse(reqBody, respMsg), signingIdr=self.wallet.defaultId, name=to) # TODO: Verification needs to be moved out of it, # use `verifySignature` instead def verifyAndGetLink(self, msg): body, (frm, ha) = msg nonce = body.get(NONCE) try: kwargs = dict(nonce=nonce, remoteIdr=body.get( f.IDENTIFIER.nm), remoteHa=ha) if ha is None: # Incase of ZStack, kwargs.update(remotePubkey=frm) return self.linkFromNonce(**kwargs) except NonceNotFound: self.logAndSendErrorResp(frm, body, "Nonce not found", "Nonce not found for msg: {}".format(msg)) return None def linkFromNonce(self, nonce, remoteIdr, remoteHa=None, remotePubkey=None): internalId = self.get_internal_id_by_nonce(nonce) linkName = self.get_link_name_by_internal_id(internalId) link = self.wallet.getConnectionBy(internalId=internalId) if not link: # QUESTION: We use wallet.defaultId as the local identifier, # this looks ok for test code, but not production code link = 
Connection(linkName, self.wallet.defaultId, self.wallet.getVerkey(), request_nonce=nonce, remoteIdentifier=remoteIdr, remoteEndPoint=remoteHa, internalId=internalId, remotePubkey=remotePubkey) self.wallet.addConnection(link) else: link.remoteIdentifier = remoteIdr link.remoteEndPoint = remoteHa return link def get_internal_id_by_nonce(self, nonce): if nonce in self._invites: return self._invites[nonce][0] else: raise NonceNotFound def get_link_name_by_internal_id(self, internalId): for invite in self._invites.values(): if invite[0] == internalId: return invite[1] def set_issuer_backend(self, backend: BackendSystem): self.issuer_backend = backend async def publish_issuer_keys(self, schema_id, p_prime, q_prime): keys = await self.issuer.genKeys(schema_id, p_prime=p_prime, q_prime=q_prime) await self.add_to_available_claims(schema_id) return keys async def schema_dict_from_id(self, schema_id): schema = await self.issuer.wallet.getSchema(schema_id) return self.schema_dict(schema) async def publish_revocation_registry(self, schema_id, rev_reg_id='110', size=5): return await self.issuer.issueAccumulator(schemaId=schema_id, iA=rev_reg_id, L=size) def schema_dict(self, schema): return { NAME: schema.name, VERSION: schema.version, "schemaSeqNo": schema.seqId } async def add_to_available_claims(self, schema_id): schema = await self.issuer.wallet.getSchema(schema_id) self.defined_claims.append(self.schema_dict(schema)) async def publish_schema(self, attrib_def_name, schema_name, schema_version): attribDef = self._attribDefs[attrib_def_name] schema = await self.issuer.genSchema(schema_name, schema_version, attribDef.attribNames()) schema_id = ID(schemaKey=schema.getKey(), schemaId=schema.seqId) return schema_id def add_attribute_definition(self, attr_def: AttribDef): self._attribDefs[attr_def.name] = attr_def async def get_claim(self, schema_id: ID): return await self.prover.wallet.getClaimAttributes(schema_id) def new_identifier(self, seed=None): idr, _ = 
self.wallet.addIdentifier(seed=seed) verkey = self.wallet.getVerkey(idr) return idr, verkey def get_link_by_name(self, name): return self.wallet.getConnection(str(name)) def signAndSendToLink(self, msg, linkName, origReqId=None): link = self.wallet.getConnection(linkName, required=True) if not link.localIdentifier: raise LinkNotReady('connection is not yet established, ' 'send/accept request first') ha = link.getRemoteEndpoint(required=False) name = link.name if not ha: # if not remote address is present, then it's upcominh link, so we may have no # explicit connection (wrk in a listener mode). # PulicKey is used as a name in this case name = link.remotePubkey if ha: self.connectTo(link=link) return self.signAndSend(msg=msg, signingIdr=link.localIdentifier, name=name, ha=ha, origReqId=origReqId) def signAndSend(self, msg, signingIdr, name=None, ha=None, origReqId=None): msg[f.REQ_ID.nm] = getTimeBasedId() if origReqId: msg[REF_REQUEST_ID] = origReqId msg[IDENTIFIER] = signingIdr signature = self.wallet.signMsg(msg, signingIdr) msg[f.SIG.nm] = signature self.sendMessage(msg, name=name, ha=ha) return msg[f.REQ_ID.nm] @staticmethod def getCommonMsg(typ, data): msg = { TYPE: typ, DATA: data } return msg @classmethod def createInviteAcceptedMsg(cls, claimLists, alreadyAccepted=False): data = { CLAIMS_LIST_FIELD: claimLists } if alreadyAccepted: data[ALREADY_ACCEPTED_FIELD] = alreadyAccepted return cls.getCommonMsg(INVITE_ACCEPTED, data) @classmethod def createNewAvailableClaimsMsg(cls, claimLists): data = { CLAIMS_LIST_FIELD: claimLists } return cls.getCommonMsg(NEW_AVAILABLE_CLAIMS, data) @classmethod def createClaimMsg(cls, claim): return cls.getCommonMsg(CLAIM, claim) def _eventHandler(self, msg): body, _ = msg eventName = body[EVENT_NAME] data = body[DATA] self.notifyEventListeners(eventName, **data) def notifyEventListeners(self, eventName, **data): for el in self._eventListeners.get(eventName, []): el(notifier=self, **data) def notifyMsgListener(self, msg): 
self.notifyEventListeners(EVENT_NOTIFY_MSG, msg=msg) def isSignatureVerifRespRequired(self, typ): return typ in self.lockedMsgs and typ not in [EVENT, PING, PONG] def sendSigVerifResponseMsg(self, respMsg, to, reqMsgTyp, identifier): if self.isSignatureVerifRespRequired(reqMsgTyp): self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, respMsg, identifier, to) def handleEndpointMessage(self, msg): body, frm = msg logger.debug("Message received (from -> {}): {}".format(frm, body)) if isinstance(frm, bytes): frm = frm.decode() for reqFieldName in (TYPE, f.REQ_ID.nm): reqFieldValue = body.get(reqFieldName) if not reqFieldValue: errorMsg = "{} not specified in message: {}".format( reqFieldName, body) self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, errorMsg, self.wallet.defaultId, frm) logger.warning("{}".format(errorMsg)) return typ = body.get(TYPE) link = self.wallet.getConnectionBy(remote=body.get(f.IDENTIFIER.nm)) # If accept invite is coming the first time, then use the default # identifier of the wallet since link wont be created if typ == ACCEPT_INVITE and link is None: localIdr = self.wallet.defaultId else: # if accept invite is not the message type # and we are still missing link, then return the error if link is None: linkNotCreated = ' Error processing {}. ' \ 'Connection is not yet created.'.format(typ) self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, linkNotCreated, self.wallet.defaultId, frm) return localIdr = link.localIdentifier if typ in self.lockedMsgs: try: self.verifySignature(body) except SignatureRejected: self.sendSigVerifResponseMsg("\nSignature rejected.", frm, typ, localIdr) return reqId = body.get(f.REQ_ID.nm) oldResps = self.rcvdMsgStore.get(reqId) if oldResps: oldResps.append(msg) else: self.rcvdMsgStore[reqId] = [msg] # TODO: Question: Should we sending an acknowledgement for every message? 
# We are sending, ACKs for "signature accepted" messages too self.sendSigVerifResponseMsg("\nSignature accepted.", frm, typ, localIdr) handler = self.msgHandlers.get(typ) if handler: # TODO we should verify signature here frmHa = self.endpoint.getHa(frm) # `frmHa` can be None res = handler((body, (frm, frmHa))) if inspect.isawaitable(res): self.loop.call_soon(asyncio.ensure_future, res) else: raise NotImplementedError("No type handle found for {} message". format(typ)) def _handleError(self, msg): body, _ = msg self.notifyMsgListener("Error ({}) occurred while processing this " "msg: {}".format(body[DATA], body[REQ_MSG])) def _handlePing(self, msg): body, (frm, ha) = msg link = self.wallet.getConnectionBy(nonce=body.get(NONCE)) if link: self.logger.info('Ping sent to %s', link.remoteIdentifier) self.signAndSend({TYPE: 'pong'}, self.wallet.defaultId, frm, origReqId=body.get(f.REQ_ID.nm)) def _handlePong(self, msg): body, (frm, ha) = msg identifier = body.get(IDENTIFIER) if identifier: li = self._getLinkByTarget(getCryptonym(identifier)) if li: self.logger.info('Pong received from %s', li.remoteIdentifier) self.notifyMsgListener(" Pong received.") else: self.notifyMsgListener( " Pong received from unknown endpoint.") else: self.notifyMsgListener(' Identifier is not yet set.') def _handleNewAvailableClaimsDataResponse(self, msg): body, _ = msg isVerified = self.verifySignature(body) if isVerified: identifier = body.get(IDENTIFIER) li = self._getLinkByTarget(getCryptonym(identifier)) if li: self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] newAvailableClaims = self._getNewAvailableClaims( li, rcvdAvailableClaims) if newAvailableClaims: li.availableClaims.extend(newAvailableClaims) claimNames = ", ".join( [n for n, _, _ in newAvailableClaims]) self.notifyMsgListener( " Available Claim(s): {}\n".format(claimNames)) else: self.notifyMsgListener("No matching connection found") @staticmethod def 
_getNewAvailableClaims( li, rcvdAvailableClaims) -> List[AvailableClaim]: receivedClaims = [AvailableClaim(cl[NAME], cl[VERSION], li.remoteIdentifier) for cl in rcvdAvailableClaims] existingAvailableClaims = set(li.availableClaims) newReceivedClaims = set(receivedClaims) return list(newReceivedClaims - existingAvailableClaims) def _handleAvailableClaimsResponse(self, msg): body, _ = msg identifier = body.get(IDENTIFIER) li = self._getLinkByTarget(getCryptonym(identifier)) if li: rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] if len(rcvdAvailableClaims) > 0: self.notifyMsgListener(" Available Claim(s): {}". format( ",".join([rc.get(NAME) for rc in rcvdAvailableClaims]))) else: self.notifyMsgListener(" Available Claim(s): " "No available claims found") def _handleAcceptInviteResponse(self, msg): body, _ = msg identifier = body.get(IDENTIFIER) li = self._getLinkByTarget(getCryptonym(identifier)) if li: # TODO: Show seconds took to respond self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm)) self.notifyMsgListener(" Trust established.") alreadyAccepted = body[DATA].get(ALREADY_ACCEPTED_FIELD) if alreadyAccepted: self.notifyMsgListener(" Already accepted.") else: self.notifyMsgListener(" DID created in Sovrin.") li.connection_status = constant.CONNECTION_STATUS_ACCEPTED rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD] newAvailableClaims = self._getNewAvailableClaims( li, rcvdAvailableClaims) if newAvailableClaims: li.availableClaims.extend(newAvailableClaims) self.notifyMsgListener(" Available Claim(s): {}". 
format( ",".join([rc.get(NAME) for rc in rcvdAvailableClaims]))) try: self._checkIfLinkIdentifierWrittenToSovrin( li, newAvailableClaims) except NotConnectedToAny: self.notifyEventListeners( EVENT_NOT_CONNECTED_TO_ANY_ENV, msg="Cannot check if identifier is written to Sovrin.") else: self.notifyMsgListener("No matching connection found") def getVerkeyForLink(self, link): # TODO: Get latest verkey for this link's remote identifier from Sovrin if link.remoteVerkey: return link.remoteVerkey else: raise VerkeyNotFound("verkey not set in connection") def getLinkForMsg(self, msg): nonce = msg.get(NONCE) identifier = msg.get(f.IDENTIFIER.nm) link = self.wallet.getConnectionBy(nonce=nonce, remote=identifier) if link: return link else: raise ConnectionNotFound def verifySignature(self, msg: Dict[str, str]): signature = msg.get(f.SIG.nm) identifier = msg.get(IDENTIFIER) msgWithoutSig = {k: v for k, v in msg.items() if k != f.SIG.nm} # TODO This assumes the current key is the cryptonym. This is a BAD # ASSUMPTION!!! Sovrin needs to provide the current key. ser = serialize_msg_for_signing(msgWithoutSig) signature = b58decode(signature.encode()) typ = msg.get(TYPE) # TODO: Maybe keeping ACCEPT_INVITE open is a better option than keeping # an if condition here? if typ == ACCEPT_INVITE: verkey = msg.get(VERKEY) else: try: link = self.getLinkForMsg(msg) verkey = self.getVerkeyForLink(link) except (ConnectionNotFound, VerkeyNotFound): # This is for verification of `NOTIFY` events link = self.wallet.getConnectionBy(remote=identifier) # TODO: If verkey is None, it should be fetched from Sovrin. # Assuming CID for now. 
verkey = link.remoteVerkey v = DidVerifier(verkey, identifier=identifier) if not v.verify(signature, ser): raise SignatureRejected else: if typ == ACCEPT_INVITE: self.logger.info('Signature accepted.') return True def _getLinkByTarget(self, target) -> Connection: return self.wallet.getConnectionBy(remote=target) def _checkIfLinkIdentifierWrittenToSovrin( self, li: Connection, availableClaims): req = self.getIdentity(li.localIdentifier) self.notifyMsgListener("\nSynchronizing...") def getNymReply(reply, err, availableClaims, li: Connection): if reply.get(DATA) and json.loads(reply[DATA])[TARGET_NYM] == \ li.localIdentifier: self.notifyMsgListener( " Confirmed DID written to Sovrin.") self.notifyEventListeners( EVENT_POST_ACCEPT_INVITE, connection=li) else: self.notifyMsgListener( " DID is not yet written to Sovrin") self.loop.call_later(.2, ensureReqCompleted, self.loop, req.key, self.client, getNymReply, (availableClaims, li)) def notifyResponseFromMsg(self, linkName, reqId=None): if reqId: # TODO: This logic assumes that the req id is time based curTimeBasedId = getTimeBasedId() timeTakenInMillis = convertTimeBasedReqIdToMillis( curTimeBasedId - reqId) if timeTakenInMillis >= 1000: responseTime = ' ({} sec)'.format( round(timeTakenInMillis / 1000, 2)) else: responseTime = ' ({} ms)'.format(round(timeTakenInMillis, 2)) else: responseTime = '' self.notifyMsgListener("\nResponse from {}{}:".format(linkName, responseTime)) def notifyToRemoteCaller(self, event, msg, signingIdr, to, origReqId=None): resp = { TYPE: EVENT, EVENT_NAME: event, DATA: {'msg': msg} } self.signAndSend(resp, signingIdr, to, origReqId=origReqId) def _handleAcceptance(self, msg): body, (frm, ha) = msg link = self.verifyAndGetLink(msg) # TODO this is really kludgy code... needs refactoring # exception handling, separation of concerns, etc. 
if not link: return logger.debug("proceeding with connection: {}".format(link.name)) identifier = body.get(f.IDENTIFIER.nm) verkey = body.get(VERKEY) idy = Identity(identifier, verkey=verkey) link.remoteVerkey = verkey try: pendingCount = self.wallet.addTrustAnchoredIdentity(idy) logger.debug("pending request count {}".format(pendingCount)) alreadyAdded = False except Exception as e: if e.args[0] in ['identifier already added']: alreadyAdded = True else: logger.warning("Exception raised while adding nym, " "error was: {}".format(e.args[0])) raise e def send_claims(reply=None, error=None): return self.sendClaimList(link=link, alreadyAdded=alreadyAdded, sender=frm, reqId=body.get(f.REQ_ID.nm), reply=reply, error=error) if alreadyAdded: send_claims() logger.debug("already accepted, " "so directly sending available claims") self.logger.info('Already added identifier [{}] in sovrin' .format(identifier)) # self.notifyToRemoteCaller(EVENT_NOTIFY_MSG, # " Already accepted", # link.verkey, frm) else: logger.debug( "not added to the ledger, so add nym to the ledger " "and then will send available claims") reqs = self.wallet.preparePending() # Assuming there was only one pending request logger.debug("sending to sovrin {}".format(reqs[0])) # Need to think through # how to provide separate logging for each agent # anyhow this class should be implemented by each agent # so we might not even need to add it as a separate logic self.logger.info('Creating identifier [{}] in sovrin' .format(identifier)) self._sendToSovrinAndDo(reqs[0], clbk=send_claims) # TODO: If I have the below exception thrown, somehow the # error msg which is sent in verifyAndGetLink is not being received # on the other end, so for now, commented, need to come back to this # else: # raise NotImplementedError def sendClaimList(self, link, alreadyAdded, sender, reqId, reply=None, error=None): logger.debug("sending available claims to {}".format( link.remoteIdentifier)) resp = self.createInviteAcceptedMsg( 
self.get_available_claim_list(link), alreadyAccepted=alreadyAdded) self.signAndSend(resp, link.localIdentifier, sender, origReqId=reqId) def _sendToSovrinAndDo(self, req, clbk=None, *args, **kwargs): self.client.submitReqs(req) ensureReqCompleted(self.loop, req.key, self.client, clbk, *args, **kwargs) def newAvailableClaimsPostClaimVerif(self, claimName): raise NotImplementedError def sendNewAvailableClaimsData(self, nac, frm, link): if len(nac) > 0: resp = self.createNewAvailableClaimsMsg(nac) self.signAndSend(resp, link.localIdentifier, frm) def sendPing(self, linkName): link = self.wallet.getConnection(linkName, required=True) self.connectTo(link=link) ha = link.getRemoteEndpoint(required=True) params = dict(ha=ha) msg = { TYPE: 'ping', NONCE: link.request_nonce, f.REQ_ID.nm: getTimeBasedId(), f.IDENTIFIER.nm: link.localIdentifier } reqId = self.sendMessage(msg, **params) self.notifyMsgListener(" Ping sent.") return reqId def connectTo(self, linkName=None, link=None): assert linkName or link if link is None: link = self.wallet.getConnection(linkName, required=True) ha = link.getRemoteEndpoint(required=True) verKeyRaw = friendlyToRaw( link.full_remote_verkey) if link.full_remote_verkey else None publicKeyRaw = friendlyToRaw( link.remotePubkey) if link.remotePubkey else None if verKeyRaw is None and publicKeyRaw is None: raise InvalidConnectionException( "verkey or publicKey is required for connection.") if publicKeyRaw is None: publicKeyRaw = rawVerkeyToPubkey(verKeyRaw) self.endpoint.connectIfNotConnected( name=link.name, ha=ha, verKeyRaw=verKeyRaw, publicKeyRaw=publicKeyRaw) # duplicate function # def loadInvitationFile(self, filePath): # with open(filePath) as data_file: # request = json.load( # data_file, object_pairs_hook=collections.OrderedDict) # return self.load_request_dict(request) def load_request_str(self, json_str): request = json.loads( json_str, object_pairs_hook=collections.OrderedDict) return self.load_request_dict(request) def 
load_request_dict(self, request_dict): link_request = request_dict.get("connection-request") if not link_request: raise ConnectionNotFound linkName = link_request["name"] existingLinkInvites = self.wallet. \ getMatchingConnections(linkName) if len(existingLinkInvites) >= 1: return self._merge_request(request_dict) Connection.validate(request_dict) link = self.load_request(request_dict) return link def load_request(self, request_data): link_request = request_data["connection-request"] remoteIdentifier = link_request[f.IDENTIFIER.nm] # TODO signature should be validated! # signature = request_data["sig"] link_request_name = link_request[NAME] remoteEndPoint = link_request.get("endpoint", None) remote_verkey = link_request.get("verkey", None) linkNonce = link_request[NONCE] proofRequestsJson = request_data.get("proof-requests", None) proofRequests = [] if proofRequestsJson: for cr in proofRequestsJson: proofRequests.append( ProofRequest( cr[NAME], cr[VERSION], getNonceForProof(linkNonce), cr[ATTRIBUTES], cr[VERIFIABLE_ATTRIBUTES] if VERIFIABLE_ATTRIBUTES in cr else [], cr[PREDICATES] if PREDICATES in cr else [])) self.notifyMsgListener("1 connection request found for {}.". format(link_request_name)) self.notifyMsgListener("Creating connection for {}.". format(link_request_name)) # TODO: Would we always have a trust anchor corresponding to a link? li = Connection(name=link_request_name, trustAnchor=link_request_name, remoteIdentifier=remoteIdentifier, remoteEndPoint=remoteEndPoint, request_nonce=linkNonce, proofRequests=proofRequests, remote_verkey=remote_verkey) self.wallet.addConnection(li) return li def load_request_file(self, filePath): with open(filePath) as data_file: request_data = json.load( data_file, object_pairs_hook=collections.OrderedDict) link_request = request_data.get("connection-request") if not link_request: raise ConnectionNotFound linkName = link_request["name"] existingLinkInvites = self.wallet. 
\ getMatchingConnections(linkName) if len(existingLinkInvites) >= 1: return self._merge_request(request_data) Connection.validate(request_data) link = self.load_request(request_data) return link def _merge_request(self, request_data): link_request = request_data.get('connection-request') linkName = link_request['name'] link = self.wallet.getConnection(linkName) request_proof_requests = request_data.get('proof-requests', None) nonce = link_request.get(NONCE) if request_proof_requests: for icr in request_proof_requests: # match is found if name and version are same matchedProofRequest = next( (cr for cr in link.proofRequests if (cr.name == icr[NAME] and cr.version == icr[VERSION])), None ) # if link.requestedProofs contains any claim request if matchedProofRequest: # merge 'attributes' and 'verifiableAttributes' matchedProofRequest.attributes = { **matchedProofRequest.attributes, **icr[ATTRIBUTES] } matchedProofRequest.verifiableAttributes = dict( matchedProofRequest.verifiableAttributes, **icr[VERIFIABLE_ATTRIBUTES]) else: # otherwise append proof request to link link.proofRequests.append( ProofRequest( icr[NAME], icr[VERSION], getNonceForProof(nonce), attributes=icr[ATTRIBUTES], verifiableAttributes=icr[VERIFIABLE_ATTRIBUTES])) return link else: raise ConnectionAlreadyExists def accept_request(self, link: Union[str, Connection]): if isinstance(link, str): link = self.wallet.getConnection(link, required=True) elif isinstance(link, Connection): pass else: raise TypeError( "Type of connection must be either string or Link but " "provided {}".format( type(link))) # TODO should move to wallet in a method like accept(link) if not link.localIdentifier: self.create_identifier_for_link(link) msg = { TYPE: ACCEPT_INVITE, # TODO should not send this... because origin should be the sender NONCE: link.request_nonce, VERKEY: self.wallet.getVerkey(link.localIdentifier) } logger.debug("{} accepting request from {} with id {}". 
format(self.name, link.name, link.remoteIdentifier)) self.logger.info('Accepting request with nonce {} from id {}' .format(link.request_nonce, link.remoteIdentifier)) self.signAndSendToLink(msg, link.name) # def _handleSyncNymResp(self, link, additionalCallback): # def _(reply, err): # if err: # raise RuntimeError(err) # reqId = self._updateLinkWithLatestInfo(link, reply) # if reqId: # self.loop.call_later(.2, # self.executeWhenResponseRcvd, # time.time(), 8000, # self.loop, reqId, PONG, True, # additionalCallback, reply, err) # else: # additionalCallback(reply, err) # # return _ def create_identifier_for_link(self, link): signer = DidSigner() self.wallet.addIdentifier(signer=signer) link.localIdentifier = signer.identifier link.localVerkey = signer.verkey def _handleSyncResp(self, link, additionalCallback): def _(reply, err): if err: raise RuntimeError(err) reqId = self._updateLinkWithLatestInfo(link, reply) if reqId: self.loop.call_later(.2, self.executeWhenResponseRcvd, time.time(), 8000, self.loop, reqId, PONG, True, additionalCallback, reply, err) else: if callable(additionalCallback): additionalCallback(reply, err) return _ def _updateLinkWithLatestInfo(self, link: Connection, reply): if DATA in reply and reply[DATA]: data = json.loads(reply[DATA]) verkey = data.get(VERKEY) if verkey is not None: link.remoteVerkey = data[VERKEY] ep = data.get(ENDPOINT) if isinstance(ep, dict): # TODO: Validate its an IP port pair or a malicious entity # can crash the code if 'ha' in ep: ip, port = ep['ha'].split(":") link.remoteEndPoint = (ip, int(port)) if PUBKEY in ep: link.remotePubkey = ep[PUBKEY] else: link.remotePubkey = friendlyVerkeyToPubkey( link.full_remote_verkey) if link.full_remote_verkey else None link.connection_last_synced = datetime.now() self.notifyMsgListener( " Connection {} synced".format(link.name)) def _pingToEndpoint(self, name, endpoint): self.notifyMsgListener("\nPinging target endpoint: {}". 
format(endpoint)) reqId = self.sendPing(linkName=name) return reqId def sync(self, linkName, doneCallback=None): if not self.client.isReady(): raise NotConnectedToNetwork link = self.wallet.getConnection(linkName, required=True) identifier = link.remoteIdentifier identity = Identity(identifier=identifier) req = self.wallet.requestIdentity(identity, sender=self.wallet.defaultId) self.client.submitReqs(req) self.loop.call_later(.2, ensureReqCompleted, self.loop, req.key, self.client, self._handleSyncResp(link, None)) attrib = Attribute(name=ENDPOINT, value=None, dest=identifier, ledgerStore=LedgerStore.RAW) req = self.wallet.requestAttribute( attrib, sender=self.wallet.defaultId) self.client.submitReqs(req) self.loop.call_later(.2, ensureReqCompleted, self.loop, req.key, self.client, self._handleSyncResp(link, doneCallback)) def executeWhenResponseRcvd(self, startTime, maxCheckForMillis, loop, reqId, respType, checkIfLinkExists, clbk, *args): if isMaxCheckTimeExpired(startTime, maxCheckForMillis): clbk( None, "No response received within specified time ({} mills). " "Retry the command and see if that works.\n". format(maxCheckForMillis)) else: found = False rcvdResponses = self.rcvdMsgStore.get(reqId) if rcvdResponses: for msg in rcvdResponses: body, frm = msg if body.get(TYPE) == respType: if checkIfLinkExists: identifier = body.get(IDENTIFIER) li = self._getLinkByTarget( getCryptonym(identifier)) linkCheckOk = li is not None else: linkCheckOk = True if linkCheckOk: found = True break if found: clbk(*args) else: loop.call_later(.2, self.executeWhenResponseRcvd, startTime, maxCheckForMillis, loop, reqId, respType, checkIfLinkExists, clbk, *args)
apache-2.0
czgu/metaHack
env/lib/python2.7/site-packages/django/contrib/gis/gdal/tests/test_envelope.py
94
3660
import unittest from unittest import skipUnless from django.contrib.gis.gdal import HAS_GDAL if HAS_GDAL: from django.contrib.gis.gdal import Envelope, OGRException class TestPoint(object): def __init__(self, x, y): self.x = x self.y = y @skipUnless(HAS_GDAL, "GDAL is required") class EnvelopeTest(unittest.TestCase): def setUp(self): self.e = Envelope(0, 0, 5, 5) def test01_init(self): "Testing Envelope initialization." e1 = Envelope((0, 0, 5, 5)) Envelope(0, 0, 5, 5) Envelope(0, '0', '5', 5) # Thanks to ww for this Envelope(e1._envelope) self.assertRaises(OGRException, Envelope, (5, 5, 0, 0)) self.assertRaises(OGRException, Envelope, 5, 5, 0, 0) self.assertRaises(OGRException, Envelope, (0, 0, 5, 5, 3)) self.assertRaises(OGRException, Envelope, ()) self.assertRaises(ValueError, Envelope, 0, 'a', 5, 5) self.assertRaises(TypeError, Envelope, 'foo') self.assertRaises(OGRException, Envelope, (1, 1, 0, 0)) try: Envelope(0, 0, 0, 0) except OGRException: self.fail("shouldn't raise an exception for min_x == max_x or min_y == max_y") def test02_properties(self): "Testing Envelope properties." e = Envelope(0, 0, 2, 3) self.assertEqual(0, e.min_x) self.assertEqual(0, e.min_y) self.assertEqual(2, e.max_x) self.assertEqual(3, e.max_y) self.assertEqual((0, 0), e.ll) self.assertEqual((2, 3), e.ur) self.assertEqual((0, 0, 2, 3), e.tuple) self.assertEqual('POLYGON((0.0 0.0,0.0 3.0,2.0 3.0,2.0 0.0,0.0 0.0))', e.wkt) self.assertEqual('(0.0, 0.0, 2.0, 3.0)', str(e)) def test03_equivalence(self): "Testing Envelope equivalence." e1 = Envelope(0.523, 0.217, 253.23, 523.69) e2 = Envelope((0.523, 0.217, 253.23, 523.69)) self.assertEqual(e1, e2) self.assertEqual((0.523, 0.217, 253.23, 523.69), e1) def test04_expand_to_include_pt_2_params(self): "Testing Envelope expand_to_include -- point as two parameters." 
self.e.expand_to_include(2, 6) self.assertEqual((0, 0, 5, 6), self.e) self.e.expand_to_include(-1, -1) self.assertEqual((-1, -1, 5, 6), self.e) def test05_expand_to_include_pt_2_tuple(self): "Testing Envelope expand_to_include -- point as a single 2-tuple parameter." self.e.expand_to_include((10, 10)) self.assertEqual((0, 0, 10, 10), self.e) self.e.expand_to_include((-10, -10)) self.assertEqual((-10, -10, 10, 10), self.e) def test06_expand_to_include_extent_4_params(self): "Testing Envelope expand_to_include -- extent as 4 parameters." self.e.expand_to_include(-1, 1, 3, 7) self.assertEqual((-1, 0, 5, 7), self.e) def test06_expand_to_include_extent_4_tuple(self): "Testing Envelope expand_to_include -- extent as a single 4-tuple parameter." self.e.expand_to_include((-1, 1, 3, 7)) self.assertEqual((-1, 0, 5, 7), self.e) def test07_expand_to_include_envelope(self): "Testing Envelope expand_to_include with Envelope as parameter." self.e.expand_to_include(Envelope(-1, 1, 3, 7)) self.assertEqual((-1, 0, 5, 7), self.e) def test08_expand_to_include_point(self): "Testing Envelope expand_to_include with Point as parameter." self.e.expand_to_include(TestPoint(-1, 1)) self.assertEqual((-1, 0, 5, 5), self.e) self.e.expand_to_include(TestPoint(10, 10)) self.assertEqual((-1, 0, 10, 10), self.e)
apache-2.0
blaggacao/odoo
addons/website_forum/__openerp__.py
62
1843
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2014-Today OpenERP SA (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Forum', 'category': 'Website', 'summary': 'Forum, FAQ, Q&A', 'version': '1.0', 'description': """ Ask questions, get answers, no distractions """, 'author': 'OpenERP SA', 'depends': [ 'auth_signup', 'gamification', 'website_mail', 'website_partner' ], 'data': [ 'data/forum_data.xml', 'views/forum.xml', 'views/res_users.xml', 'views/website_forum.xml', 'views/ir_qweb.xml', 'security/ir.model.access.csv', 'data/badges_question.xml', 'data/badges_answer.xml', 'data/badges_participation.xml', 'data/badges_moderation.xml', ], 'qweb': [ 'static/src/xml/*.xml' ], 'demo': [ 'data/forum_demo.xml', ], 'installable': True, 'application': True, }
agpl-3.0
SnappleCap/oh-mainline
vendor/packages/python-openid/examples/djopenid/server/views.py
63
9294
""" This module implements an example server for the OpenID library. Some functionality has been omitted intentionally; this code is intended to be instructive on the use of this library. This server does not perform actual user authentication and serves up only one OpenID URL, with the exception of IDP-generated identifiers. Some code conventions used here: * 'request' is a Django request object. * 'openid_request' is an OpenID library request object. * 'openid_response' is an OpenID library response """ import cgi from djopenid import util from djopenid.util import getViewURL from django import http from django.views.generic.simple import direct_to_template from openid.server.server import Server, ProtocolError, CheckIDRequest, \ EncodingError from openid.server.trustroot import verifyReturnTo from openid.yadis.discover import DiscoveryFailure from openid.consumer.discover import OPENID_IDP_2_0_TYPE from openid.extensions import sreg from openid.extensions import pape from openid.fetchers import HTTPFetchingError def getOpenIDStore(): """ Return an OpenID store object fit for the currently-chosen database backend, if any. """ return util.getOpenIDStore('/tmp/djopenid_s_store', 's_') def getServer(request): """ Get a Server object to perform OpenID authentication. """ return Server(getOpenIDStore(), getViewURL(request, endpoint)) def setRequest(request, openid_request): """ Store the openid request information in the session. """ if openid_request: request.session['openid_request'] = openid_request else: request.session['openid_request'] = None def getRequest(request): """ Get an openid request from the session, if any. """ return request.session.get('openid_request') def server(request): """ Respond to requests for the server's primary web page. 
""" return direct_to_template( request, 'server/index.html', {'user_url': getViewURL(request, idPage), 'server_xrds_url': getViewURL(request, idpXrds), }) def idpXrds(request): """ Respond to requests for the IDP's XRDS document, which is used in IDP-driven identifier selection. """ return util.renderXRDS( request, [OPENID_IDP_2_0_TYPE], [getViewURL(request, endpoint)]) def idPage(request): """ Serve the identity page for OpenID URLs. """ return direct_to_template( request, 'server/idPage.html', {'server_url': getViewURL(request, endpoint)}) def trustPage(request): """ Display the trust page template, which allows the user to decide whether to approve the OpenID verification. """ return direct_to_template( request, 'server/trust.html', {'trust_handler_url':getViewURL(request, processTrustResult)}) def endpoint(request): """ Respond to low-level OpenID protocol messages. """ s = getServer(request) query = util.normalDict(request.GET or request.POST) # First, decode the incoming request into something the OpenID # library can use. try: openid_request = s.decodeRequest(query) except ProtocolError, why: # This means the incoming request was invalid. return direct_to_template( request, 'server/endpoint.html', {'error': str(why)}) # If we did not get a request, display text indicating that this # is an endpoint. if openid_request is None: return direct_to_template( request, 'server/endpoint.html', {}) # We got a request; if the mode is checkid_*, we will handle it by # getting feedback from the user or by checking the session. if openid_request.mode in ["checkid_immediate", "checkid_setup"]: return handleCheckIDRequest(request, openid_request) else: # We got some other kind of OpenID request, so we let the # server handle this. openid_response = s.handleRequest(openid_request) return displayResponse(request, openid_response) def handleCheckIDRequest(request, openid_request): """ Handle checkid_* requests. 
Get input from the user to find out whether she trusts the RP involved. Possibly, get intput about what Simple Registration information, if any, to send in the response. """ # If the request was an IDP-driven identifier selection request # (i.e., the IDP URL was entered at the RP), then return the # default identity URL for this server. In a full-featured # provider, there could be interaction with the user to determine # what URL should be sent. if not openid_request.idSelect(): id_url = getViewURL(request, idPage) # Confirm that this server can actually vouch for that # identifier if id_url != openid_request.identity: # Return an error response error_response = ProtocolError( openid_request.message, "This server cannot verify the URL %r" % (openid_request.identity,)) return displayResponse(request, error_response) if openid_request.immediate: # Always respond with 'cancel' to immediate mode requests # because we don't track information about a logged-in user. # If we did, then the answer would depend on whether that user # had trusted the request's trust root and whether the user is # even logged in. openid_response = openid_request.answer(False) return displayResponse(request, openid_response) else: # Store the incoming request object in the session so we can # get to it later. setRequest(request, openid_request) return showDecidePage(request, openid_request) def showDecidePage(request, openid_request): """ Render a page to the user so a trust decision can be made. @type openid_request: openid.server.server.CheckIDRequest """ trust_root = openid_request.trust_root return_to = openid_request.return_to try: # Stringify because template's ifequal can only compare to strings. 
trust_root_valid = verifyReturnTo(trust_root, return_to) \ and "Valid" or "Invalid" except DiscoveryFailure, err: trust_root_valid = "DISCOVERY_FAILED" except HTTPFetchingError, err: trust_root_valid = "Unreachable" pape_request = pape.Request.fromOpenIDRequest(openid_request) return direct_to_template( request, 'server/trust.html', {'trust_root': trust_root, 'trust_handler_url':getViewURL(request, processTrustResult), 'trust_root_valid': trust_root_valid, 'pape_request': pape_request, }) def processTrustResult(request): """ Handle the result of a trust decision and respond to the RP accordingly. """ # Get the request from the session so we can construct the # appropriate response. openid_request = getRequest(request) # The identifier that this server can vouch for response_identity = getViewURL(request, idPage) # If the decision was to allow the verification, respond # accordingly. allowed = 'allow' in request.POST # Generate a response with the appropriate answer. openid_response = openid_request.answer(allowed, identity=response_identity) # Send Simple Registration data in the response, if appropriate. if allowed: sreg_data = { 'fullname': 'Example User', 'nickname': 'example', 'dob': '1970-01-01', 'email': 'invalid@example.com', 'gender': 'F', 'postcode': '12345', 'country': 'ES', 'language': 'eu', 'timezone': 'America/New_York', } sreg_req = sreg.SRegRequest.fromOpenIDRequest(openid_request) sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data) openid_response.addExtension(sreg_resp) pape_response = pape.Response() pape_response.setAuthLevel(pape.LEVELS_NIST, 0) openid_response.addExtension(pape_response) return displayResponse(request, openid_response) def displayResponse(request, openid_response): """ Display an OpenID response. Errors will be displayed directly to the user; successful responses and other protocol-level messages will be sent using the proper mechanism (i.e., direct response, redirection, etc.). 
""" s = getServer(request) # Encode the response into something that is renderable. try: webresponse = s.encodeResponse(openid_response) except EncodingError, why: # If it couldn't be encoded, display an error. text = why.response.encodeToKVForm() return direct_to_template( request, 'server/endpoint.html', {'error': cgi.escape(text)}) # Construct the appropriate django framework response. r = http.HttpResponse(webresponse.body) r.status_code = webresponse.code for header, value in webresponse.headers.iteritems(): r[header] = value return r
agpl-3.0
HiSPARC/station-software
user/python/Lib/site-packages/pip/_internal/wheel.py
7
32007
""" Support for installing and building the "wheel" binary package format. """ from __future__ import absolute_import import collections import compileall import csv import hashlib import logging import os.path import re import shutil import stat import sys import warnings from base64 import urlsafe_b64encode from email.parser import Parser from pip._vendor import pkg_resources from pip._vendor.distlib.scripts import ScriptMaker from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.six import StringIO from pip._internal import pep425tags from pip._internal.download import path_to_url, unpack_url from pip._internal.exceptions import ( InstallationError, InvalidWheelFilename, UnsupportedWheel, ) from pip._internal.locations import ( PIP_DELETE_MARKER_FILENAME, distutils_scheme, ) from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ( call_subprocess, captured_stdout, ensure_dir, read_chunks, ) from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.ui import open_spinner if MYPY_CHECK_RUNNING: from typing import Dict, List, Optional # noqa: F401 wheel_ext = '.whl' VERSION_COMPATIBLE = (1, 0) logger = logging.getLogger(__name__) def rehash(path, blocksize=1 << 20): """Return (hash, length) for path using hashlib.sha256()""" h = hashlib.sha256() length = 0 with open(path, 'rb') as f: for block in read_chunks(f, size=blocksize): length += len(block) h.update(block) digest = 'sha256=' + urlsafe_b64encode( h.digest() ).decode('latin1').rstrip('=') return (digest, length) def open_for_csv(name, mode): if sys.version_info[0] < 3: nl = {} bin = 'b' else: nl = {'newline': ''} bin = '' return open(name, mode + bin, **nl) def fix_script(path): """Replace #!python with #!/path/to/python Return True if file was changed.""" # XXX RECORD hashes will need to be updated if 
os.path.isfile(path): with open(path, 'rb') as script: firstline = script.readline() if not firstline.startswith(b'#!python'): return False exename = sys.executable.encode(sys.getfilesystemencoding()) firstline = b'#!' + exename + os.linesep.encode("ascii") rest = script.read() with open(path, 'wb') as script: script.write(firstline) script.write(rest) return True dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?) \.dist-info$""", re.VERBOSE) def root_is_purelib(name, wheeldir): """ Return True if the extracted wheel in wheeldir should go into purelib. """ name_folded = name.replace("-", "_") for item in os.listdir(wheeldir): match = dist_info_re.match(item) if match and match.group('name') == name_folded: with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: for line in wheel: line = line.lower().rstrip() if line == "root-is-purelib: true": return True return False def get_entrypoints(filename): if not os.path.exists(filename): return {}, {} # This is done because you can pass a string to entry_points wrappers which # means that they may or may not be valid INI files. The attempt here is to # strip leading and trailing whitespace in order to make them valid INI # files. 
with open(filename) as fp: data = StringIO() for line in fp: data.write(line.strip()) data.write("\n") data.seek(0) # get the entry points and then the script names entry_points = pkg_resources.EntryPoint.parse_map(data) console = entry_points.get('console_scripts', {}) gui = entry_points.get('gui_scripts', {}) def _split_ep(s): """get the string representation of EntryPoint, remove space and split on '='""" return str(s).replace(" ", "").split("=") # convert the EntryPoint objects into strings with module:function console = dict(_split_ep(v) for v in console.values()) gui = dict(_split_ep(v) for v in gui.values()) return console, gui def message_about_scripts_not_on_PATH(scripts): # type: (List[str]) -> Optional[str] """Determine if any scripts are not on PATH and format a warning. Returns a warning message if one or more scripts are not on PATH, otherwise None. """ if not scripts: return None # Group scripts by the path they were installed in grouped_by_dir = collections.defaultdict(set) # type: Dict[str, set] for destfile in scripts: parent_dir = os.path.dirname(destfile) script_name = os.path.basename(destfile) grouped_by_dir[parent_dir].add(script_name) # We don't want to warn for directories that are on PATH. not_warn_dirs = [ os.path.normcase(i).rstrip(os.sep) for i in os.environ.get("PATH", "").split(os.pathsep) ] # If an executable sits with sys.executable, we don't warn for it. # This covers the case of venv invocations without activating the venv. 
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    # Only warn about directories that hold scripts and are not on PATH
    # (and are not explicitly exempted above).
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, scripts in warn_for.items():
        scripts = sorted(scripts)
        if len(scripts) == 1:
            start_text = "script {} is".format(scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(scripts[:-1]) + " and " + scripts[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     warn_script_location=True):
    """Install a wheel.

    Copies the unpacked wheel contents from ``wheeldir`` into the
    appropriate install scheme directories, generates console/GUI script
    wrappers for the wheel's entry points, and rewrites the RECORD and
    INSTALLER metadata files in the installed ``.dist-info`` directory.
    """
    # Resolve the install scheme (purelib/platlib/scripts/...) unless the
    # caller supplied one explicitly.
    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        # Paths in RECORD are '/'-separated and relative.
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        """Copy the tree under ``source`` into ``dest``.

        ``is_base`` marks the top-level copy (where .data/.dist-info dirs
        are detected); ``fixer`` post-processes each copied file (e.g.
        shebang rewriting) and ``filter`` skips unwanted files.
        """
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install the contents of each <pkg>.data subdirectory into its
    # matching scheme location (scripts get shebang fixing).
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3],
                                         easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            outrows = []
            for row in reader:
                # Rewrite archive paths to installed paths; rehash any file
                # that was modified during installation (e.g. shebang fixes).
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                outrows.append(tuple(row))
            for f in generated:
                digest, length = rehash(f)
                outrows.append((normpath(f, lib_dir), digest, length))
            for f in installed:
                outrows.append((installed[f], '', ''))
            for row in sorted(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # Broad by design: any parse/lookup failure means "unknown version".
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )


class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported
        tags, and one of the file tags is first in the list, then return 0.

        Returns None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return bool(set(tags).intersection(self.file_tags))


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, finder, preparer, wheel_cache,
                 build_options=None, global_options=None, no_clean=False):
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        self._wheel_dir = preparer.wheel_download_dir

        self.build_options = build_options or []
        self.global_options = global_options or []
        self.no_clean = no_clean

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        # Build into a temp dir first, then move the single resulting wheel
        # into ``output_dir``; on any failure, clean up and return None.
        with TempDirectory(kind="wheel") as temp_dir:
            if self.__build_one(req, temp_dir.path, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(temp_dir.path)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(
                        os.path.join(temp_dir.path, wheel_name), wheel_path
                    )
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None

    def _base_setup_args(self, req):
        # NOTE: Eventually, we'd want to also -S to the flags here, when we're
        # isolating. Currently, it breaks Python in virtualenvs, because it
        # relies on site.py to find parts of the standard library outside the
        # virtualenv.
        return [
            sys.executable, '-u', '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        # Run ``setup.py bdist_wheel`` for ``req`` into ``tempd``; return
        # True on success, False on any failure.
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        # Best-effort ``setup.py clean --all`` after a failed build.
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir,
                            show_stdout=False)
            return True
        except Exception:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param requirements: the requirements to consider for building.
        :param session: the network session used when unpacking rebuilt
            wheels back into the requirement's source dir.
        :param autobuilding: if True, wheels are built into the wheel cache
            (possibly ephemeral) and each built requirement is re-unpacked
            from its wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index
        from pip._internal.models.link import Link

        building_is_possible = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        buildset = []
        format_control = self.finder.format_control
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in format_control.get_allowed_formats(
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
gpl-3.0
HousekeepLtd/django
tests/null_fk/models.py
282
1422
""" Regression tests for proper working of ForeignKey(null=True). """ from django.db import models from django.utils.encoding import python_2_unicode_compatible class SystemDetails(models.Model): details = models.TextField() class SystemInfo(models.Model): system_details = models.ForeignKey(SystemDetails, models.CASCADE) system_name = models.CharField(max_length=32) class Forum(models.Model): system_info = models.ForeignKey(SystemInfo, models.CASCADE) forum_name = models.CharField(max_length=32) @python_2_unicode_compatible class Post(models.Model): forum = models.ForeignKey(Forum, models.SET_NULL, null=True) title = models.CharField(max_length=32) def __str__(self): return self.title @python_2_unicode_compatible class Comment(models.Model): post = models.ForeignKey(Post, models.SET_NULL, null=True) comment_text = models.CharField(max_length=250) class Meta: ordering = ('comment_text',) def __str__(self): return self.comment_text # Ticket 15823 class Item(models.Model): title = models.CharField(max_length=100) class PropertyValue(models.Model): label = models.CharField(max_length=100) class Property(models.Model): item = models.ForeignKey(Item, models.CASCADE, related_name='props') key = models.CharField(max_length=100) value = models.ForeignKey(PropertyValue, models.SET_NULL, null=True)
bsd-3-clause
bjwbell/servo
tests/wpt/css-tests/tools/html5lib/html5lib/tests/test_sanitizer.py
430
4645
from __future__ import absolute_import, division, unicode_literals

try:
    import json
except ImportError:
    import simplejson as json

from html5lib import html5parser, sanitizer, constants, treebuilders


def toxmlFactory():
    """Return a serializer mapping a parsed element back to unicode XML."""
    tree = treebuilders.getTreeBuilder("etree")

    def toxml(element):
        # encode/decode roundtrip required for Python 2.6 compatibility
        result_bytes = tree.implementation.tostring(element, encoding="utf-8")
        return result_bytes.decode("utf-8")

    return toxml


def runSanitizerTest(name, expected, input, toxml=None):
    """Assert that sanitizing ``input`` produces ``expected`` markup."""
    if toxml is None:
        toxml = toxmlFactory()
    expected = ''.join([toxml(token) for token in html5parser.HTMLParser().
                        parseFragment(expected)])
    # json round-trip normalizes the expected string representation
    expected = json.loads(json.dumps(expected))
    assert expected == sanitize_html(input)


def sanitize_html(stream, toxml=None):
    """Parse ``stream`` with the sanitizing tokenizer and re-serialize it."""
    if toxml is None:
        toxml = toxmlFactory()
    return ''.join([toxml(token) for token in
                    html5parser.HTMLParser(tokenizer=sanitizer.HTMLSanitizer).
                    parseFragment(stream)])


def test_should_handle_astral_plane_characters():
    assert '<html:p xmlns:html="http://www.w3.org/1999/xhtml">\U0001d4b5 \U0001d538</html:p>' == sanitize_html("<p>&#x1d4b5; &#x1d538;</p>")


def test_sanitizer():
    # Nose-style test generator: yields one (func, *args) case per allowed
    # element, attribute and protocol.
    toxml = toxmlFactory()
    for tag_name in sanitizer.HTMLSanitizer.allowed_elements:
        if tag_name in ['caption', 'col', 'colgroup', 'optgroup', 'option',
                        'table', 'tbody', 'td', 'tfoot', 'th', 'thead', 'tr']:
            continue  # TODO
        if tag_name != tag_name.lower():
            continue  # TODO
        if tag_name == 'image':
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<img title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz",
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name,
                                                                  tag_name),
                   toxml)
        elif tag_name == 'br':
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<br title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz<br/>",
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name,
                                                                  tag_name),
                   toxml)
        elif tag_name in constants.voidElements:
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<%s title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz" % tag_name,
                   "<%s title='1'>foo <bad>bad</bad> baz</%s>" % (tag_name,
                                                                  tag_name),
                   toxml) if False else (runSanitizerTest,
                   "test_should_allow_%s_tag" % tag_name,
                   "<%s title=\"1\"/>foo &lt;bad&gt;bar&lt;/bad&gt; baz" % tag_name,
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name,
                                                                  tag_name),
                   toxml)
        else:
            yield (runSanitizerTest, "test_should_allow_%s_tag" % tag_name,
                   "<%s title=\"1\">foo &lt;bad&gt;bar&lt;/bad&gt; baz</%s>" % (tag_name,
                                                                                tag_name),
                   "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name,
                                                                  tag_name),
                   toxml)

    for tag_name in sanitizer.HTMLSanitizer.allowed_elements:
        tag_name = tag_name.upper()
        yield (runSanitizerTest, "test_should_forbid_%s_tag" % tag_name,
               "&lt;%s title=\"1\"&gt;foo &lt;bad&gt;bar&lt;/bad&gt; baz&lt;/%s&gt;" % (tag_name, tag_name),
               "<%s title='1'>foo <bad>bar</bad> baz</%s>" % (tag_name,
                                                              tag_name),
               toxml)

    for attribute_name in sanitizer.HTMLSanitizer.allowed_attributes:
        if attribute_name != attribute_name.lower():
            continue  # TODO
        if attribute_name == 'style':
            continue
        yield (runSanitizerTest,
               "test_should_allow_%s_attribute" % attribute_name,
               "<p %s=\"foo\">foo &lt;bad&gt;bar&lt;/bad&gt; baz</p>" % attribute_name,
               "<p %s='foo'>foo <bad>bar</bad> baz</p>" % attribute_name,
               toxml)

    for attribute_name in sanitizer.HTMLSanitizer.allowed_attributes:
        attribute_name = attribute_name.upper()
        yield (runSanitizerTest,
               "test_should_forbid_%s_attribute" % attribute_name,
               "<p>foo &lt;bad&gt;bar&lt;/bad&gt; baz</p>",
               "<p %s='display: none;'>foo <bad>bar</bad> baz</p>" % attribute_name,
               toxml)

    for protocol in sanitizer.HTMLSanitizer.allowed_protocols:
        yield (runSanitizerTest, "test_should_allow_%s_uris" % protocol,
               "<a href=\"%s\">foo</a>" % protocol,
               """<a href="%s">foo</a>""" % protocol,
               toxml)

    for protocol in sanitizer.HTMLSanitizer.allowed_protocols:
        yield (runSanitizerTest,
               "test_should_allow_uppercase_%s_uris" % protocol,
               "<a href=\"%s\">foo</a>" % protocol,
               """<a href="%s">foo</a>""" % protocol,
               toxml)
mpl-2.0
JamesMura/sentry
src/sentry/lang/native/systemsymbols.py
2
1561
from __future__ import absolute_import

import logging

from requests.exceptions import RequestException

from sentry import options
from sentry.http import Session
from sentry.lang.native.utils import sdk_info_to_sdk_id

# Maximum number of POST attempts before giving up.
MAX_ATTEMPTS = 3

logger = logging.getLogger(__name__)


def lookup_system_symbols(symbols, sdk_info=None, cpu_name=None):
    """Looks for system symbols in the configured system server if enabled.
    If this fails or the server is disabled, `None` is returned.

    :param symbols: the symbol queries to send to the symbol server.
    :param sdk_info: optional SDK info dict, converted to an SDK id.
    :param cpu_name: optional CPU architecture name.
    :return: the list of resolved symbols, or ``None`` on failure/disabled.
    """
    if not options.get('symbolserver.enabled'):
        return

    url = '%s/lookup' % options.get('symbolserver.options')['url'].rstrip('/')
    sess = Session()
    symbol_query = {
        'sdk_id': sdk_info_to_sdk_id(sdk_info),
        'cpu_name': cpu_name,
        'symbols': symbols,
    }

    attempts = 0

    with sess:
        while 1:
            try:
                rv = sess.post(url, json=symbol_query)
                # If the symbols server does not know about the SDK at all
                # it will report a 404 here.  In that case just assume
                # that we did not find a match and do not retry.
                if rv.status_code == 404:
                    return None
                rv.raise_for_status()
                return rv.json()['symbols']
            except (IOError, RequestException):
                attempts += 1
                # Bug fix: the original test was ``attempts > MAX_ATTEMPTS``,
                # which allowed MAX_ATTEMPTS + 1 total attempts; ``>=`` caps
                # the total at MAX_ATTEMPTS as the constant's name implies.
                if attempts >= MAX_ATTEMPTS:
                    logger.error('Failed to contact system symbol server',
                                 exc_info=True)
                    return
bsd-3-clause
shrimpboyho/git.js
emscript/python/2.7.5.1_32bit/Lib/hotshot/__init__.py
215
2670
"""High-perfomance logging profiler, mostly written in C.""" import _hotshot from _hotshot import ProfilerError from warnings import warnpy3k as _warnpy3k _warnpy3k("The 'hotshot' module is not supported in 3.x, " "use the 'profile' module instead.", stacklevel=2) class Profile: def __init__(self, logfn, lineevents=0, linetimings=1): self.lineevents = lineevents and 1 or 0 self.linetimings = (linetimings and lineevents) and 1 or 0 self._prof = p = _hotshot.profiler( logfn, self.lineevents, self.linetimings) # Attempt to avoid confusing results caused by the presence of # Python wrappers around these functions, but only if we can # be sure the methods have not been overridden or extended. if self.__class__ is Profile: self.close = p.close self.start = p.start self.stop = p.stop self.addinfo = p.addinfo def close(self): """Close the logfile and terminate the profiler.""" self._prof.close() def fileno(self): """Return the file descriptor of the profiler's log file.""" return self._prof.fileno() def start(self): """Start the profiler.""" self._prof.start() def stop(self): """Stop the profiler.""" self._prof.stop() def addinfo(self, key, value): """Add an arbitrary labelled value to the profile log.""" self._prof.addinfo(key, value) # These methods offer the same interface as the profile.Profile class, # but delegate most of the work to the C implementation underneath. def run(self, cmd): """Profile an exec-compatible string in the script environment. The globals from the __main__ module are used as both the globals and locals for the script. """ import __main__ dict = __main__.__dict__ return self.runctx(cmd, dict, dict) def runctx(self, cmd, globals, locals): """Evaluate an exec-compatible string in a specific environment. The string is compiled before profiling begins. """ code = compile(cmd, "<string>", "exec") self._prof.runcode(code, globals, locals) return self def runcall(self, func, *args, **kw): """Profile a single call of a callable. 
Additional positional and keyword arguments may be passed along; the result of the call is returned, and exceptions are allowed to propogate cleanly, while ensuring that profiling is disabled on the way out. """ return self._prof.runcall(func, args, kw)
gpl-2.0
derks/cement
cement/core/arg.py
1
3461
""" Cement core argument module. """ from ..core import backend, exc, interface, handler Log = backend.minimal_logger(__name__) def argument_validator(klass, obj): """Validates a handler implementation against the IArgument interface.""" members = [ '_setup', 'parse', 'parsed_args', 'add_argument', ] interface.validate(IArgument, obj, members) class IArgument(interface.Interface): """ This class defines the Argument Handler Interface. Classes that implement this handler must provide the methods and attributes defined below. Implementations do *not* subclass from interfaces. Example: .. code-block:: python from cement.core import interface, arg class MyArgumentHandler(arg.CementArgumentHandler): class Meta: interface = arg.IArgument label = 'my_argument_handler' """ class IMeta: label = 'argument' validator = argument_validator # Must be provided by the implementation Meta = interface.Attribute('Handler Meta-data') parsed_args = interface.Attribute('Parsed args object') def _setup(app_obj): """ The _setup function is called during application initialization and must 'setup' the handler object making it ready for the framework or the application to make further calls to it. Required Arguments: app_obj The application object. Return: None """ def add_argument(self, *args, **kw): """ Add arguments for parsing. This should be -o/--option or positional. Positional Arguments: args List of option arguments. Generally something like ['-h', '--help']. Optional Arguments dest The destination name (var). Default: arg[0]'s string. help The help text for --help output (for that argument). action Must support: ['store', 'store_true', 'store_false', 'store_const'] const The value stored if action == 'store_const'. default The default value. Return: None """ def parse(self, arg_list): """ Parse the argument list (i.e. sys.argv). Can return any object as long as it's members contain those of the added arguments. 
For example, if adding a '-v/--version' option that stores to the dest of 'version', then the member must be callable as 'Object().version'. Must also set self.parsed_args to what is being returned. Required Arguments: arg_list A list of command line arguments. Return: Callable """ class CementArgumentHandler(handler.CementBaseHandler): """ Base class that all Argument Handlers should sub-class from. """ class Meta: label = None interface = IArgument def __init__(self, *args, **kw): super(CementArgumentHandler, self).__init__(*args, **kw)
bsd-3-clause
eayunstack/neutron
neutron/services/qos/drivers/sriov/driver.py
2
2128
# Copyright (c) 2016 Red Hat Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.api.definitions import portbindings from neutron_lib import constants from neutron_lib.db import constants as db_consts from neutron_lib.services.qos import base from neutron_lib.services.qos import constants as qos_consts from oslo_log import log as logging LOG = logging.getLogger(__name__) DRIVER = None SUPPORTED_RULES = { qos_consts.RULE_TYPE_BANDWIDTH_LIMIT: { qos_consts.MAX_KBPS: { 'type:range': [0, db_consts.DB_INTEGER_MAX_VALUE]}, qos_consts.MAX_BURST: { 'type:range': [0, db_consts.DB_INTEGER_MAX_VALUE]}, qos_consts.DIRECTION: { 'type:values': [constants.EGRESS_DIRECTION]} }, qos_consts.RULE_TYPE_MINIMUM_BANDWIDTH: { qos_consts.MIN_KBPS: { 'type:range': [0, db_consts.DB_INTEGER_MAX_VALUE]}, qos_consts.DIRECTION: {'type:values': [constants.EGRESS_DIRECTION]} } } class SRIOVNICSwitchDriver(base.DriverBase): @staticmethod def create(): return SRIOVNICSwitchDriver( name='sriovnicswitch', vif_types=[portbindings.VIF_TYPE_HW_VEB], vnic_types=[portbindings.VNIC_DIRECT, portbindings.VNIC_MACVTAP], supported_rules=SUPPORTED_RULES, requires_rpc_notifications=True) def register(): """Register the driver.""" global DRIVER if not DRIVER: DRIVER = SRIOVNICSwitchDriver.create() LOG.debug('SR-IOV NIC Switch QoS driver registered')
apache-2.0
JCardenasRdz/Machine-Learning-4-MRI
Imaging_of_pH/2-Isovue/Code/Python/BrukerMRI.py
4
13156
# pylint: disable-msg=C0103 """This should at some point be a library with functions to import and reconstruct Bruker MRI data. 2014, Joerg Doepfert """ import numpy as np # *********************************************************** # class definition # *********************************************************** class BrukerData: """Class to store and process data of a Bruker MRI Experiment""" def __init__(self, path="", ExpNum=0, B0=9.4): self.method = {} self.acqp = {} self.reco = {} self.raw_fid = np.array([]) self.proc_data = np.array([]) self.k_data = np.array([]) self.reco_data = np.array([]) self.reco_data_norm = np.array([]) # normalized reco self.B0 = B0 # only needed for UFZ method self.GyroRatio = 0 # only needed for UFZ method self.ConvFreqsFactor = 0 # reference to convert Hz <--> ppm self.path = path self.ExpNum = ExpNum def GenerateKspace(self): """Reorder the data in raw_fid to a valid k-space.""" if self.method == {}: raise NameError('No experiment loaded') elif self.method["Method"] == 'jd_UFZ_RAREst': self.k_data = self._GenKspace_UFZ_RARE() elif (self.method["Method"] == 'FLASH' or self.method["Method"] == 'mic_flash'): self.k_data = self._GenKspace_FLASH() else: raise NameError("Unknown method") def ReconstructKspace(self, **kwargs): """Transform the kspace data to image space. If it does not yet exist, generate it from the raw fid. Keyword arguments [**kwargs] can be supplied for some methods: All methods: - KspaceCutoffIdx: list lines to be set to zero in kspace prior to FT reconstruction jd_UFZ_RARExx: - NEchoes: Number of Echoes to be averaged. If NEchoes="opt", then the optimum number of echoes is calculated. If NEchoes=0, then all echoes are averaged. 
""" # Generate k_data prior to reconstruction, if it does not yet # exist if self.k_data.size == 0: self.GenerateKspace() self._ReconstructKspace_(**kwargs) else: self._ReconstructKspace_(**kwargs) return self.reco_data def _ReconstructKspace_(self, **kwargs): """Select which function to use for the reco, depending on the method.""" if self.method["Method"] == 'jd_UFZ_RAREst': self._Reco_UFZ_RARE(**kwargs) elif (self.method["Method"] == 'FLASH' or self.method["Method"] == 'mic_flash'): self. _Reco_FLASH(**kwargs) else: raise NameError("Unknown method") # *********************************************************** # method specific reordering and reco functions start here # *********************************************************** def _GenKspace_FLASH(self): complexValues = self.raw_fid NScans = (self.acqp["NI"] # no. of images * self.acqp["NAE"] # no. of experiments * self.acqp["NA"] # no. of averages * self.acqp["NR"]) # no. of repetitions Matrix = self.method["PVM_Matrix"] kSpace = np.reshape(complexValues, (-1,Matrix[0]), order="F") kSpace = np.reshape(kSpace, (-1, Matrix[0], Matrix[1])) kSpace = np.transpose(kSpace, (1,2,0)) return kSpace def _Reco_FLASH(self, **kwargs): k_data = self.k_data reco_data = np.zeros(k_data.shape) for i in range(0,self.k_data.shape[2]): reco_data[:,:,i] = abs(fft_image(self.k_data[:,:,i])) self.reco_data = reco_data def _GenKspace_UFZ_RARE(self): complexValues = self.raw_fid complexValues = RemoveVoidEntries(complexValues, self.acqp["ACQ_size"][0]) NEchoes = self.method["CEST_Number_Echoes"] NPoints = self.method["CEST_Number_SatFreqs"] NScans = self.method["PVM_NRepetitions"] return np.reshape(complexValues, (NPoints, NEchoes, NScans), order="F") def _Reco_UFZ_RARE(self, **kwargs): # use pop to set default values KspaceCutoffIdx = kwargs.pop("KspaceCutoffIdx", []) NEchoes = kwargs.pop("NEchoes", "opt") NScans = self.method["PVM_NRepetitions"] NPoints = self.method["CEST_Number_SatFreqs"] NRecoEchoes = np.ones(NScans, 
dtype=np.int) # Determine how many echoes should be averaged if NEchoes == "opt": # calc opt num of echoes to be averaged # choose to look at real, imag, or abs part of kspace Data = self.k_data.real # find the indizes of maximum kspace signal MaxIndizes = [] MaxIndizes.append(np.argmax(Data[:, 0, 0])) MaxIndizes.append(MaxIndizes[0] + 1 - 2*(Data[MaxIndizes[0]-1, 0, 0] > Data[MaxIndizes[0]+1, 0, 0])) # calc max of kspace echoes based on these indizes MaxEchoSignals = np.sum(Data[MaxIndizes, :, :], axis=0) # now calc opt num of echoes for each scan for i in range(0, NScans): NRecoEchoes[i] = CalcOptNEchoes(MaxEchoSignals[:, i]) # make sure that off and on scan have same amount of # NRecoEchoes, i.e. echoes to be averaged if self.method["CEST_AcqMode"] == "On_and_Off_Scan": NRecoEchoes[1::2] = NRecoEchoes[0::2] elif NEchoes == 0: # take all echoes NRecoEchoes = NRecoEchoes*self.method["CEST_Number_Echoes"] else: # take number given by user NRecoEchoes = NRecoEchoes*NEchoes # average the echoes KspaceAveraged = np.zeros((NPoints, NScans), dtype=complex) for i in range(0, NScans): RecoEchoes = range(0, NRecoEchoes[i]) KspaceAveraged[:, i] = np.mean( self.k_data[:, RecoEchoes, i], axis=1) KspaceAveraged[KspaceCutoffIdx, i] = 0 # save reco as FFT of the averaged kspace data self.reco_data, _ = FFT_center(KspaceAveraged) # normalize the data if possible if self.method["CEST_AcqMode"] == "On_and_Off_Scan": self.reco_data_norm = np.divide(abs(self.reco_data[:,1::2]), abs(self.reco_data[:,0::2])) # *********************************************************** # Functions # *********************************************************** def ReadExperiment(path, ExpNum): """Read in a Bruker MRI Experiment. Returns raw data, processed data, and method and acqp parameters in a dictionary. 
""" data = BrukerData(path, ExpNum) # parameter files data.method = ReadParamFile(path + str(ExpNum) + "/method") data.acqp = ReadParamFile(path + str(ExpNum) + "/acqp") data.reco = ReadParamFile(path + str(ExpNum) + "/pdata/1/reco") # processed data data.proc_data = ReadProcessedData(path + str(ExpNum) + "/pdata/1/2dseq", data.reco, data.acqp) # generate complex FID raw_data = ReadRawData(path + str(ExpNum) + "/fid") data.raw_fid = raw_data[0::2] + 1j * raw_data[1::2] # calculate GyroRatio and ConvFreqsFactor data.GyroRatio = data.acqp["SFO1"]*2*np.pi/data.B0*10**6 # in rad/Ts data.ConvFreqsFactor = 1/(data.GyroRatio*data.B0/10**6/2/np.pi) data.path = path data.ExpNum =ExpNum return data def CalcOptNEchoes(s): """Find out how many echoes in an echo train [s] have to be included into an averaging operation, such that the signal to noise (SNR) of the resulting averaged signal is maximized. Based on the formula shown in the supporting information of the [Doepfert et al. ChemPhysChem, 15(2), 261-264, 2014] """ # init vars s_sum = np.zeros(len(s)) s_sum[0] = s[0] TestFn = np.zeros(len(s)) SNR_averaged = np.zeros(len(s)) # not needed for calculation count = 1 for n in range(2, len(s)+1): SNR_averaged = np.sum(s[0:n] / np.sqrt(n)) s_sum[n-1] = s[n-1] + s_sum[n-2] TestFn[n-1] = s_sum[n-2]*(np.sqrt(float(n)/(float(n)-1))-1) if s[n-1] < TestFn[n-1]: break count += 1 return count def FFT_center(Kspace, sampling_rate=1, ax=0): """Calculate FFT of a time domain signal and shift the spectrum so that the center frequency is in the center. Additionally return the frequency axis, provided the right sampling frequency is given. If the data is 2D, then the FFT is performed succesively along an axis [ax]. 
""" FT = np.fft.fft(Kspace, axis=ax) spectrum = np.fft.fftshift(FT, axes=ax) n = FT.shape[ax] freq_axis = np.fft.fftshift( np.fft.fftfreq(n, 1/float(sampling_rate))) return spectrum, freq_axis def fft_image(Kspace): return np.fft.fftshift(np.fft.fft2(Kspace)) def RemoveVoidEntries(datavector, acqsize0): blocksize = int(np.ceil(float(acqsize0)/2/128)*128) DelIdx = [] for i in range(0, len(datavector)/blocksize): DelIdx.append(range(i * blocksize + acqsize0/2, (i + 1) * blocksize)) return np.delete(datavector, DelIdx) def ReadRawData(filepath): with open(filepath, "r") as f: return np.fromfile(f, dtype=np.int32) def ReadProcessedData(filepath, reco, acqp): with open(filepath, "r") as f: data = np.fromfile(f, dtype=np.int16) data = data.reshape(reco["RECO_size"][0], reco["RECO_size"][1], -1, order="F") if data.ndim == 3: data_length = data.shape[2] else: data_length = 1 data_reshaped = np.zeros([data.shape[1], data.shape[0], data_length]) for i in range(0, data_length): data_reshaped[:, :, i] = np.rot90(data[:, :, i]) return data_reshaped def ReadParamFile(filepath): """ Read a Bruker MRI experiment's method or acqp file to a dictionary. 
""" param_dict = {} with open(filepath, "r") as f: while True: line = f.readline() if not line: break # when line contains parameter if line.startswith('##$'): (param_name, current_line) = line[3:].split('=') # split at "=" # if current entry (current_line) is arraysize if current_line[0:2] == "( " and current_line[-3:-1] == " )": value = ParseArray(f, current_line) # if current entry (current_line) is struct/list elif current_line[0] == "(" and current_line[-3:-1] != " )": # if neccessary read in multiple lines while current_line[-2] != ")": current_line = current_line[0:-1] + f.readline() # parse the values to a list value = [ParseSingleValue(x) for x in current_line[1:-2].split(', ')] # otherwise current entry must be single string or number else: value = ParseSingleValue(current_line) # save parsed value to dict param_dict[param_name] = value return param_dict def ParseArray(current_file, line): # extract the arraysize and convert it to numpy line = line[1:-2].replace(" ", "").split(",") arraysize = np.array([int(x) for x in line]) # then extract the next line vallist = current_file.readline().split() # if the line was a string, then return it directly try: float(vallist[0]) except ValueError: return " ".join(vallist) # include potentially multiple lines while len(vallist) != np.prod(arraysize): vallist = vallist + current_file.readline().split() # try converting to int, if error, then to float try: vallist = [int(x) for x in vallist] except ValueError: vallist = [float(x) for x in vallist] # convert to numpy array if len(vallist) > 1: return np.reshape(np.array(vallist), arraysize) # or to plain number else: return vallist[0] def ParseSingleValue(val): try: # check if int result = int(val) except ValueError: try: # then check if float result = float(val) except ValueError: # if not, should be string. Remove newline character. 
result = val.rstrip('\n') return result # *********************************************************** # ----------------------------------------------------------- # *********************************************************** if __name__ == '__main__': pass
mit
meredith-digops/ansible
lib/ansible/modules/storage/netapp/netapp_e_storagepool.py
63
39794
#!/usr/bin/python # (c) 2016, NetApp, Inc # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: netapp_e_storagepool short_description: Manage disk groups and disk pools version_added: '2.2' description: - Create or remove disk groups and disk pools for NetApp E-series storage arrays. options: api_username: required: true description: - The username to authenticate with the SANtricity WebServices Proxy or embedded REST API. api_password: required: true description: - The password to authenticate with the SANtricity WebServices Proxy or embedded REST API. api_url: required: true description: - The url to the SANtricity WebServices Proxy or embedded REST API. validate_certs: required: false default: true description: - Should https certificates be validated? ssid: required: true description: - The ID of the array to manage (as configured on the web services proxy). state: required: true description: - Whether the specified storage pool should exist or not. - Note that removing a storage pool currently requires the removal of all defined volumes first. choices: ['present', 'absent'] name: required: true description: - The name of the storage pool to manage criteria_drive_count: description: - The number of disks to use for building the storage pool. 
The pool will be expanded if this number exceeds the number of disks already in place criteria_drive_type: description: - The type of disk (hdd or ssd) to use when searching for candidates to use. choices: ['hdd','ssd'] criteria_size_unit: description: - The unit used to interpret size parameters choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'] default: 'gb' criteria_drive_min_size: description: - The minimum individual drive size (in size_unit) to consider when choosing drives for the storage pool. criteria_min_usable_capacity: description: - The minimum size of the storage pool (in size_unit). The pool will be expanded if this value exceeds itscurrent size. criteria_drive_interface_type: description: - The interface type to use when selecting drives for the storage pool (no value means all interface types will be considered) choices: ['sas', 'sas4k', 'fibre', 'fibre520b', 'scsi', 'sata', 'pata'] criteria_drive_require_fde: description: - Whether full disk encryption ability is required for drives to be added to the storage pool raid_level: required: true choices: ['raidAll', 'raid0', 'raid1', 'raid3', 'raid5', 'raid6', 'raidDiskPool'] description: - "Only required when the requested state is 'present'. The RAID level of the storage pool to be created." erase_secured_drives: required: false choices: ['true', 'false'] description: - Whether to erase secured disks before adding to storage pool secure_pool: required: false choices: ['true', 'false'] description: - Whether to convert to a secure storage pool. Will only work if all drives in the pool are security capable. reserve_drive_count: required: false description: - Set the number of drives reserved by the storage pool for reconstruction operations. Only valide on raid disk pools. remove_volumes: required: false default: False description: - Prior to removing a storage pool, delete all volumes in the pool. 
author: Kevin Hulquest (@hulquest) ''' EXAMPLES = ''' - name: No disk groups netapp_e_storagepool: ssid: "{{ ssid }}" name: "{{ item }}" state: absent api_url: "{{ netapp_api_url }}" api_username: "{{ netapp_api_username }}" api_password: "{{ netapp_api_password }}" validate_certs: "{{ netapp_api_validate_certs }}" ''' RETURN = ''' msg: description: Success message returned: success type: string sample: Json facts for the pool that was created. ''' import json import logging from traceback import format_exc from ansible.module_utils.api import basic_auth_argument_spec from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.pycompat24 import get_exception from ansible.module_utils.urls import open_url from ansible.module_utils.six.moves.urllib.error import HTTPError def request(url, data=None, headers=None, method='GET', use_proxy=True, force=False, last_mod_time=None, timeout=10, validate_certs=True, url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False): try: r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy, force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs, url_username=url_username, url_password=url_password, http_agent=http_agent, force_basic_auth=force_basic_auth) except HTTPError: err = get_exception() r = err.fp try: raw_data = r.read() if raw_data: data = json.loads(raw_data) else: raw_data = None except: if ignore_errors: pass else: raise Exception(raw_data) resp_code = r.getcode() if resp_code >= 400 and not ignore_errors: raise Exception(resp_code, data) else: return resp_code, data def select(predicate, iterable): # python 2, 3 generic filtering. if predicate is None: predicate = bool for x in iterable: if predicate(x): yield x class groupby(object): # python 2, 3 generic grouping. 
def __init__(self, iterable, key=None): if key is None: key = lambda x: x self.keyfunc = key self.it = iter(iterable) self.tgtkey = self.currkey = self.currvalue = object() def __iter__(self): return self def next(self): while self.currkey == self.tgtkey: self.currvalue = next(self.it) # Exit on StopIteration self.currkey = self.keyfunc(self.currvalue) self.tgtkey = self.currkey return (self.currkey, self._grouper(self.tgtkey)) def _grouper(self, tgtkey): while self.currkey == tgtkey: yield self.currvalue self.currvalue = next(self.it) # Exit on StopIteration self.currkey = self.keyfunc(self.currvalue) class NetAppESeriesStoragePool(object): def __init__(self): self._sp_drives_cached = None self._size_unit_map = dict( bytes=1, b=1, kb=1024, mb=1024 ** 2, gb=1024 ** 3, tb=1024 ** 4, pb=1024 ** 5, eb=1024 ** 6, zb=1024 ** 7, yb=1024 ** 8 ) argument_spec = basic_auth_argument_spec() argument_spec.update(dict( api_username=dict(type='str', required=True), api_password=dict(type='str', required=True, no_log=True), api_url=dict(type='str', required=True), state=dict(required=True, choices=['present', 'absent'], type='str'), ssid=dict(required=True, type='str'), name=dict(required=True, type='str'), criteria_size_unit=dict(default='gb', type='str'), criteria_drive_count=dict(type='int'), criteria_drive_interface_type=dict(choices=['sas', 'sas4k', 'fibre', 'fibre520b', 'scsi', 'sata', 'pata'], type='str'), criteria_drive_type=dict(choices=['ssd', 'hdd'], type='str'), criteria_drive_min_size=dict(type='int'), criteria_drive_require_fde=dict(type='bool'), criteria_min_usable_capacity=dict(type='int'), raid_level=dict( choices=['raidUnsupported', 'raidAll', 'raid0', 'raid1', 'raid3', 'raid5', 'raid6', 'raidDiskPool']), erase_secured_drives=dict(type='bool'), log_path=dict(type='str'), remove_drives=dict(type='list'), secure_pool=dict(type='bool', default=False), reserve_drive_count=dict(type='int'), remove_volumes=dict(type='bool', default=False) )) self.module = 
AnsibleModule( argument_spec=argument_spec, required_if=[ ('state', 'present', ['raid_level']) ], mutually_exclusive=[ ], # TODO: update validation for various selection criteria supports_check_mode=True ) p = self.module.params log_path = p['log_path'] # logging setup self._logger = logging.getLogger(self.__class__.__name__) self.debug = self._logger.debug if log_path: logging.basicConfig(level=logging.DEBUG, filename=log_path) self.state = p['state'] self.ssid = p['ssid'] self.name = p['name'] self.validate_certs = p['validate_certs'] self.criteria_drive_count = p['criteria_drive_count'] self.criteria_drive_type = p['criteria_drive_type'] self.criteria_size_unit = p['criteria_size_unit'] self.criteria_drive_min_size = p['criteria_drive_min_size'] self.criteria_min_usable_capacity = p['criteria_min_usable_capacity'] self.criteria_drive_interface_type = p['criteria_drive_interface_type'] self.criteria_drive_require_fde = p['criteria_drive_require_fde'] self.raid_level = p['raid_level'] self.erase_secured_drives = p['erase_secured_drives'] self.remove_drives = p['remove_drives'] self.secure_pool = p['secure_pool'] self.reserve_drive_count = p['reserve_drive_count'] self.remove_volumes = p['remove_volumes'] try: self.api_usr = p['api_username'] self.api_pwd = p['api_password'] self.api_url = p['api_url'] except KeyError: self.module.fail_json(msg="You must pass in api_username " "and api_password and api_url to the module.") self.post_headers = dict(Accept="application/json") self.post_headers['Content-Type'] = 'application/json' # Quick and dirty drive selector, since the one provided by web service proxy is broken for min_disk_size as of 2016-03-12. # Doesn't really need to be a class once this is in module_utils or retired- just groups everything together so we # can copy/paste to other modules more easily. 
# Filters all disks by specified criteria, then groups remaining disks by capacity, interface and disk type, and selects # the first set that matches the specified count and/or aggregate capacity. # class DriveSelector(object): def filter_drives( self, drives, # raw drives resp interface_type=None, # sas, sata, fibre, etc drive_type=None, # ssd/hdd spindle_speed=None, # 7200, 10000, 15000, ssd (=0) min_drive_size=None, max_drive_size=None, fde_required=None, size_unit='gb', min_total_capacity=None, min_drive_count=None, exact_drive_count=None, raid_level=None ): if min_total_capacity is None and exact_drive_count is None: raise Exception("One of criteria_min_total_capacity or criteria_drive_count must be specified.") if min_total_capacity: min_total_capacity = min_total_capacity * self._size_unit_map[size_unit] # filter clearly invalid/unavailable drives first drives = select(lambda d: self._is_valid_drive(d), drives) if interface_type: drives = select(lambda d: d['phyDriveType'] == interface_type, drives) if drive_type: drives = select(lambda d: d['driveMediaType'] == drive_type, drives) if spindle_speed is not None: # 0 is valid for ssds drives = select(lambda d: d['spindleSpeed'] == spindle_speed, drives) if min_drive_size: min_drive_size_bytes = min_drive_size * self._size_unit_map[size_unit] drives = select(lambda d: int(d['rawCapacity']) >= min_drive_size_bytes, drives) if max_drive_size: max_drive_size_bytes = max_drive_size * self._size_unit_map[size_unit] drives = select(lambda d: int(d['rawCapacity']) <= max_drive_size_bytes, drives) if fde_required: drives = select(lambda d: d['fdeCapable'], drives) # initial implementation doesn't have a preference for any of these values... 
# just return the first set we find that matches the requested disk count and/or minimum total capacity for (cur_capacity, drives_by_capacity) in groupby(drives, lambda d: int(d['rawCapacity'])): for (cur_interface_type, drives_by_interface_type) in groupby(drives_by_capacity, lambda d: d['phyDriveType']): for (cur_drive_type, drives_by_drive_type) in groupby(drives_by_interface_type, lambda d: d['driveMediaType']): # listify so we can consume more than once drives_by_drive_type = list(drives_by_drive_type) candidate_set = list() # reset candidate list on each iteration of the innermost loop if exact_drive_count: if len(drives_by_drive_type) < exact_drive_count: continue # we know this set is too small, move on for drive in drives_by_drive_type: candidate_set.append(drive) if self._candidate_set_passes(candidate_set, min_capacity_bytes=min_total_capacity, min_drive_count=min_drive_count, exact_drive_count=exact_drive_count, raid_level=raid_level): return candidate_set raise Exception("couldn't find an available set of disks to match specified criteria") def _is_valid_drive(self, d): is_valid = d['available'] \ and d['status'] == 'optimal' \ and not d['pfa'] \ and not d['removed'] \ and not d['uncertified'] \ and not d['invalidDriveData'] \ and not d['nonRedundantAccess'] return is_valid def _candidate_set_passes(self, candidate_set, min_capacity_bytes=None, min_drive_count=None, exact_drive_count=None, raid_level=None): if not self._is_drive_count_valid(len(candidate_set), min_drive_count=min_drive_count, exact_drive_count=exact_drive_count, raid_level=raid_level): return False # TODO: this assumes candidate_set is all the same size- if we want to allow wastage, need to update to use min size of set if min_capacity_bytes is not None and self._calculate_usable_capacity(int(candidate_set[0]['rawCapacity']), len(candidate_set), raid_level=raid_level) < min_capacity_bytes: return False return True def _calculate_usable_capacity(self, disk_size_bytes, disk_count, 
raid_level=None): if raid_level in [None, 'raid0']: return disk_size_bytes * disk_count if raid_level == 'raid1': return (disk_size_bytes * disk_count) / 2 if raid_level in ['raid3', 'raid5']: return (disk_size_bytes * disk_count) - disk_size_bytes if raid_level in ['raid6', 'raidDiskPool']: return (disk_size_bytes * disk_count) - (disk_size_bytes * 2) raise Exception("unsupported raid_level: %s" % raid_level) def _is_drive_count_valid(self, drive_count, min_drive_count=0, exact_drive_count=None, raid_level=None): if exact_drive_count and exact_drive_count != drive_count: return False if raid_level == 'raidDiskPool': if drive_count < 11: return False if raid_level == 'raid1': if drive_count % 2 != 0: return False if raid_level in ['raid3', 'raid5']: if drive_count < 3: return False if raid_level == 'raid6': if drive_count < 4: return False if min_drive_count and drive_count < min_drive_count: return False return True def get_storage_pool(self, storage_pool_name): # global ifilter self.debug("fetching storage pools") # map the storage pool name to its id try: (rc, resp) = request(self.api_url + "/storage-systems/%s/storage-pools" % (self.ssid), headers=dict(Accept="application/json"), url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs) except Exception: err = get_exception() rc = err.args[0] if rc == 404 and self.state == 'absent': self.module.exit_json( msg="Storage pool [%s] did not exist." % (self.name)) else: err = get_exception() self.module.exit_json( msg="Failed to get storage pools. Array id [%s]. Error[%s]. State[%s]. RC[%s]." % (self.ssid, str(err), self.state, rc)) self.debug("searching for storage pool '%s'" % storage_pool_name) pool_detail = next(select(lambda a: a['name'] == storage_pool_name, resp), None) if pool_detail: found = 'found' else: found = 'not found' self.debug(found) return pool_detail def get_candidate_disks(self): self.debug("getting candidate disks...") # driveCapacityMin is broken on /drives POST. 
Per NetApp request we built our own # switch back to commented code below if it gets fixed # drives_req = dict( # driveCount = self.criteria_drive_count, # sizeUnit = 'mb', # raidLevel = self.raid_level # ) # # if self.criteria_drive_type: # drives_req['driveType'] = self.criteria_drive_type # if self.criteria_disk_min_aggregate_size_mb: # drives_req['targetUsableCapacity'] = self.criteria_disk_min_aggregate_size_mb # # # TODO: this arg appears to be ignored, uncomment if it isn't # #if self.criteria_disk_min_size_gb: # # drives_req['driveCapacityMin'] = self.criteria_disk_min_size_gb * 1024 # (rc,drives_resp) = request(self.api_url + "/storage-systems/%s/drives" % (self.ssid), data=json.dumps(drives_req), headers=self.post_headers, # method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs) # # if rc == 204: # self.module.fail_json(msg='Cannot find disks to match requested criteria for storage pool') # disk_ids = [d['id'] for d in drives_resp] try: (rc, drives_resp) = request(self.api_url + "/storage-systems/%s/drives" % (self.ssid), method='GET', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs) except: err = get_exception() self.module.exit_json( msg="Failed to fetch disk drives. Array id [%s]. Error[%s]." % (self.ssid, str(err))) try: candidate_set = self.filter_drives(drives_resp, exact_drive_count=self.criteria_drive_count, drive_type=self.criteria_drive_type, min_drive_size=self.criteria_drive_min_size, raid_level=self.raid_level, size_unit=self.criteria_size_unit, min_total_capacity=self.criteria_min_usable_capacity, interface_type=self.criteria_drive_interface_type, fde_required=self.criteria_drive_require_fde ) except: err = get_exception() self.module.fail_json( msg="Failed to allocate adequate drive count. Id [%s]. Error [%s]." 
% (self.ssid, str(err))) disk_ids = [d['id'] for d in candidate_set] return disk_ids def create_storage_pool(self): self.debug("creating storage pool...") sp_add_req = dict( raidLevel=self.raid_level, diskDriveIds=self.disk_ids, name=self.name ) if self.erase_secured_drives: sp_add_req['eraseSecuredDrives'] = self.erase_secured_drives try: (rc, resp) = request(self.api_url + "/storage-systems/%s/storage-pools" % (self.ssid), data=json.dumps(sp_add_req), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to create storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % (pool_id, self.ssid, str(err))) self.pool_detail = self.get_storage_pool(self.name) if self.secure_pool: secure_pool_data = dict(securePool=True) try: (retc, r) = request( self.api_url + "/storage-systems/%s/storage-pools/%s" % (self.ssid, self.pool_detail['id']), data=json.dumps(secure_pool_data), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120, ignore_errors=True) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to update storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % (pool_id, self.ssid, str(err))) @property def needs_raid_level_migration(self): current_raid_level = self.pool_detail['raidLevel'] needs_migration = self.raid_level != current_raid_level if needs_migration: # sanity check some things so we can fail early/check-mode if current_raid_level == 'raidDiskPool': self.module.fail_json(msg="raid level cannot be changed for disk pools") return needs_migration def migrate_raid_level(self): self.debug("migrating storage pool to raid level '%s'..." 
% self.raid_level) sp_raid_migrate_req = dict( raidLevel=self.raid_level ) try: (rc, resp) = request( self.api_url + "/storage-systems/%s/storage-pools/%s/raid-type-migration" % (self.ssid, self.name), data=json.dumps(sp_raid_migrate_req), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to change the raid level of storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % ( pool_id, self.ssid, str(err))) @property def sp_drives(self, exclude_hotspares=True): if not self._sp_drives_cached: self.debug("fetching drive list...") try: (rc, resp) = request(self.api_url + "/storage-systems/%s/drives" % (self.ssid), method='GET', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to fetch disk drives. Pool id [%s]. Array id [%s]. Error[%s]." % (pool_id, self.ssid, str(err))) sp_id = self.pool_detail['id'] if exclude_hotspares: self._sp_drives_cached = [d for d in resp if d['currentVolumeGroupRef'] == sp_id and not d['hotSpare']] else: self._sp_drives_cached = [d for d in resp if d['currentVolumeGroupRef'] == sp_id] return self._sp_drives_cached @property def reserved_drive_count_differs(self): if int(self.pool_detail['volumeGroupData']['diskPoolData']['reconstructionReservedDriveCount']) != self.reserve_drive_count: return True return False @property def needs_expansion(self): if self.criteria_drive_count > len(self.sp_drives): return True # TODO: is totalRaidedSpace the best attribute for "how big is this SP"? 
if self.criteria_min_usable_capacity and \ (self.criteria_min_usable_capacity * self._size_unit_map[self.criteria_size_unit]) > int(self.pool_detail['totalRaidedSpace']): return True return False def get_expansion_candidate_drives(self): # sanity checks; don't call this if we can't/don't need to expand if not self.needs_expansion: self.module.fail_json(msg="can't get expansion candidates when pool doesn't need expansion") self.debug("fetching expansion candidate drives...") try: (rc, resp) = request( self.api_url + "/storage-systems/%s/storage-pools/%s/expand" % (self.ssid, self.pool_detail['id']), method='GET', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to fetch candidate drives for storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % ( pool_id, self.ssid, str(err))) current_drive_count = len(self.sp_drives) current_capacity_bytes = int(self.pool_detail['totalRaidedSpace']) # TODO: is this the right attribute to use? if self.criteria_min_usable_capacity: requested_capacity_bytes = self.criteria_min_usable_capacity * self._size_unit_map[self.criteria_size_unit] else: requested_capacity_bytes = current_capacity_bytes if self.criteria_drive_count: minimum_disks_to_add = max((self.criteria_drive_count - current_drive_count), 1) else: minimum_disks_to_add = 1 minimum_bytes_to_add = max(requested_capacity_bytes - current_capacity_bytes, 0) # FUTURE: allow more control over expansion candidate selection? 
# loop over candidate disk sets and add until we've met both criteria added_drive_count = 0 added_capacity_bytes = 0 drives_to_add = set() for s in resp: # don't trust the API not to give us duplicate drives across candidate sets, especially in multi-drive sets candidate_drives = s['drives'] if len(drives_to_add.intersection(candidate_drives)) != 0: # duplicate, skip continue drives_to_add.update(candidate_drives) added_drive_count += len(candidate_drives) added_capacity_bytes += int(s['usableCapacity']) if added_drive_count >= minimum_disks_to_add and added_capacity_bytes >= minimum_bytes_to_add: break if (added_drive_count < minimum_disks_to_add) or (added_capacity_bytes < minimum_bytes_to_add): self.module.fail_json( msg="unable to find at least %s drives to add that would add at least %s bytes of capacity" % ( minimum_disks_to_add, minimum_bytes_to_add)) return list(drives_to_add) def expand_storage_pool(self): drives_to_add = self.get_expansion_candidate_drives() self.debug("adding %s drives to storage pool..." % len(drives_to_add)) sp_expand_req = dict( drives=drives_to_add ) try: request( self.api_url + "/storage-systems/%s/storage-pools/%s/expand" % (self.ssid, self.pool_detail['id']), data=json.dumps(sp_expand_req), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to add drives to storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % (pool_id, self.ssid, str( err))) # TODO: check response # TODO: support blocking wait? 
def reduce_drives(self, drive_list): if all(drive in drive_list for drive in self.sp_drives): # all the drives passed in are present in the system pass else: self.module.fail_json( msg="One of the drives you wish to remove does not currently exist in the storage pool you specified") try: (rc, resp) = request( self.api_url + "/storage-systems/%s/storage-pools/%s/reduction" % (self.ssid, self.pool_detail['id']), data=json.dumps(drive_list), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to remove drives from storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % ( pool_id, self.ssid, str(err))) def update_reserve_drive_count(self, qty): data = dict(reservedDriveCount=qty) try: (rc, resp) = request( self.api_url + "/storage-systems/%s/storage-pools/%s" % (self.ssid, self.pool_detail['id']), data=json.dumps(data), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() pool_id = self.pool_detail['id'] self.module.exit_json( msg="Failed to update reserve drive count. Pool id [%s]. Array id [%s]. Error[%s]." 
% (pool_id, self.ssid, str( err))) def apply(self): changed = False pool_exists = False self.pool_detail = self.get_storage_pool(self.name) if self.pool_detail: pool_exists = True pool_id = self.pool_detail['id'] if self.state == 'absent': self.debug("CHANGED: storage pool exists, but requested state is 'absent'") changed = True elif self.state == 'present': # sanity checks first- we can't change these, so we'll bomb if they're specified if self.criteria_drive_type and self.criteria_drive_type != self.pool_detail['driveMediaType']: self.module.fail_json( msg="drive media type %s cannot be changed to %s" % (self.pool_detail['driveMediaType'], self.criteria_drive_type)) # now the things we can change... if self.needs_expansion: self.debug("CHANGED: storage pool needs expansion") changed = True if self.needs_raid_level_migration: self.debug( "CHANGED: raid level migration required; storage pool uses '%s', requested is '%s'" % ( self.pool_detail['raidLevel'], self.raid_level)) changed = True # if self.reserved_drive_count_differs: # changed = True # TODO: validate other state details? (pool priority, alert threshold) # per FPoole and others, pool reduce operations will not be supported. Automatic "smart" reduction # presents a difficult parameter issue, as the disk count can increase due to expansion, so we # can't just use disk count > criteria_drive_count. else: # pool does not exist if self.state == 'present': self.debug("CHANGED: storage pool does not exist, but requested state is 'present'") changed = True # ensure we can get back a workable set of disks # (doing this early so candidate selection runs under check mode) self.disk_ids = self.get_candidate_disks() else: self.module.exit_json(msg="Storage pool [%s] did not exist." % (self.name)) if changed and not self.module.check_mode: # apply changes if self.state == 'present': if not pool_exists: self.create_storage_pool() else: # pool exists but differs, modify... 
if self.needs_expansion: self.expand_storage_pool() if self.remove_drives: self.reduce_drives(self.remove_drives) if self.needs_raid_level_migration: self.migrate_raid_level() # if self.reserved_drive_count_differs: # self.update_reserve_drive_count(self.reserve_drive_count) if self.secure_pool: secure_pool_data = dict(securePool=True) try: (retc, r) = request( self.api_url + "/storage-systems/%s/storage-pools/%s" % (self.ssid, self.pool_detail[ 'id']), data=json.dumps(secure_pool_data), headers=self.post_headers, method='POST', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120, ignore_errors=True) except: err = get_exception() self.module.exit_json( msg="Failed to delete storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % ( pool_id, self.ssid, str(err))) if int(retc) == 422: self.module.fail_json( msg="Error in enabling secure pool. One of the drives in the specified storage pool is likely not security capable") elif self.state == 'absent': # delete the storage pool try: remove_vol_opt = '' if self.remove_volumes: remove_vol_opt = '?delete-volumes=true' (rc, resp) = request( self.api_url + "/storage-systems/%s/storage-pools/%s%s" % (self.ssid, pool_id, remove_vol_opt), method='DELETE', url_username=self.api_usr, url_password=self.api_pwd, validate_certs=self.validate_certs, timeout=120) except: err = get_exception() self.module.exit_json( msg="Failed to delete storage pool. Pool id [%s]. Array id [%s]. Error[%s]." % (pool_id, self.ssid, str(err))) self.module.exit_json(changed=changed, **self.pool_detail) def main(): sp = NetAppESeriesStoragePool() try: sp.apply() except Exception: e = get_exception() sp.debug("Exception in apply(): \n%s" % format_exc(e)) raise if __name__ == '__main__': main()
gpl-3.0
infobloxopen/infoblox-netmri
infoblox_netmri/api/broker/v3_2_0/auth_user_broker.py
6
93491
from ..broker import Broker class AuthUserBroker(Broker): controller = "auth_users" def index(self, **kwargs): """Lists the available auth users. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient. **Inputs** | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Array of Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` id :param sort: The data field(s) to use for sorting the output. Default is id. 
Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each AuthUser. Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. 
:type goto_value: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_users: An array of the AuthUser objects that match the specified input criteria. :rtype auth_users: Array of AuthUser """ return self.api_list_request(self._get_method_fullname("index"), kwargs) def show(self, **kwargs): """Shows the details for the specified auth user. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: The auth user identified by the specified id. :rtype auth_user: AuthUser """ return self.api_request(self._get_method_fullname("show"), kwargs) def search(self, **kwargs): """Lists the available auth users matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below. **Inputs** | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param account_disabled: A flag indicating whether this user's account has been administratively disabled. :type account_disabled: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param account_disabled: A flag indicating whether this user's account has been administratively disabled. :type account_disabled: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param account_disabled_date: The date and time that the user's account was disabled. 
:type account_disabled_date: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param account_disabled_date: The date and time that the user's account was disabled. :type account_disabled_date: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param account_locked: A flag indicating whether or not this account is locked due to failed login attempts. :type account_locked: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param account_locked: A flag indicating whether or not this account is locked due to failed login attempts. :type account_locked: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param account_locked_date: The date and time that the user's account was locked. :type account_locked_date: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param account_locked_date: The date and time that the user's account was locked. :type account_locked_date: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param auth_service_id: The id of the last authentication service where this user was authenticated. :type auth_service_id: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param auth_service_id: The id of the last authentication service where this user was authenticated. 
:type auth_service_id: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param cert: Client Certificate stored on client success authorization when CAC is enabled :type cert: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param cert: Client Certificate stored on client success authorization when CAC is enabled :type cert: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param cli_creds_enabled_ind: A flag indicating whether or not to use this user's individual CLI credentials for device interaction. :type cli_creds_enabled_ind: Boolean | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_creds_enabled_ind: A flag indicating whether or not to use this user's individual CLI credentials for device interaction. :type cli_creds_enabled_ind: Array of Boolean | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param consecutive_failed_logins: The number of failed logins since the last successful login. :type consecutive_failed_logins: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param consecutive_failed_logins: The number of failed logins since the last successful login. :type consecutive_failed_logins: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param created_at: The date and time the record was initially created in NetMRI. :type created_at: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param created_at: The date and time the record was initially created in NetMRI. 
:type created_at: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param db_creds_enabled_ind: A flag which indicates that the user has database credentials enabled. :type db_creds_enabled_ind: Boolean | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param db_creds_enabled_ind: A flag which indicates that the user has database credentials enabled. :type db_creds_enabled_ind: Array of Boolean | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param db_username: Username for MySQL Database. :type db_username: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param db_username: Username for MySQL Database. :type db_username: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param email: The user's email address. :type email: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param email: The user's email address. :type email: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param expiration: The expiration date for this user's password. :type expiration: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param expiration: The expiration date for this user's password. :type expiration: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param first_name: The user's first name. :type first_name: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param first_name: The user's first name. 
:type first_name: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param force_local_ind: A flag indicating whether user is forced to use local authorization or not. :type force_local_ind: Boolean | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param force_local_ind: A flag indicating whether user is forced to use local authorization or not. :type force_local_ind: Array of Boolean | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param is_system: A flag indicating whether this is a built-in user. Built-in users cannot be removed. :type is_system: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param is_system: A flag indicating whether this is a built-in user. Built-in users cannot be removed. :type is_system: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param last_local_authz_ind: The source where the last authorization came from. May be 0 - Remote, 1 - Local, 2 - Forced Local :type last_local_authz_ind: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_local_authz_ind: The source where the last authorization came from. 
May be 0 - Remote, 1 - Local, 2 - Forced Local :type last_local_authz_ind: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param last_login: The date and time this user last logged into the NetMRI. :type last_login: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_login: The date and time this user last logged into the NetMRI. :type last_login: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param last_name: The user's last name. :type last_name: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_name: The user's last name. :type last_name: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param notes: Notes on the user, was entered by the administrator. :type notes: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param notes: Notes on the user, was entered by the administrator. :type notes: Array of String | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param password_version: version of encryption used to encrypt password :type password_version: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param password_version: version of encryption used to encrypt password :type password_version: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param secure_version: The encryption version of the username and passwords. :type secure_version: Integer | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param secure_version: The encryption version of the username and passwords. 
:type secure_version: Array of Integer | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param updated_at: The date and time the record was last modified in NetMRI. :type updated_at: DateTime | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param updated_at: The date and time the record was last modified in NetMRI. :type updated_at: Array of DateTime | ``api version min:`` 2.3 | ``api version max:`` 2.4 | ``required:`` False | ``default:`` None :param user_name: The user's login name. :type user_name: String | ``api version min:`` 2.5 | ``api version max:`` None | ``required:`` False | ``default:`` None :param user_name: The user's login name. :type user_name: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` id :param sort: The data field(s) to use for sorting the output. Default is id. 
Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each AuthUser. Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. 
:type goto_value: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param query: This value will be matched against auth users, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: account_disabled, account_disabled_date, account_locked, account_locked_date, auth_service_id, cert, cli_creds_enabled_ind, consecutive_failed_logins, created_at, db_creds_enabled_ind, db_username, email, expiration, first_name, force_local_ind, id, is_system, last_local_authz_ind, last_login, last_name, notes, password_version, secure_version, updated_at, user_name. :type query: String | ``api version min:`` 2.3 | ``api version max:`` None | ``required:`` False | ``default:`` None :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering. :type xml_filter: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_users: An array of the AuthUser objects that match the specified input criteria. :rtype auth_users: Array of AuthUser """ return self.api_list_request(self._get_method_fullname("search"), kwargs) def find(self, **kwargs): """Lists the available auth users matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. 
In the input descriptions below, 'field names' refers to the following fields: account_disabled, account_disabled_date, account_locked, account_locked_date, auth_service_id, cert, cli_creds_enabled_ind, consecutive_failed_logins, created_at, db_creds_enabled_ind, db_username, email, expiration, first_name, force_local_ind, id, is_system, last_local_authz_ind, last_login, last_name, notes, password_version, secure_version, updated_at, user_name. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_account_disabled: The operator to apply to the field account_disabled. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. account_disabled: A flag indicating whether this user's account has been administratively disabled. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_account_disabled: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_account_disabled: If op_account_disabled is specified, the field named in this input will be compared to the value in account_disabled using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_account_disabled must be specified if op_account_disabled is specified. :type val_f_account_disabled: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_account_disabled: If op_account_disabled is specified, this value will be compared to the value in account_disabled using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_account_disabled must be specified if op_account_disabled is specified. 
:type val_c_account_disabled: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_account_disabled_date: The operator to apply to the field account_disabled_date. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. account_disabled_date: The date and time that the user's account was disabled. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_account_disabled_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_account_disabled_date: If op_account_disabled_date is specified, the field named in this input will be compared to the value in account_disabled_date using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_account_disabled_date must be specified if op_account_disabled_date is specified. :type val_f_account_disabled_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_account_disabled_date: If op_account_disabled_date is specified, this value will be compared to the value in account_disabled_date using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_account_disabled_date must be specified if op_account_disabled_date is specified. :type val_c_account_disabled_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_account_locked: The operator to apply to the field account_locked. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. 
account_locked: A flag indicating whether or not this account is locked due to failed login attempts. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_account_locked: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_account_locked: If op_account_locked is specified, the field named in this input will be compared to the value in account_locked using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_account_locked must be specified if op_account_locked is specified. :type val_f_account_locked: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_account_locked: If op_account_locked is specified, this value will be compared to the value in account_locked using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_account_locked must be specified if op_account_locked is specified. :type val_c_account_locked: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_account_locked_date: The operator to apply to the field account_locked_date. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. account_locked_date: The date and time that the user's account was locked. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_account_locked_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_account_locked_date: If op_account_locked_date is specified, the field named in this input will be compared to the value in account_locked_date using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_account_locked_date must be specified if op_account_locked_date is specified. :type val_f_account_locked_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_account_locked_date: If op_account_locked_date is specified, this value will be compared to the value in account_locked_date using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_account_locked_date must be specified if op_account_locked_date is specified. :type val_c_account_locked_date: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_auth_service_id: The operator to apply to the field auth_service_id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. auth_service_id: The id of the last authentication service where this user was authenticated. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_auth_service_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_auth_service_id: If op_auth_service_id is specified, the field named in this input will be compared to the value in auth_service_id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. 
Either this field or val_c_auth_service_id must be specified if op_auth_service_id is specified. :type val_f_auth_service_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_auth_service_id: If op_auth_service_id is specified, this value will be compared to the value in auth_service_id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_auth_service_id must be specified if op_auth_service_id is specified. :type val_c_auth_service_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_cert: The operator to apply to the field cert. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. cert: Client Certificate stored on client success authorization when CAC is enabled For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_cert: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_cert: If op_cert is specified, the field named in this input will be compared to the value in cert using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_cert must be specified if op_cert is specified. :type val_f_cert: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_cert: If op_cert is specified, this value will be compared to the value in cert using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_cert must be specified if op_cert is specified. 
:type val_c_cert: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_cli_creds_enabled_ind: The operator to apply to the field cli_creds_enabled_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. cli_creds_enabled_ind: A flag indicating whether or not to use this user's individual CLI credentials for device interaction. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_cli_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_cli_creds_enabled_ind: If op_cli_creds_enabled_ind is specified, the field named in this input will be compared to the value in cli_creds_enabled_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_cli_creds_enabled_ind must be specified if op_cli_creds_enabled_ind is specified. :type val_f_cli_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_cli_creds_enabled_ind: If op_cli_creds_enabled_ind is specified, this value will be compared to the value in cli_creds_enabled_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_cli_creds_enabled_ind must be specified if op_cli_creds_enabled_ind is specified. :type val_c_cli_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_consecutive_failed_logins: The operator to apply to the field consecutive_failed_logins. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. 
consecutive_failed_logins: The number of failed logins since the last successful login. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_consecutive_failed_logins: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_consecutive_failed_logins: If op_consecutive_failed_logins is specified, the field named in this input will be compared to the value in consecutive_failed_logins using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_consecutive_failed_logins must be specified if op_consecutive_failed_logins is specified. :type val_f_consecutive_failed_logins: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_consecutive_failed_logins: If op_consecutive_failed_logins is specified, this value will be compared to the value in consecutive_failed_logins using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_consecutive_failed_logins must be specified if op_consecutive_failed_logins is specified. :type val_c_consecutive_failed_logins: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_created_at: The operator to apply to the field created_at. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. created_at: The date and time the record was initially created in NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_created_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_created_at: If op_created_at is specified, the field named in this input will be compared to the value in created_at using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_created_at must be specified if op_created_at is specified. :type val_f_created_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_created_at: If op_created_at is specified, this value will be compared to the value in created_at using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_created_at must be specified if op_created_at is specified. :type val_c_created_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_db_creds_enabled_ind: The operator to apply to the field db_creds_enabled_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. db_creds_enabled_ind: A flag which indicates that the user has database credentials enabled. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_db_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_db_creds_enabled_ind: If op_db_creds_enabled_ind is specified, the field named in this input will be compared to the value in db_creds_enabled_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_db_creds_enabled_ind must be specified if op_db_creds_enabled_ind is specified. 
:type val_f_db_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_db_creds_enabled_ind: If op_db_creds_enabled_ind is specified, this value will be compared to the value in db_creds_enabled_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_db_creds_enabled_ind must be specified if op_db_creds_enabled_ind is specified. :type val_c_db_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_db_username: The operator to apply to the field db_username. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. db_username: Username for MySQL Database. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_db_username: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_db_username: If op_db_username is specified, the field named in this input will be compared to the value in db_username using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_db_username must be specified if op_db_username is specified. :type val_f_db_username: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_db_username: If op_db_username is specified, this value will be compared to the value in db_username using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_db_username must be specified if op_db_username is specified. 
:type val_c_db_username: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_email: The operator to apply to the field email. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. email: The user's email address. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_email: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_email: If op_email is specified, the field named in this input will be compared to the value in email using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_email must be specified if op_email is specified. :type val_f_email: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_email: If op_email is specified, this value will be compared to the value in email using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_email must be specified if op_email is specified. :type val_c_email: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_expiration: The operator to apply to the field expiration. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. expiration: The expiration date for this user's password. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_expiration: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_expiration: If op_expiration is specified, the field named in this input will be compared to the value in expiration using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_expiration must be specified if op_expiration is specified. :type val_f_expiration: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_expiration: If op_expiration is specified, this value will be compared to the value in expiration using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_expiration must be specified if op_expiration is specified. :type val_c_expiration: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_first_name: The operator to apply to the field first_name. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. first_name: The user's first name. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_first_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_first_name: If op_first_name is specified, the field named in this input will be compared to the value in first_name using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_first_name must be specified if op_first_name is specified. 
:type val_f_first_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_first_name: If op_first_name is specified, this value will be compared to the value in first_name using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_first_name must be specified if op_first_name is specified. :type val_c_first_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_force_local_ind: The operator to apply to the field force_local_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. force_local_ind: A flag indicating whether user is forced to use local authorization or not. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_force_local_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_force_local_ind: If op_force_local_ind is specified, the field named in this input will be compared to the value in force_local_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_force_local_ind must be specified if op_force_local_ind is specified. :type val_f_force_local_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_force_local_ind: If op_force_local_ind is specified, this value will be compared to the value in force_local_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_force_local_ind must be specified if op_force_local_ind is specified. 
:type val_c_force_local_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_id: The operator to apply to the field id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. id: The internal NetMRI identifier for this user. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_id: If op_id is specified, the field named in this input will be compared to the value in id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_id must be specified if op_id is specified. :type val_f_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_id: If op_id is specified, this value will be compared to the value in id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_id must be specified if op_id is specified. :type val_c_id: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_is_system: The operator to apply to the field is_system. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. is_system: A flag indicating whether this is a built-in user. Built-in users cannot be removed. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_is_system: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_is_system: If op_is_system is specified, the field named in this input will be compared to the value in is_system using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_is_system must be specified if op_is_system is specified. :type val_f_is_system: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_is_system: If op_is_system is specified, this value will be compared to the value in is_system using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_is_system must be specified if op_is_system is specified. :type val_c_is_system: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_last_local_authz_ind: The operator to apply to the field last_local_authz_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. last_local_authz_ind: The source where the last authorization came from. May be 0 - Remote, 1 - Local, 2 - Forced Local For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_last_local_authz_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_last_local_authz_ind: If op_last_local_authz_ind is specified, the field named in this input will be compared to the value in last_local_authz_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. 
Either this field or val_c_last_local_authz_ind must be specified if op_last_local_authz_ind is specified. :type val_f_last_local_authz_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_last_local_authz_ind: If op_last_local_authz_ind is specified, this value will be compared to the value in last_local_authz_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_last_local_authz_ind must be specified if op_last_local_authz_ind is specified. :type val_c_last_local_authz_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_last_login: The operator to apply to the field last_login. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. last_login: The date and time this user last logged into the NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_last_login: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_last_login: If op_last_login is specified, the field named in this input will be compared to the value in last_login using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_last_login must be specified if op_last_login is specified. :type val_f_last_login: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_last_login: If op_last_login is specified, this value will be compared to the value in last_login using the specified operator. The value in this input will be treated as an explicit constant value. 
Either this field or val_f_last_login must be specified if op_last_login is specified. :type val_c_last_login: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_last_name: The operator to apply to the field last_name. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. last_name: The user's last name. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_last_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_last_name: If op_last_name is specified, the field named in this input will be compared to the value in last_name using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_last_name must be specified if op_last_name is specified. :type val_f_last_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_last_name: If op_last_name is specified, this value will be compared to the value in last_name using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_last_name must be specified if op_last_name is specified. :type val_c_last_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_notes: The operator to apply to the field notes. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. notes: Notes on the user, was entered by the administrator. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_notes: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_notes: If op_notes is specified, the field named in this input will be compared to the value in notes using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_notes must be specified if op_notes is specified. :type val_f_notes: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_notes: If op_notes is specified, this value will be compared to the value in notes using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_notes must be specified if op_notes is specified. :type val_c_notes: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_password_version: The operator to apply to the field password_version. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. password_version: version of encryption used to encrypt password For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_password_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_password_version: If op_password_version is specified, the field named in this input will be compared to the value in password_version using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_password_version must be specified if op_password_version is specified. 
:type val_f_password_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_password_version: If op_password_version is specified, this value will be compared to the value in password_version using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_password_version must be specified if op_password_version is specified. :type val_c_password_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_secure_version: The operator to apply to the field secure_version. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. secure_version: The encryption version of the username and passwords. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_secure_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_secure_version: If op_secure_version is specified, the field named in this input will be compared to the value in secure_version using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_secure_version must be specified if op_secure_version is specified. :type val_f_secure_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_secure_version: If op_secure_version is specified, this value will be compared to the value in secure_version using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_secure_version must be specified if op_secure_version is specified. 
:type val_c_secure_version: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_updated_at: The operator to apply to the field updated_at. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. updated_at: The date and time the record was last modified in NetMRI. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. :type op_updated_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_updated_at: If op_updated_at is specified, the field named in this input will be compared to the value in updated_at using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_updated_at must be specified if op_updated_at is specified. :type val_f_updated_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_updated_at: If op_updated_at is specified, this value will be compared to the value in updated_at using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_updated_at must be specified if op_updated_at is specified. :type val_c_updated_at: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param op_user_name: The operator to apply to the field user_name. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. user_name: The user's login name. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values. 
:type op_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_f_user_name: If op_user_name is specified, the field named in this input will be compared to the value in user_name using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_user_name must be specified if op_user_name is specified. :type val_f_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param val_c_user_name: If op_user_name is specified, this value will be compared to the value in user_name using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_user_name must be specified if op_user_name is specified. :type val_c_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 0 :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information. :type start: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1000 :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000. :type limit: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` id :param sort: The data field(s) to use for sorting the output. Default is id. 
Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. :type sort: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` asc :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'. :type dir: Array of String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param select: The list of attributes to return for each AuthUser. Valid values are id, user_name, email, notes, created_at, updated_at, first_name, last_name, is_system, last_login, expiration, consecutive_failed_logins, account_locked, account_locked_date, account_disabled, account_disabled_date, cli_creds_enabled_ind, password_secure, password_version, cli_user_name_secure, cli_password_secure, cli_enable_password_secure, secure_version, auth_service_id, force_local_ind, last_local_authz_ind, cert, db_creds_enabled_ind, db_username, db_password_secure. If empty or omitted, all attributes will be returned. :type select: Array | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records. :type goto_field: String | ``api version min:`` 2.8 | ``api version max:`` None | ``required:`` False | ``default:`` None :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records. 
:type goto_value: String | ``api version min:`` 2.3 | ``api version max:`` None | ``required:`` False | ``default:`` None :param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Remind that this kind of filter may be costly and inefficient if not associated with a database filtering. :type xml_filter: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_users: An array of the AuthUser objects that match the specified input criteria. :rtype auth_users: Array of AuthUser """ return self.api_list_request(self._get_method_fullname("find"), kwargs) def create(self, **kwargs): """Creates a new auth user. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param account_disabled: A flag indicating whether this user's account has been administratively disabled. :type account_disabled: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` 1 :param auth_service_id: The id of the last authentication service where this user was authenticated. :type auth_service_id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cert: Client Certificate stored on client success authorization when CAC is enabled :type cert: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` False :param cli_creds_enabled_ind: A flag indicating whether or not to use this user's individual CLI credentials for device interaction. :type cli_creds_enabled_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` :param cli_enable_password: No description is available for cli_enable_password. 
:type cli_enable_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` :param cli_password: No description is available for cli_password. :type cli_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` :param cli_user_name: No description is available for cli_user_name. :type cli_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param email: The user's email address. :type email: String | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param first_name: The user's first name. :type first_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` True :param force_local_ind: A flag indicating whether user is forced to use local authorization or not. :type force_local_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_local_authz_ind: The source where the last authorization came from. May be 0 - Remote, 1 - Local, 2 - Forced Local :type last_local_authz_ind: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param last_name: The user's last name. :type last_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param notes: Notes on the user, was entered by the administrator. :type notes: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param password: The user's password (required for local authentication). :type password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param user_name: The user's login name. 
:type user_name: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return id: The id of the newly created auth user. :rtype id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return model: The class name of the newly created auth user. :rtype model: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return uri: A URI that may be used to retrieve the newly created auth user. :rtype uri: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: The newly created auth user. :rtype auth_user: AuthUser """ return self.api_request(self._get_method_fullname("create"), kwargs) def update(self, **kwargs): """Updates an existing auth user. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for this user. :type id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param account_disabled: A flag indicating whether this user's account has been administratively disabled. If omitted, this field will not be updated. :type account_disabled: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param auth_service_id: The id of the last authentication service where this user was authenticated. If omitted, this field will not be updated. :type auth_service_id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cert: Client Certificate stored on client success authorization when CAC is enabled If omitted, this field will not be updated. 
:type cert: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_creds_enabled_ind: A flag indicating whether or not to use this user's individual CLI credentials for device interaction. If omitted, this field will not be updated. :type cli_creds_enabled_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_enable_password: No description is available for cli_enable_password. If omitted, this field will not be updated. :type cli_enable_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_password: No description is available for cli_password. If omitted, this field will not be updated. :type cli_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_user_name: No description is available for cli_user_name. If omitted, this field will not be updated. :type cli_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param db_creds_enabled_ind: A flag which indicates that the user has database credentials enabled. If omitted, this field will not be updated. :type db_creds_enabled_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param db_password: No description is available for db_password. If omitted, this field will not be updated. :type db_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param db_username: Username for MySQL Database. If omitted, this field will not be updated. :type db_username: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param email: The user's email address. If omitted, this field will not be updated. 
:type email: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param first_name: The user's first name. If omitted, this field will not be updated. :type first_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param force_local_ind: A flag indicating whether user is forced to use local authorization or not. If omitted, this field will not be updated. :type force_local_ind: Boolean | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_local_authz_ind: The source where the last authorization came from. May be 0 - Remote, 1 - Local, 2 - Forced Local If omitted, this field will not be updated. :type last_local_authz_ind: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param last_name: The user's last name. If omitted, this field will not be updated. :type last_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param notes: Notes on the user, was entered by the administrator. If omitted, this field will not be updated. :type notes: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param password: The user's password (required for local authentication). If omitted, this field will not be updated. :type password: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return id: The id of the updated auth user. :rtype id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return model: The class name of the updated auth user. :rtype model: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return uri: A URI that may be used to retrieve the updated auth user. 
:rtype uri: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: The updated auth user. :rtype auth_user: AuthUser """ return self.api_request(self._get_method_fullname("update"), kwargs) def update_own_cli_credentials(self, **kwargs): """Update CLI terminal credentials for current user. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_creds_enabled_ind: None :type cli_creds_enabled_ind: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_user_name: None :type cli_user_name: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_password: None :type cli_password: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :param cli_enable_password: None :type cli_enable_password: String **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return id: None :rtype id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return model: None :rtype model: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return uri: None :rtype uri: String | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: None :rtype auth_user: AuthUser """ return self.api_request(self._get_method_fullname("update_own_cli_credentials"), kwargs) def destroy(self, **kwargs): """Deletes the specified auth user from NetMRI. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for this user. 
:type id: Integer **Outputs** """ return self.api_request(self._get_method_fullname("destroy"), kwargs) def auth_roles(self, **kwargs): """Returns the roles associated with the user, and the device groups to which they apply. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for the user. :type id: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_roles: The roles the assigned to the user, along with an extra 'device_group_ids' attribute, listing the device groups for which the role applies. :rtype auth_roles: Array of AuthRole """ return self.api_request(self._get_method_fullname("auth_roles"), kwargs) def add_auth_role(self, **kwargs): """Assigns a specified role to a user within a specified list of device groups. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for the user. :type id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param auth_role_id: The internal NetMRI identifier for the user role. :type auth_role_id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param device_group_ids: The internal NetMRI identifiers for the device groups in which to assign the role to this user. A value of 0 represents all groups. The user will be assigned the role for the groups listed here for which the currently authenticated user has user_admin privileges. The role will be revoked for those groups that the authenticated user has 'user_admin' privileges, but are not listed. 
:type device_group_ids: Array of Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: The user to which the role has been assigned. :rtype auth_user: AuthUser | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_role: The roles which has been assigned. :rtype auth_role: AuthUser | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return device_group_ids: The current list of device group IDs assigned for this role and user, subject to the permissions of the authenticated user. :rtype device_group_ids: Array of DeviceGroup """ return self.api_request(self._get_method_fullname("add_auth_role"), kwargs) def remove_auth_role(self, **kwargs): """Removes a specified role from a user for all device groups for which the current user has 'user_admin' privilege. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier for the user. :type id: Integer | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param auth_role_id: The internal NetMRI identifier for the user role. :type auth_role_id: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_user: The user from which the role has been removed. :rtype auth_user: AuthUser | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_role: The role which has been removed. :rtype auth_role: AuthUser """ return self.api_request(self._get_method_fullname("remove_auth_role"), kwargs)
apache-2.0
mmazanec22/too-windy
env/lib/python3.5/site-packages/paramiko/__init__.py
44
3922
# Copyright (C) 2003-2011 Robey Pointer <robeypointer@gmail.com> # # This file is part of paramiko. # # Paramiko is free software; you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 2.1 of the License, or (at your option) # any later version. # # Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License # along with Paramiko; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. import sys from paramiko._version import __version__, __version_info__ if sys.version_info < (2, 6): raise RuntimeError('You need Python 2.6+ for this module.') __author__ = "Jeff Forcier <jeff@bitprophet.org>" __license__ = "GNU Lesser General Public License (LGPL)" from paramiko.transport import SecurityOptions, Transport from paramiko.client import SSHClient, MissingHostKeyPolicy, AutoAddPolicy, RejectPolicy, WarningPolicy from paramiko.auth_handler import AuthHandler from paramiko.ssh_gss import GSSAuth, GSS_AUTH_AVAILABLE from paramiko.channel import Channel, ChannelFile from paramiko.ssh_exception import SSHException, PasswordRequiredException, \ BadAuthenticationType, ChannelException, BadHostKeyException, \ AuthenticationException, ProxyCommandFailure from paramiko.server import ServerInterface, SubsystemHandler, InteractiveQuery from paramiko.rsakey import RSAKey from paramiko.dsskey import DSSKey from paramiko.ecdsakey import ECDSAKey from paramiko.sftp import SFTPError, BaseSFTP from paramiko.sftp_client import SFTP, SFTPClient from paramiko.sftp_server import SFTPServer from paramiko.sftp_attr import SFTPAttributes from paramiko.sftp_handle import SFTPHandle 
from paramiko.sftp_si import SFTPServerInterface from paramiko.sftp_file import SFTPFile from paramiko.message import Message from paramiko.packet import Packetizer from paramiko.file import BufferedFile from paramiko.agent import Agent, AgentKey from paramiko.pkey import PKey from paramiko.hostkeys import HostKeys from paramiko.config import SSHConfig from paramiko.proxy import ProxyCommand from paramiko.common import AUTH_SUCCESSFUL, AUTH_PARTIALLY_SUCCESSFUL, AUTH_FAILED, \ OPEN_SUCCEEDED, OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED, OPEN_FAILED_CONNECT_FAILED, \ OPEN_FAILED_UNKNOWN_CHANNEL_TYPE, OPEN_FAILED_RESOURCE_SHORTAGE from paramiko.sftp import SFTP_OK, SFTP_EOF, SFTP_NO_SUCH_FILE, SFTP_PERMISSION_DENIED, SFTP_FAILURE, \ SFTP_BAD_MESSAGE, SFTP_NO_CONNECTION, SFTP_CONNECTION_LOST, SFTP_OP_UNSUPPORTED from paramiko.common import io_sleep __all__ = [ 'Transport', 'SSHClient', 'MissingHostKeyPolicy', 'AutoAddPolicy', 'RejectPolicy', 'WarningPolicy', 'SecurityOptions', 'SubsystemHandler', 'Channel', 'PKey', 'RSAKey', 'DSSKey', 'Message', 'SSHException', 'AuthenticationException', 'PasswordRequiredException', 'BadAuthenticationType', 'ChannelException', 'BadHostKeyException', 'ProxyCommand', 'ProxyCommandFailure', 'SFTP', 'SFTPFile', 'SFTPHandle', 'SFTPClient', 'SFTPServer', 'SFTPError', 'SFTPAttributes', 'SFTPServerInterface', 'ServerInterface', 'BufferedFile', 'Agent', 'AgentKey', 'HostKeys', 'SSHConfig', 'util', 'io_sleep' ]
gpl-3.0
tmpkus/photivo
scons-local-2.2.0/SCons/Conftest.py
29
27705
"""SCons.Conftest Autoconf-like configuration support; low level implementation of tests. """ # # Copyright (c) 2003 Stichting NLnet Labs # Copyright (c) 2001, 2002, 2003 Steven Knight # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # # The purpose of this module is to define how a check is to be performed. # Use one of the Check...() functions below. # # # A context class is used that defines functions for carrying out the tests, # logging and messages. The following methods and members must be present: # # context.Display(msg) Function called to print messages that are normally # displayed for the user. Newlines are explicitly used. # The text should also be written to the logfile! # # context.Log(msg) Function called to write to a log file. # # context.BuildProg(text, ext) # Function called to build a program, using "ext" for the # file extention. Must return an empty string for # success, an error message for failure. 
# For reliable test results building should be done just # like an actual program would be build, using the same # command and arguments (including configure results so # far). # # context.CompileProg(text, ext) # Function called to compile a program, using "ext" for # the file extention. Must return an empty string for # success, an error message for failure. # For reliable test results compiling should be done just # like an actual source file would be compiled, using the # same command and arguments (including configure results # so far). # # context.AppendLIBS(lib_name_list) # Append "lib_name_list" to the value of LIBS. # "lib_namelist" is a list of strings. # Return the value of LIBS before changing it (any type # can be used, it is passed to SetLIBS() later.) # # context.PrependLIBS(lib_name_list) # Prepend "lib_name_list" to the value of LIBS. # "lib_namelist" is a list of strings. # Return the value of LIBS before changing it (any type # can be used, it is passed to SetLIBS() later.) # # context.SetLIBS(value) # Set LIBS to "value". The type of "value" is what # AppendLIBS() returned. # Return the value of LIBS before changing it (any type # can be used, it is passed to SetLIBS() later.) # # context.headerfilename # Name of file to append configure results to, usually # "confdefs.h". # The file must not exist or be empty when starting. # Empty or None to skip this (some tests will not work!). # # context.config_h (may be missing). If present, must be a string, which # will be filled with the contents of a config_h file. # # context.vardict Dictionary holding variables used for the tests and # stores results from the tests, used for the build # commands. # Normally contains "CC", "LIBS", "CPPFLAGS", etc. # # context.havedict Dictionary holding results from the tests that are to # be used inside a program. # Names often start with "HAVE_". These are zero # (feature not present) or one (feature present). 
Other # variables may have any value, e.g., "PERLVERSION" can # be a number and "SYSTEMNAME" a string. # import re from types import IntType # # PUBLIC VARIABLES # LogInputFiles = 1 # Set that to log the input files in case of a failed test LogErrorMessages = 1 # Set that to log Conftest-generated error messages # # PUBLIC FUNCTIONS # # Generic remarks: # - When a language is specified which is not supported the test fails. The # message is a bit different, because not all the arguments for the normal # message are available yet (chicken-egg problem). def CheckBuilder(context, text = None, language = None): """ Configure check to see if the compiler works. Note that this uses the current value of compiler and linker flags, make sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. "language" should be "C" or "C++" and is used to select the compiler. Default is "C". "text" may be used to specify the code to be build. Returns an empty string for success, an error message for failure. """ lang, suffix, msg = _lang2suffix(language) if msg: context.Display("%s\n" % msg) return msg if not text: text = """ int main() { return 0; } """ context.Display("Checking if building a %s file works... " % lang) ret = context.BuildProg(text, suffix) _YesNoResult(context, ret, None, text) return ret def CheckCC(context): """ Configure check for a working C compiler. This checks whether the C compiler, as defined in the $CC construction variable, can compile a C source file. It uses the current $CCCOM value too, so that it can test against non working flags. """ context.Display("Checking whether the C compiler works") text = """ int main() { return 0; } """ ret = _check_empty_program(context, 'CC', text, 'C') _YesNoResult(context, ret, None, text) return ret def CheckSHCC(context): """ Configure check for a working shared C compiler. This checks whether the C compiler, as defined in the $SHCC construction variable, can compile a C source file. 
It uses the current $SHCCCOM value too, so that it can test against non working flags. """ context.Display("Checking whether the (shared) C compiler works") text = """ int foo() { return 0; } """ ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True) _YesNoResult(context, ret, None, text) return ret def CheckCXX(context): """ Configure check for a working CXX compiler. This checks whether the CXX compiler, as defined in the $CXX construction variable, can compile a CXX source file. It uses the current $CXXCOM value too, so that it can test against non working flags. """ context.Display("Checking whether the C++ compiler works") text = """ int main() { return 0; } """ ret = _check_empty_program(context, 'CXX', text, 'C++') _YesNoResult(context, ret, None, text) return ret def CheckSHCXX(context): """ Configure check for a working shared CXX compiler. This checks whether the CXX compiler, as defined in the $SHCXX construction variable, can compile a CXX source file. It uses the current $SHCXXCOM value too, so that it can test against non working flags. """ context.Display("Checking whether the (shared) C++ compiler works") text = """ int main() { return 0; } """ ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True) _YesNoResult(context, ret, None, text) return ret def _check_empty_program(context, comp, text, language, use_shared = False): """Return 0 on success, 1 otherwise.""" if comp not in context.env or not context.env[comp]: # The compiler construction variable is not set or empty return 1 lang, suffix, msg = _lang2suffix(language) if msg: return 1 if use_shared: return context.CompileSharedObject(text, suffix) else: return context.CompileProg(text, suffix) def CheckFunc(context, function_name, header = None, language = None): """ Configure check for a function "function_name". "language" should be "C" or "C++" and is used to select the compiler. Default is "C". 
Optional "header" can be defined to define a function prototype, include a header file or anything else that comes before main(). Sets HAVE_function_name in context.havedict according to the result. Note that this uses the current value of compiler and linker flags, make sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. Returns an empty string for success, an error message for failure. """ # Remarks from autoconf: # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h> # which includes <sys/select.h> which contains a prototype for select. # Similarly for bzero. # - assert.h is included to define __stub macros and hopefully few # prototypes, which can conflict with char $1(); below. # - Override any gcc2 internal prototype to avoid an error. # - We use char for the function declaration because int might match the # return type of a gcc2 builtin and then its argument prototype would # still apply. # - The GNU C library defines this for functions which it implements to # always fail with ENOSYS. Some functions are actually named something # starting with __ and the normal name is an alias. if context.headerfilename: includetext = '#include "%s"' % context.headerfilename else: includetext = '' if not header: header = """ #ifdef __cplusplus extern "C" #endif char %s();""" % function_name lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for %s(): %s\n" % (function_name, msg)) return msg text = """ %(include)s #include <assert.h> %(hdr)s int main() { #if defined (__stub_%(name)s) || defined (__stub___%(name)s) fail fail fail #else %(name)s(); #endif return 0; } """ % { 'name': function_name, 'include': includetext, 'hdr': header } context.Display("Checking for %s function %s()... " % (lang, function_name)) ret = context.BuildProg(text, suffix) _YesNoResult(context, ret, "HAVE_" + function_name, text, "Define to 1 if the system has the function `%s'." 
%\ function_name) return ret def CheckHeader(context, header_name, header = None, language = None, include_quotes = None): """ Configure check for a C or C++ header file "header_name". Optional "header" can be defined to do something before including the header file (unusual, supported for consistency). "language" should be "C" or "C++" and is used to select the compiler. Default is "C". Sets HAVE_header_name in context.havedict according to the result. Note that this uses the current value of compiler and linker flags, make sure $CFLAGS and $CPPFLAGS are set correctly. Returns an empty string for success, an error message for failure. """ # Why compile the program instead of just running the preprocessor? # It is possible that the header file exists, but actually using it may # fail (e.g., because it depends on other header files). Thus this test is # more strict. It may require using the "header" argument. # # Use <> by default, because the check is normally used for system header # files. SCons passes '""' to overrule this. # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. if context.headerfilename: includetext = '#include "%s"\n' % context.headerfilename else: includetext = '' if not header: header = "" lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for header file %s: %s\n" % (header_name, msg)) return msg if not include_quotes: include_quotes = "<>" text = "%s%s\n#include %s%s%s\n\n" % (includetext, header, include_quotes[0], header_name, include_quotes[1]) context.Display("Checking for %s header file %s... " % (lang, header_name)) ret = context.CompileProg(text, suffix) _YesNoResult(context, ret, "HAVE_" + header_name, text, "Define to 1 if you have the <%s> header file." % header_name) return ret def CheckType(context, type_name, fallback = None, header = None, language = None): """ Configure check for a C or C++ type "type_name". Optional "header" can be defined to include a header file. 
"language" should be "C" or "C++" and is used to select the compiler. Default is "C". Sets HAVE_type_name in context.havedict according to the result. Note that this uses the current value of compiler and linker flags, make sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. Returns an empty string for success, an error message for failure. """ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. if context.headerfilename: includetext = '#include "%s"' % context.headerfilename else: includetext = '' if not header: header = "" lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) return msg # Remarks from autoconf about this test: # - Grepping for the type in include files is not reliable (grep isn't # portable anyway). # - Using "TYPE my_var;" doesn't work for const qualified types in C++. # Adding an initializer is not valid for some C++ classes. # - Using the type as parameter to a function either fails for K&$ C or for # C++. # - Using "TYPE *my_var;" is valid in C for some types that are not # declared (struct something). # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable. # - Using the previous two together works reliably. text = """ %(include)s %(header)s int main() { if ((%(name)s *) 0) return 0; if (sizeof (%(name)s)) return 0; } """ % { 'include': includetext, 'header': header, 'name': type_name } context.Display("Checking for %s type %s... " % (lang, type_name)) ret = context.BuildProg(text, suffix) _YesNoResult(context, ret, "HAVE_" + type_name, text, "Define to 1 if the system has the type `%s'." 
% type_name) if ret and fallback and context.headerfilename: f = open(context.headerfilename, "a") f.write("typedef %s %s;\n" % (fallback, type_name)) f.close() return ret def CheckTypeSize(context, type_name, header = None, language = None, expect = None): """This check can be used to get the size of a given type, or to check whether the type is of expected size. Arguments: - type : str the type to check - includes : sequence list of headers to include in the test code before testing the type - language : str 'C' or 'C++' - expect : int if given, will test wether the type has the given number of bytes. If not given, will automatically find the size. Returns: status : int 0 if the check failed, or the found size of the type if the check succeeded.""" # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. if context.headerfilename: includetext = '#include "%s"' % context.headerfilename else: includetext = '' if not header: header = "" lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for %s type: %s\n" % (type_name, msg)) return msg src = includetext + header if not expect is None: # Only check if the given size is the right one context.Display('Checking %s is %d bytes... ' % (type_name, expect)) # test code taken from autoconf: this is a pretty clever hack to find that # a type is of a given size using only compilation. This speeds things up # quite a bit compared to straightforward code using TryRun src = src + r""" typedef %s scons_check_type; int main() { static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)]; test_array[0] = 0; return 0; } """ st = context.CompileProg(src % (type_name, expect), suffix) if not st: context.Display("yes\n") _Have(context, "SIZEOF_%s" % type_name, expect, "The size of `%s', as computed by sizeof." 
% type_name) return expect else: context.Display("no\n") _LogFailed(context, src, st) return 0 else: # Only check if the given size is the right one context.Message('Checking size of %s ... ' % type_name) # We have to be careful with the program we wish to test here since # compilation will be attempted using the current environment's flags. # So make sure that the program will compile without any warning. For # example using: 'int main(int argc, char** argv)' will fail with the # '-Wall -Werror' flags since the variables argc and argv would not be # used in the program... # src = src + """ #include <stdlib.h> #include <stdio.h> int main() { printf("%d", (int)sizeof(""" + type_name + """)); return 0; } """ st, out = context.RunProg(src, suffix) try: size = int(out) except ValueError: # If cannot convert output of test prog to an integer (the size), # something went wront, so just fail st = 1 size = 0 if not st: context.Display("yes\n") _Have(context, "SIZEOF_%s" % type_name, size, "The size of `%s', as computed by sizeof." % type_name) return size else: context.Display("no\n") _LogFailed(context, src, st) return 0 return 0 def CheckDeclaration(context, symbol, includes = None, language = None): """Checks whether symbol is declared. Use the same test as autoconf, that is test whether the symbol is defined as a macro or can be used as an r-value. Arguments: symbol : str the symbol to check includes : str Optional "header" can be defined to include a header file. language : str only C and C++ supported. Returns: status : bool True if the check failed, False if succeeded.""" # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
if context.headerfilename: includetext = '#include "%s"' % context.headerfilename else: includetext = '' if not includes: includes = "" lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for declaration %s: %s\n" % (symbol, msg)) return msg src = includetext + includes context.Display('Checking whether %s is declared... ' % symbol) src = src + r""" int main() { #ifndef %s (void) %s; #endif ; return 0; } """ % (symbol, symbol) st = context.CompileProg(src, suffix) _YesNoResult(context, st, "HAVE_DECL_" + symbol, src, "Set to 1 if %s is defined." % symbol) return st def CheckLib(context, libs, func_name = None, header = None, extra_libs = None, call = None, language = None, autoadd = 1, append = True): """ Configure check for a C or C++ libraries "libs". Searches through the list of libraries, until one is found where the test succeeds. Tests if "func_name" or "call" exists in the library. Note: if it exists in another library the test succeeds anyway! Optional "header" can be defined to include a header file. If not given a default prototype for "func_name" is added. Optional "extra_libs" is a list of library names to be added after "lib_name" in the build command. To be used for libraries that "lib_name" depends on. Optional "call" replaces the call to "func_name" in the test code. It must consist of complete C statements, including a trailing ";". Both "func_name" and "call" arguments are optional, and in that case, just linking against the libs is tested. "language" should be "C" or "C++" and is used to select the compiler. Default is "C". Note that this uses the current value of compiler and linker flags, make sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly. Returns an empty string for success, an error message for failure. """ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H. 
if context.headerfilename: includetext = '#include "%s"' % context.headerfilename else: includetext = '' if not header: header = "" text = """ %s %s""" % (includetext, header) # Add a function declaration if needed. if func_name and func_name != "main": if not header: text = text + """ #ifdef __cplusplus extern "C" #endif char %s(); """ % func_name # The actual test code. if not call: call = "%s();" % func_name # if no function to test, leave main() blank text = text + """ int main() { %s return 0; } """ % (call or "") if call: i = call.find("\n") if i > 0: calltext = call[:i] + ".." elif call[-1] == ';': calltext = call[:-1] else: calltext = call for lib_name in libs: lang, suffix, msg = _lang2suffix(language) if msg: context.Display("Cannot check for library %s: %s\n" % (lib_name, msg)) return msg # if a function was specified to run in main(), say it if call: context.Display("Checking for %s in %s library %s... " % (calltext, lang, lib_name)) # otherwise, just say the name of library and language else: context.Display("Checking for %s library %s... " % (lang, lib_name)) if lib_name: l = [ lib_name ] if extra_libs: l.extend(extra_libs) if append: oldLIBS = context.AppendLIBS(l) else: oldLIBS = context.PrependLIBS(l) sym = "HAVE_LIB" + lib_name else: oldLIBS = -1 sym = None ret = context.BuildProg(text, suffix) _YesNoResult(context, ret, sym, text, "Define to 1 if you have the `%s' library." % lib_name) if oldLIBS != -1 and (ret or not autoadd): context.SetLIBS(oldLIBS) if not ret: return ret return ret # # END OF PUBLIC FUNCTIONS # def _YesNoResult(context, ret, key, text, comment = None): """ Handle the result of a test with a "yes" or "no" result. "ret" is the return value: empty if OK, error message when not. "key" is the name of the symbol to be defined (HAVE_foo). "text" is the source code of the program used for testing. "comment" is the C comment to add above the line defining the symbol (the comment is automatically put inside a /* */). 
If None, no comment is added. """ if key: _Have(context, key, not ret, comment) if ret: context.Display("no\n") _LogFailed(context, text, ret) else: context.Display("yes\n") def _Have(context, key, have, comment = None): """ Store result of a test in context.havedict and context.headerfilename. "key" is a "HAVE_abc" name. It is turned into all CAPITALS and non- alphanumerics are replaced by an underscore. The value of "have" can be: 1 - Feature is defined, add "#define key". 0 - Feature is not defined, add "/* #undef key */". Adding "undef" is what autoconf does. Not useful for the compiler, but it shows that the test was done. number - Feature is defined to this number "#define key have". Doesn't work for 0 or 1, use a string then. string - Feature is defined to this string "#define key have". Give "have" as is should appear in the header file, include quotes when desired and escape special characters! """ key_up = key.upper() key_up = re.sub('[^A-Z0-9_]', '_', key_up) context.havedict[key_up] = have if have == 1: line = "#define %s 1\n" % key_up elif have == 0: line = "/* #undef %s */\n" % key_up elif isinstance(have, IntType): line = "#define %s %d\n" % (key_up, have) else: line = "#define %s %s\n" % (key_up, str(have)) if comment is not None: lines = "\n/* %s */\n" % comment + line else: lines = "\n" + line if context.headerfilename: f = open(context.headerfilename, "a") f.write(lines) f.close() elif hasattr(context,'config_h'): context.config_h = context.config_h + lines def _LogFailed(context, text, msg): """ Write to the log about a failed program. Add line numbers, so that error messages can be understood. 
""" if LogInputFiles: context.Log("Failed program was:\n") lines = text.split('\n') if len(lines) and lines[-1] == '': lines = lines[:-1] # remove trailing empty line n = 1 for line in lines: context.Log("%d: %s\n" % (n, line)) n = n + 1 if LogErrorMessages: context.Log("Error message: %s\n" % msg) def _lang2suffix(lang): """ Convert a language name to a suffix. When "lang" is empty or None C is assumed. Returns a tuple (lang, suffix, None) when it works. For an unrecognized language returns (None, None, msg). Where: lang = the unified language name suffix = the suffix, including the leading dot msg = an error message """ if not lang or lang in ["C", "c"]: return ("C", ".c", None) if lang in ["c++", "C++", "cpp", "CXX", "cxx"]: return ("C++", ".cpp", None) return None, None, "Unsupported language: %s" % lang # vim: set sw=4 et sts=4 tw=79 fo+=l: # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
gpl-3.0
manageyp/xhtml2pdf
ez_setup.py
92
9669
#!python """Bootstrap setuptools installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from ez_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. """ import sys DEFAULT_VERSION = "0.6c9" DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3] md5_data = { 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', 
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20', 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab', 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53', 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2', 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e', 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372', 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902', 'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de', 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b', } import sys, os try: from hashlib import md5 except ImportError: from md5 import md5 def _validate_md5(egg_name, data): if egg_name in md5_data: digest = md5(data).hexdigest() if digest != md5_data[egg_name]: print >> sys.stderr, ( "md5 validation of %s failed! (Possible download problem?)" % egg_name ) sys.exit(2) return data def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15 ): """Automatically find/download setuptools and make it available on sys.path `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where setuptools will be downloaded, if it is not already available. If `download_delay` is specified, it should be the number of seconds that will be paused before initiating a download, should one be required. If an older version of setuptools is installed, this routine will print a message to ``sys.stderr`` and raise SystemExit in an attempt to abort the calling script. 
""" was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules def do_download(): egg = download_setuptools(version, download_base, to_dir, download_delay) sys.path.insert(0, egg) import setuptools; setuptools.bootstrap_install_from = egg try: import pkg_resources except ImportError: return do_download() try: pkg_resources.require("setuptools>=" + version); return except pkg_resources.VersionConflict, e: if was_imported: print >> sys.stderr, ( "The required version of setuptools (>=%s) is not available, and\n" "can't be installed while this script is running. Please install\n" " a more recent version first, using 'easy_install -U setuptools'." "\n\n(Currently using %r)" ) % (version, e.args[0]) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return do_download() except pkg_resources.DistributionNotFound: return do_download() def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15 ): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ import urllib2, shutil egg_name = "setuptools-%s-py%s.egg" % (version, sys.version[:3]) url = download_base + egg_name saveto = os.path.join(to_dir, egg_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: from distutils import log if delay: log.warn(""" --------------------------------------------------------------------------- This script requires setuptools version %s to run (even to display help). I will attempt to download it for you (from %s), but you may need to enable firewall access for this script first. I will start the download in %d seconds. 
(Note: if this machine does not have network access, please obtain the file %s and place it in this directory before rerunning this script.) ---------------------------------------------------------------------------""", version, download_base, delay, url ); from time import sleep; sleep(delay) log.warn("Downloading %s", url) src = urllib2.urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = _validate_md5(egg_name, src.read()) dst = open(saveto, "wb"); dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" try: import setuptools except ImportError: egg = None try: egg = download_setuptools(version, delay=0) sys.path.insert(0, egg) from setuptools.command.easy_install import main return main(list(argv) + [egg]) # we're done here finally: if egg and os.path.exists(egg): os.unlink(egg) else: if setuptools.__version__ == '0.0.1': print >> sys.stderr, ( "You have an obsolete version of setuptools installed. Please\n" "remove it from your system entirely before rerunning this script." ) sys.exit(2) req = "setuptools>=" + version import pkg_resources try: pkg_resources.require(req) except pkg_resources.VersionConflict: try: from setuptools.command.easy_install import main except ImportError: from easy_install import main main(list(argv) + [download_setuptools(delay=0)]) sys.exit(0) # try to force an exit else: if argv: from setuptools.command.easy_install import main main(argv) else: print "Setuptools version", version, "or greater has been installed." 
print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' def update_md5(filenames): """Update our built-in md5 registry""" import re for name in filenames: base = os.path.basename(name) f = open(name, 'rb') md5_data[base] = md5(f.read()).hexdigest() f.close() data = [" %r: %r,\n" % it for it in md5_data.items()] data.sort() repl = "".join(data) import inspect srcfile = inspect.getsourcefile(sys.modules[__name__]) f = open(srcfile, 'rb'); src = f.read(); f.close() match = re.search("\nmd5_data = {\n([^}]+)}", src) if not match: print >> sys.stderr, "Internal error!" sys.exit(2) src = src[:match.start(1)] + repl + src[match.end(1):] f = open(srcfile, 'w') f.write(src) f.close() if __name__ == '__main__': if len(sys.argv) > 2 and sys.argv[1] == '--md5update': update_md5(sys.argv[2:]) else: main(sys.argv[1:])
apache-2.0
eduNEXT/edx-platform
openedx/core/djangoapps/content/learning_sequences/api/processors/enrollment_track_partition_groups.py
3
3159
# lint-amnesty, pylint: disable=missing-module-docstring import logging from xmodule.partitions.enrollment_track_partition_generator import ( create_enrollment_track_partition_with_course_id ) from xmodule.partitions.partitions import ( ENROLLMENT_TRACK_PARTITION_ID, ) from xmodule.partitions.partitions_service import get_user_partition_groups from .base import OutlineProcessor log = logging.getLogger(__name__) class EnrollmentTrackPartitionGroupsOutlineProcessor(OutlineProcessor): """ Processor for applying all enrollment track user partition groups. Confining the processor to only EnrollmentTrack user partition is a significant limitation. Nonetheless, it is a step towards the goal of supporting all partition schemes in the future. """ def __init__(self, course_key, user, at_time): super().__init__(course_key, user, at_time) self.enrollment_track_groups = {} self.user_group = None def load_data(self): """ Pull track groups for this course and which group the user is in. """ user_partition = create_enrollment_track_partition_with_course_id(self.course_key) self.enrollment_track_groups = get_user_partition_groups( self.course_key, [user_partition], self.user, partition_dict_key='id' ) self.user_group = self.enrollment_track_groups.get(ENROLLMENT_TRACK_PARTITION_ID) def _is_user_excluded_by_partition_group(self, user_partition_groups): """ Is the user part of the group to which the block is restricting content? """ if not user_partition_groups: return False groups = user_partition_groups.get(ENROLLMENT_TRACK_PARTITION_ID) if not groups: return False if self.user_group and self.user_group.id not in groups: # If the user's partition group, say Masters, # does not belong to the partition of the block, say [verified], # the block should be removed return True return False def usage_keys_to_remove(self, full_course_outline): """ Content group exclusions remove the content entirely. 
If you're in the Audit track, there are things in the Verified track that you don't even know exists. This processor always removes things entirely instead of making them visible-but-inaccessible (like ScheduleOutlineProcessor does). """ removed_usage_keys = set() for section in full_course_outline.sections: remove_all_children = False if self._is_user_excluded_by_partition_group( section.user_partition_groups ): removed_usage_keys.add(section.usage_key) remove_all_children = True for seq in section.sequences: if remove_all_children or self._is_user_excluded_by_partition_group( seq.user_partition_groups ): removed_usage_keys.add(seq.usage_key) return removed_usage_keys
agpl-3.0
Trust-Code/PySPED
pysped/nfe/__init__.py
6
1904
# -*- coding: utf-8 -*- # # PySPED - Python libraries to deal with Brazil's SPED Project # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira at tauga.com.br> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Library General Public License as # published by the Free Software Foundation, either version 2.1 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU Library General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # PySPED - Bibliotecas Python para o # SPED - Sistema Público de Escrituração Digital # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira arroba tauga.com.br> # # Este programa é um software livre: você pode redistribuir e/ou modificar # este programa sob os termos da licença GNU Library General Public License, # publicada pela Free Software Foundation, em sua versão 2.1 ou, de acordo # com sua opção, qualquer versão posterior. # # Este programa é distribuido na esperança de que venha a ser útil, # porém SEM QUAISQUER GARANTIAS, nem mesmo a garantia implícita de # COMERCIABILIDADE ou ADEQUAÇÃO A UMA FINALIDADE ESPECÍFICA. Veja a # GNU Library General Public License para mais detalhes. # # Você deve ter recebido uma cópia da GNU Library General Public License # juntamente com este programa. Caso esse não seja o caso, acesse: # <http://www.gnu.org/licenses/> # from .processador_nfe import ProcessadorNFe, DANFE, Certificado, DAEDE # Pyflakes ProcessadorNFe DANFE Certificado
lgpl-2.1
synnick/roboronya
roboronya/plugins/cholify.py
1
1871
import random from roboronya.plugins.plugin import Plugin class Cholificator(Plugin): description = 'Roboronya will use her *Automated Cholification Algorithm* (Patent Pending) to translate your text to a more sophisticated language.' name = 'cholify' @Plugin.requires_args def run(roboronya, conv, cmd_args, **kwargs): def _cholify(words): choloWords = [] for word in words: choloWord = '' oldChar = '' for char in word.lower(): if char == 'y': choloWord += 'ii' elif char == 't': choloWord += 'th' elif char == 'u' and (oldChar == 'q'): choloWord += random.choice(['kh', 'k']) elif (char == 'i' or char == 'e') and oldChar == 'c': choloWord = choloWord[:-1] choloWord += random.choice(['s', 'z']) + char elif char == 'h' and oldChar == 'c': choloWord = choloWord[:-1] choloWord += random.choice(['zh', 'sh']) elif char == 'c': choloWord += 'k' elif char == 's': choloWord += 'z' elif char == 'v': choloWord += 'b' elif char == 'b': choloWord += 'v' elif char == 'q': pass else: choloWord += char oldChar = char choloWords.append(choloWord) return choloWords return roboronya.send_message( conv, ' '.join(_cholify(cmd_args)), **kwargs )
mit
f0rki/cb-multios
original-challenges/Mixology/support/mixcodegen/markupsafe/_constants.py
1535
4795
# -*- coding: utf-8 -*- """ markupsafe._constants ~~~~~~~~~~~~~~~~~~~~~ Highlevel implementation of the Markup string. :copyright: (c) 2010 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ HTML_ENTITIES = { 'AElig': 198, 'Aacute': 193, 'Acirc': 194, 'Agrave': 192, 'Alpha': 913, 'Aring': 197, 'Atilde': 195, 'Auml': 196, 'Beta': 914, 'Ccedil': 199, 'Chi': 935, 'Dagger': 8225, 'Delta': 916, 'ETH': 208, 'Eacute': 201, 'Ecirc': 202, 'Egrave': 200, 'Epsilon': 917, 'Eta': 919, 'Euml': 203, 'Gamma': 915, 'Iacute': 205, 'Icirc': 206, 'Igrave': 204, 'Iota': 921, 'Iuml': 207, 'Kappa': 922, 'Lambda': 923, 'Mu': 924, 'Ntilde': 209, 'Nu': 925, 'OElig': 338, 'Oacute': 211, 'Ocirc': 212, 'Ograve': 210, 'Omega': 937, 'Omicron': 927, 'Oslash': 216, 'Otilde': 213, 'Ouml': 214, 'Phi': 934, 'Pi': 928, 'Prime': 8243, 'Psi': 936, 'Rho': 929, 'Scaron': 352, 'Sigma': 931, 'THORN': 222, 'Tau': 932, 'Theta': 920, 'Uacute': 218, 'Ucirc': 219, 'Ugrave': 217, 'Upsilon': 933, 'Uuml': 220, 'Xi': 926, 'Yacute': 221, 'Yuml': 376, 'Zeta': 918, 'aacute': 225, 'acirc': 226, 'acute': 180, 'aelig': 230, 'agrave': 224, 'alefsym': 8501, 'alpha': 945, 'amp': 38, 'and': 8743, 'ang': 8736, 'apos': 39, 'aring': 229, 'asymp': 8776, 'atilde': 227, 'auml': 228, 'bdquo': 8222, 'beta': 946, 'brvbar': 166, 'bull': 8226, 'cap': 8745, 'ccedil': 231, 'cedil': 184, 'cent': 162, 'chi': 967, 'circ': 710, 'clubs': 9827, 'cong': 8773, 'copy': 169, 'crarr': 8629, 'cup': 8746, 'curren': 164, 'dArr': 8659, 'dagger': 8224, 'darr': 8595, 'deg': 176, 'delta': 948, 'diams': 9830, 'divide': 247, 'eacute': 233, 'ecirc': 234, 'egrave': 232, 'empty': 8709, 'emsp': 8195, 'ensp': 8194, 'epsilon': 949, 'equiv': 8801, 'eta': 951, 'eth': 240, 'euml': 235, 'euro': 8364, 'exist': 8707, 'fnof': 402, 'forall': 8704, 'frac12': 189, 'frac14': 188, 'frac34': 190, 'frasl': 8260, 'gamma': 947, 'ge': 8805, 'gt': 62, 'hArr': 8660, 'harr': 8596, 'hearts': 9829, 'hellip': 8230, 'iacute': 237, 'icirc': 238, 'iexcl': 161, 'igrave': 
236, 'image': 8465, 'infin': 8734, 'int': 8747, 'iota': 953, 'iquest': 191, 'isin': 8712, 'iuml': 239, 'kappa': 954, 'lArr': 8656, 'lambda': 955, 'lang': 9001, 'laquo': 171, 'larr': 8592, 'lceil': 8968, 'ldquo': 8220, 'le': 8804, 'lfloor': 8970, 'lowast': 8727, 'loz': 9674, 'lrm': 8206, 'lsaquo': 8249, 'lsquo': 8216, 'lt': 60, 'macr': 175, 'mdash': 8212, 'micro': 181, 'middot': 183, 'minus': 8722, 'mu': 956, 'nabla': 8711, 'nbsp': 160, 'ndash': 8211, 'ne': 8800, 'ni': 8715, 'not': 172, 'notin': 8713, 'nsub': 8836, 'ntilde': 241, 'nu': 957, 'oacute': 243, 'ocirc': 244, 'oelig': 339, 'ograve': 242, 'oline': 8254, 'omega': 969, 'omicron': 959, 'oplus': 8853, 'or': 8744, 'ordf': 170, 'ordm': 186, 'oslash': 248, 'otilde': 245, 'otimes': 8855, 'ouml': 246, 'para': 182, 'part': 8706, 'permil': 8240, 'perp': 8869, 'phi': 966, 'pi': 960, 'piv': 982, 'plusmn': 177, 'pound': 163, 'prime': 8242, 'prod': 8719, 'prop': 8733, 'psi': 968, 'quot': 34, 'rArr': 8658, 'radic': 8730, 'rang': 9002, 'raquo': 187, 'rarr': 8594, 'rceil': 8969, 'rdquo': 8221, 'real': 8476, 'reg': 174, 'rfloor': 8971, 'rho': 961, 'rlm': 8207, 'rsaquo': 8250, 'rsquo': 8217, 'sbquo': 8218, 'scaron': 353, 'sdot': 8901, 'sect': 167, 'shy': 173, 'sigma': 963, 'sigmaf': 962, 'sim': 8764, 'spades': 9824, 'sub': 8834, 'sube': 8838, 'sum': 8721, 'sup': 8835, 'sup1': 185, 'sup2': 178, 'sup3': 179, 'supe': 8839, 'szlig': 223, 'tau': 964, 'there4': 8756, 'theta': 952, 'thetasym': 977, 'thinsp': 8201, 'thorn': 254, 'tilde': 732, 'times': 215, 'trade': 8482, 'uArr': 8657, 'uacute': 250, 'uarr': 8593, 'ucirc': 251, 'ugrave': 249, 'uml': 168, 'upsih': 978, 'upsilon': 965, 'uuml': 252, 'weierp': 8472, 'xi': 958, 'yacute': 253, 'yen': 165, 'yuml': 255, 'zeta': 950, 'zwj': 8205, 'zwnj': 8204 }
mit
clovett/MissionPlanner
Lib/distutils/command/check.py
50
5375
"""distutils.command.check Implements the Distutils 'check' command. """ __revision__ = "$Id$" from distutils.core import Command from distutils.errors import DistutilsSetupError try: # docutils is installed from docutils.utils import Reporter from docutils.parsers.rst import Parser from docutils import frontend from docutils import nodes from StringIO import StringIO class SilentReporter(Reporter): def __init__(self, source, report_level, halt_level, stream=None, debug=0, encoding='ascii', error_handler='replace'): self.messages = [] Reporter.__init__(self, source, report_level, halt_level, stream, debug, encoding, error_handler) def system_message(self, level, message, *children, **kwargs): self.messages.append((level, message, children, kwargs)) HAS_DOCUTILS = True except ImportError: # docutils is not installed HAS_DOCUTILS = False class check(Command): """This command checks the meta-data of the package. """ description = ("perform some checks on the package") user_options = [('metadata', 'm', 'Verify meta-data'), ('restructuredtext', 'r', ('Checks if long string meta-data syntax ' 'are reStructuredText-compliant')), ('strict', 's', 'Will exit with an error if a check fails')] boolean_options = ['metadata', 'restructuredtext', 'strict'] def initialize_options(self): """Sets default values for options.""" self.restructuredtext = 0 self.metadata = 1 self.strict = 0 self._warnings = 0 def finalize_options(self): pass def warn(self, msg): """Counts the number of warnings that occurs.""" self._warnings += 1 return Command.warn(self, msg) def run(self): """Runs the command.""" # perform the various tests if self.metadata: self.check_metadata() if self.restructuredtext: if HAS_DOCUTILS: self.check_restructuredtext() elif self.strict: raise DistutilsSetupError('The docutils package is needed.') # let's raise an error in strict mode, if we have at least # one warning if self.strict and self._warnings > 0: raise DistutilsSetupError('Please correct your package.') def 
check_metadata(self): """Ensures that all required elements of meta-data are supplied. name, version, URL, (author and author_email) or (maintainer and maintainer_email)). Warns if any are missing. """ metadata = self.distribution.metadata missing = [] for attr in ('name', 'version', 'url'): if not (hasattr(metadata, attr) and getattr(metadata, attr)): missing.append(attr) if missing: self.warn("missing required meta-data: %s" % ', '.join(missing)) if metadata.author: if not metadata.author_email: self.warn("missing meta-data: if 'author' supplied, " + "'author_email' must be supplied too") elif metadata.maintainer: if not metadata.maintainer_email: self.warn("missing meta-data: if 'maintainer' supplied, " + "'maintainer_email' must be supplied too") else: self.warn("missing meta-data: either (author and author_email) " + "or (maintainer and maintainer_email) " + "must be supplied") def check_restructuredtext(self): """Checks if the long string fields are reST-compliant.""" data = self.distribution.get_long_description() for warning in self._check_rst_data(data): line = warning[-1].get('line') if line is None: warning = warning[1] else: warning = '%s (line %s)' % (warning[1], line) self.warn(warning) def _check_rst_data(self, data): """Returns warnings when the provided data doesn't compile.""" source_path = StringIO() parser = Parser() settings = frontend.OptionParser().get_default_values() settings.tab_width = 4 settings.pep_references = None settings.rfc_references = None reporter = SilentReporter(source_path, settings.report_level, settings.halt_level, stream=settings.warning_stream, debug=settings.debug, encoding=settings.error_encoding, error_handler=settings.error_encoding_error_handler) document = nodes.document(settings, reporter, source=source_path) document.note_source(source_path, -1) try: parser.parse(data, document) except AttributeError: reporter.messages.append((-1, 'Could not finish the parsing.', '', {})) return reporter.messages
gpl-3.0
geeag/kafka
tests/kafkatest/tests/core/reassign_partitions_test.py
4
5559
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from ducktape.mark import parametrize from ducktape.utils.util import wait_until from kafkatest.services.zookeeper import ZookeeperService from kafkatest.services.kafka import KafkaService from kafkatest.services.verifiable_producer import VerifiableProducer from kafkatest.services.console_consumer import ConsoleConsumer from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest from kafkatest.utils import is_int import random class ReassignPartitionsTest(ProduceConsumeValidateTest): """ These tests validate partition reassignment. Create a topic with few partitions, load some data, trigger partition re-assignment with and without broker failure, check that partition re-assignment can complete and there is no data loss. 
""" def __init__(self, test_context): """:type test_context: ducktape.tests.test.TestContext""" super(ReassignPartitionsTest, self).__init__(test_context=test_context) self.topic = "test_topic" self.zk = ZookeeperService(test_context, num_nodes=1) self.kafka = KafkaService(test_context, num_nodes=4, zk=self.zk, topics={self.topic: { "partitions": 20, "replication-factor": 3, 'configs': {"min.insync.replicas": 2}} }) self.num_partitions = 20 self.timeout_sec = 60 self.producer_throughput = 1000 self.num_producers = 1 self.num_consumers = 1 def setUp(self): self.zk.start() def min_cluster_size(self): # Override this since we're adding services outside of the constructor return super(ReassignPartitionsTest, self).min_cluster_size() + self.num_producers + self.num_consumers def clean_bounce_some_brokers(self): """Bounce every other broker""" for node in self.kafka.nodes[::2]: self.kafka.restart_node(node, clean_shutdown=True) def reassign_partitions(self, bounce_brokers): partition_info = self.kafka.parse_describe_topic(self.kafka.describe_topic(self.topic)) self.logger.debug("Partitions before reassignment:" + str(partition_info)) # jumble partition assignment in dictionary seed = random.randint(0, 2 ** 31 - 1) self.logger.debug("Jumble partition assignment with seed " + str(seed)) random.seed(seed) # The list may still be in order, but that's ok shuffled_list = range(0, self.num_partitions) random.shuffle(shuffled_list) for i in range(0, self.num_partitions): partition_info["partitions"][i]["partition"] = shuffled_list[i] self.logger.debug("Jumbled partitions: " + str(partition_info)) # send reassign partitions command self.kafka.execute_reassign_partitions(partition_info) if bounce_brokers: # bounce a few brokers at the same time self.clean_bounce_some_brokers() # Wait until finished or timeout wait_until(lambda: self.kafka.verify_reassign_partitions(partition_info), timeout_sec=self.timeout_sec, backoff_sec=.5) @parametrize(security_protocol="PLAINTEXT", 
bounce_brokers=True) @parametrize(security_protocol="PLAINTEXT", bounce_brokers=False) def test_reassign_partitions(self, bounce_brokers, security_protocol): """Reassign partitions tests. Setup: 1 zk, 3 kafka nodes, 1 topic with partitions=3, replication-factor=3, and min.insync.replicas=2 - Produce messages in the background - Consume messages in the background - Reassign partitions - If bounce_brokers is True, also bounce a few brokers while partition re-assignment is in progress - When done reassigning partitions and bouncing brokers, stop producing, and finish consuming - Validate that every acked message was consumed """ self.kafka.security_protocol = security_protocol self.kafka.interbroker_security_protocol = security_protocol new_consumer = False if self.kafka.security_protocol == "PLAINTEXT" else True self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic, throughput=self.producer_throughput) self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic, new_consumer=new_consumer, consumer_timeout_ms=60000, message_validator=is_int) self.kafka.start() self.run_produce_consume_validate(core_test_action=lambda: self.reassign_partitions(bounce_brokers))
apache-2.0
ryfeus/lambda-packs
Keras_tensorflow/source/tensorflow/contrib/slim/nets.py
191
1609
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """TF-Slim Nets. ## Standard Networks. @@alexnet_v2 @@inception_v1 @@inception_v1_base @@inception_v2 @@inception_v2_base @@inception_v3 @@inception_v3_base @@overfeat @@vgg_a @@vgg_16 """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=unused-import, # Collapse nets into a single namespace. from tensorflow.contrib.slim.python.slim.nets import alexnet from tensorflow.contrib.slim.python.slim.nets import inception from tensorflow.contrib.slim.python.slim.nets import overfeat from tensorflow.contrib.slim.python.slim.nets import resnet_utils from tensorflow.contrib.slim.python.slim.nets import resnet_v1 from tensorflow.contrib.slim.python.slim.nets import resnet_v2 from tensorflow.contrib.slim.python.slim.nets import vgg from tensorflow.python.util.all_util import make_all # pylint: enable=unused-import __all__ = make_all(__name__)
mit
aldian/tensorflow
tensorflow/contrib/nearest_neighbor/python/kernel_tests/hyperplane_lsh_probes_test.py
51
1939
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for hyperplane_lsh_probes.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib.nearest_neighbor.python.ops.nearest_neighbor_ops import hyperplane_lsh_probes from tensorflow.python.platform import test class HyperplaneLshProbesTest(test.TestCase): # We only test the batch functionality of the op here because the multiprobe # tests in hyperplane_lsh_probes_test.cc already cover most of the LSH # functionality. def simple_batch_test(self): with self.test_session(): hyperplanes = np.eye(4) points = np.array([[1.2, 0.5, -0.9, -1.0], [2.0, -3.0, 1.0, -1.5]]) product = np.dot(points, hyperplanes) num_tables = 2 num_hyperplanes_per_table = 2 num_probes = 4 hashes, tables = hyperplane_lsh_probes(product, num_tables, num_hyperplanes_per_table, num_probes) self.assertAllEqual(hashes.eval(), [[3, 0, 2, 2], [2, 2, 0, 3]]) self.assertAllEqual(tables.eval(), [[0, 1, 0, 1], [0, 1, 1, 1]]) if __name__ == '__main__': test.main()
apache-2.0
alvarolopez/nova
nova/objects/block_device.py
6
12238
# Copyright 2013 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from nova import block_device from nova.cells import opts as cells_opts from nova.cells import rpcapi as cells_rpcapi from nova import db from nova import exception from nova.i18n import _ from nova import objects from nova.objects import base from nova.objects import fields LOG = logging.getLogger(__name__) _BLOCK_DEVICE_OPTIONAL_JOINED_FIELD = ['instance'] BLOCK_DEVICE_OPTIONAL_ATTRS = _BLOCK_DEVICE_OPTIONAL_JOINED_FIELD def _expected_cols(expected_attrs): return [attr for attr in expected_attrs if attr in _BLOCK_DEVICE_OPTIONAL_JOINED_FIELD] # TODO(berrange): Remove NovaObjectDictCompat @base.NovaObjectRegistry.register class BlockDeviceMapping(base.NovaPersistentObject, base.NovaObject, base.NovaObjectDictCompat): # Version 1.0: Initial version # Version 1.1: Add instance_uuid to get_by_volume_id method # Version 1.2: Instance version 1.14 # Version 1.3: Instance version 1.15 # Version 1.4: Instance version 1.16 # Version 1.5: Instance version 1.17 # Version 1.6: Instance version 1.18 # Version 1.7: Add update_or_create method # Version 1.8: Instance version 1.19 # Version 1.9: Instance version 1.20 VERSION = '1.9' fields = { 'id': fields.IntegerField(), 'instance_uuid': fields.UUIDField(), 'instance': fields.ObjectField('Instance', nullable=True), 'source_type': fields.StringField(nullable=True), 'destination_type': fields.StringField(nullable=True), 
'guest_format': fields.StringField(nullable=True), 'device_type': fields.StringField(nullable=True), 'disk_bus': fields.StringField(nullable=True), 'boot_index': fields.IntegerField(nullable=True), 'device_name': fields.StringField(nullable=True), 'delete_on_termination': fields.BooleanField(default=False), 'snapshot_id': fields.StringField(nullable=True), 'volume_id': fields.StringField(nullable=True), 'volume_size': fields.IntegerField(nullable=True), 'image_id': fields.StringField(nullable=True), 'no_device': fields.BooleanField(default=False), 'connection_info': fields.StringField(nullable=True), } obj_relationships = { 'instance': [('1.0', '1.13'), ('1.2', '1.14'), ('1.3', '1.15'), ('1.4', '1.16'), ('1.5', '1.17'), ('1.6', '1.18'), ('1.8', '1.19'), ('1.9', '1.20')], } @staticmethod def _from_db_object(context, block_device_obj, db_block_device, expected_attrs=None): if expected_attrs is None: expected_attrs = [] for key in block_device_obj.fields: if key in BLOCK_DEVICE_OPTIONAL_ATTRS: continue block_device_obj[key] = db_block_device[key] if 'instance' in expected_attrs: my_inst = objects.Instance(context) my_inst._from_db_object(context, my_inst, db_block_device['instance']) block_device_obj.instance = my_inst block_device_obj._context = context block_device_obj.obj_reset_changes() return block_device_obj def _create(self, context, update_or_create=False): """Create the block device record in the database. In case the id field is set on the object, and if the instance is set raise an ObjectActionError. Resets all the changes on the object. Returns None :param context: security context used for database calls :param update_or_create: consider existing block devices for the instance based on the device name and swap, and only update the ones that match. Normally only used when creating the instance for the first time. 
""" cell_type = cells_opts.get_cell_type() if cell_type == 'api': raise exception.ObjectActionError( action='create', reason='BlockDeviceMapping cannot be ' 'created in the API cell.') if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.obj_get_changes() if 'instance' in updates: raise exception.ObjectActionError(action='create', reason='instance assigned') cells_create = update_or_create or None if update_or_create: db_bdm = db.block_device_mapping_update_or_create( context, updates, legacy=False) else: db_bdm = db.block_device_mapping_create( context, updates, legacy=False) self._from_db_object(context, self, db_bdm) if cell_type == 'compute': cells_api = cells_rpcapi.CellsAPI() cells_api.bdm_update_or_create_at_top( context, self, create=cells_create) @base.remotable def create(self): self._create(self._context) @base.remotable def update_or_create(self): self._create(self._context, update_or_create=True) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') db.block_device_mapping_destroy(self._context, self.id) delattr(self, base.get_attrname('id')) cell_type = cells_opts.get_cell_type() if cell_type == 'compute': cells_api = cells_rpcapi.CellsAPI() cells_api.bdm_destroy_at_top(self._context, self.instance_uuid, device_name=self.device_name, volume_id=self.volume_id) @base.remotable def save(self): updates = self.obj_get_changes() if 'instance' in updates: raise exception.ObjectActionError(action='save', reason='instance changed') updates.pop('id', None) updated = db.block_device_mapping_update(self._context, self.id, updates, legacy=False) if not updated: raise exception.BDMNotFound(id=self.id) self._from_db_object(self._context, self, updated) cell_type = cells_opts.get_cell_type() if cell_type == 'compute': cells_api = cells_rpcapi.CellsAPI() 
cells_api.bdm_update_or_create_at_top(self._context, self) @base.remotable_classmethod def get_by_volume_id(cls, context, volume_id, instance_uuid=None, expected_attrs=None): if expected_attrs is None: expected_attrs = [] db_bdm = db.block_device_mapping_get_by_volume_id( context, volume_id, _expected_cols(expected_attrs)) if not db_bdm: raise exception.VolumeBDMNotFound(volume_id=volume_id) # NOTE (ndipanov): Move this to the db layer into a # get_by_instance_and_volume_id method if instance_uuid and instance_uuid != db_bdm['instance_uuid']: raise exception.InvalidVolume( reason=_("Volume does not belong to the " "requested instance.")) return cls._from_db_object(context, cls(), db_bdm, expected_attrs=expected_attrs) @property def is_root(self): return self.boot_index == 0 @property def is_volume(self): return self.destination_type == 'volume' @property def is_image(self): return self.source_type == 'image' def get_image_mapping(self): return block_device.BlockDeviceDict(self).get_image_mapping() def obj_load_attr(self, attrname): if attrname not in BLOCK_DEVICE_OPTIONAL_ATTRS: raise exception.ObjectActionError( action='obj_load_attr', reason='attribute %s not lazy-loadable' % attrname) if not self._context: raise exception.OrphanedObjectError(method='obj_load_attr', objtype=self.obj_name()) LOG.debug("Lazy-loading `%(attr)s' on %(name)s uuid %(uuid)s", {'attr': attrname, 'name': self.obj_name(), 'uuid': self.uuid, }) self.instance = objects.Instance.get_by_uuid(self._context, self.instance_uuid) self.obj_reset_changes(fields=['instance']) @base.NovaObjectRegistry.register class BlockDeviceMappingList(base.ObjectListBase, base.NovaObject): # Version 1.0: Initial version # Version 1.1: BlockDeviceMapping <= version 1.1 # Version 1.2: Added use_slave to get_by_instance_uuid # Version 1.3: BlockDeviceMapping <= version 1.2 # Version 1.4: BlockDeviceMapping <= version 1.3 # Version 1.5: BlockDeviceMapping <= version 1.4 # Version 1.6: BlockDeviceMapping <= version 1.5 
# Version 1.7: BlockDeviceMapping <= version 1.6 # Version 1.8: BlockDeviceMapping <= version 1.7 # Version 1.9: BlockDeviceMapping <= version 1.8 # Version 1.10: BlockDeviceMapping <= version 1.9 VERSION = '1.10' fields = { 'objects': fields.ListOfObjectsField('BlockDeviceMapping'), } child_versions = { '1.0': '1.0', '1.1': '1.1', '1.2': '1.1', '1.3': '1.2', '1.4': '1.3', '1.5': '1.4', '1.6': '1.5', '1.7': '1.6', '1.8': '1.7', '1.9': '1.8', '1.10': '1.9', } @base.remotable_classmethod def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False): db_bdms = db.block_device_mapping_get_all_by_instance( context, instance_uuid, use_slave=use_slave) return base.obj_make_list( context, cls(), objects.BlockDeviceMapping, db_bdms or []) def root_bdm(self): try: return next(bdm_obj for bdm_obj in self if bdm_obj.is_root) except StopIteration: return def root_metadata(self, context, image_api, volume_api): root_bdm = self.root_bdm() if not root_bdm: return {} if root_bdm.is_volume: try: volume = volume_api.get(context, root_bdm.volume_id) return volume.get('volume_image_metadata', {}) except Exception: raise exception.InvalidBDMVolume(id=root_bdm.id) elif root_bdm.is_image: try: image_meta = image_api.show(context, root_bdm.image_id) return image_meta.get('properties', {}) except Exception: raise exception.InvalidBDMImage(id=root_bdm.id) else: return {} def block_device_make_list(context, db_list, **extra_args): return base.obj_make_list(context, objects.BlockDeviceMappingList(context), objects.BlockDeviceMapping, db_list, **extra_args) def block_device_make_list_from_dicts(context, bdm_dicts_list): bdm_objects = [objects.BlockDeviceMapping(context=context, **bdm) for bdm in bdm_dicts_list] return BlockDeviceMappingList(objects=bdm_objects)
apache-2.0
fedya/ajenti
plugins/pkgman/pm_ports.py
17
4595
import os import subprocess from ajenti.com import * from ajenti import utils from ajenti import apis class PortsPackageManager(Plugin): implements(apis.pkgman.IPackageManager) platform = ['freebsd'] _pending = {} def refresh(self, st): p = utils.shell('pkg_version|grep \'<\'').split('\n') a = self._get_all() st.upgradeable = {} for x in p: try: s = x.split()[0] st.upgradeable[s] = a[s] except: pass st.pending = {} try: ss = open('/tmp/ajenti-ports-pending.list', 'r').read().splitlines() for s in ss: s = s.split() try: st.pending[s[1]] = s[0] except: pass except: pass st.full = a def get_lists(self): utils.shell_bg('portsnap fetch', output='/tmp/ajenti-ports-output', deleteout=True) def search(self, q, st): ss = utils.shell('cd /usr/ports; make search name=%s' % q).splitlines() a = st.full r = {} while len(ss)>0: if ss[0].startswith('Port'): pkg = apis.pkgman.Package() pkg.name = ss[0].split()[1].split('-')[0] pkg.state = 'removed' if a.has_key(pkg.name) and a[pkg.name].state == 'installed': pkg.state = 'installed' r[pkg.name] = pkg if ss[0].startswith('Info'): pkg.description = ' '.join(ss[0].split()[1:]) ss = ss[1:] return r def mark_install(self, st, name): st.pending[name] = 'install' self._save_pending(st.pending) def mark_remove(self, st, name): st.pending[name] = 'remove' self._save_pending(st.pending) def mark_cancel(self, st, name): del st.pending[name] self._save_pending(st.pending) def mark_cancel_all(self, st): st.pending = {} self._save_pending(st.pending) def apply(self, st): cmd = 'portupgrade -R' cmd2 = 'pkg_deinstall -r' for x in st.pending: if st.pending[x] == 'install': cmd += ' ' + x else: cmd2 += ' ' + x utils.shell_bg('%s; %s'%(cmd,cmd2), output='/tmp/ajenti-ports-output', deleteout=True) def is_busy(self): return os.path.exists('/tmp/ajenti-ports-output') def get_busy_status(self): try: return open('/tmp/ajenti-ports-output', 'r').read().splitlines()[-1] except: return '' def get_expected_result(self, st): cmd = 'portupgrade -Rn' cmd2 = 
'pkg_deinstall -nr' for x in st.pending: if st.pending[x] == 'install': cmd += ' ' + x else: cmd2 += ' ' + x r = utils.shell('%s; %s | grep \'[+-] \''%(cmd,cmd2)).splitlines() res = {} for x in r: s = x.split() if not s[0] in ['+', '-']: continue name = '-'.join(s[-1].split('-')[:-1])[1:] if s[0] == '+': res[name] = 'install' else: res[name] = 'remove' return res def abort(self): utils.shell('pkill make') utils.shell('rm /tmp/ajenti-ports-output') def get_info(self, pkg): i = apis.pkgman.PackageInfo() ss = utils.shell('pkg_info \'%s-*\''%pkg).split('\n') i.installed = '' i.available = ss[0].split('-')[-1][:-1] while len(ss)>0 and not ss[0].startswith('Desc'): ss = ss[1:] ss = ss[1:] i.description = '\n'.join(ss) return i def get_info_ui(self, pkg): return None def _save_pending(self, p): f = open('/tmp/ajenti-ports-pending.list', 'w') for x in p: f.write('%s %s\n' % (p[x], x)) f.close() def _get_all(self): ss = utils.shell('pkg_info').splitlines() r = {} for s in ss: s = s.split() try: p = apis.pkgman.Package() nv = s[0].split('-') p.name = '-'.join(nv[0:-1]) p.version = nv[-1] p.description = ' '.join(s[1:]) p.state = 'installed' r[p.name] = p if len(r.keys()) > 250: break except: pass return r
lgpl-3.0
TabitaPL/Pasjans
Angel-3.2/Code/Tools/swigwin-2.0.6/Tools/pyname_patch.py
7
5043
#!/usr/bin/env python """ From SWIG 1.3.37 we deprecated all SWIG symbols that start with Py, since they are inappropriate and discouraged in Python documentation (from http://www.python.org/doc/2.5.2/api/includes.html): "All user visible names defined by Python.h (except those defined by the included standard headers) have one of the prefixes "Py" or "_Py". Names beginning with "_Py" are for internal use by the Python implementation and should not be used by extension writers. Structure member names do not have a reserved prefix. Important: user code should never define names that begin with "Py" or "_Py". This confuses the reader, and jeopardizes the portability of the user code to future Python versions, which may define additional names beginning with one of these prefixes." This file is a simple script used for change all of these symbols, for user code or SWIG itself. """ import re from shutil import copyfile import sys symbols = [ #(old name, new name) ("PySequence_Base", "SwigPySequence_Base"), ("PySequence_Cont", "SwigPySequence_Cont"), ("PySwigIterator_T", "SwigPyIterator_T"), ("PyPairBoolOutputIterator", "SwigPyPairBoolOutputIterator"), ("PySwigIterator", "SwigPyIterator"), ("PySwigIterator_T", "SwigPyIterator_T"), ("PyMapIterator_T", "SwigPyMapIterator_T"), ("PyMapKeyIterator_T", "SwigPyMapKeyIterator_T"), ("PyMapValueIterator_T", "SwigPyMapValueITerator_T"), ("PyObject_ptr", "SwigPtr_PyObject"), ("PyObject_var", "SwigVar_PyObject"), ("PyOper", "SwigPyOper"), ("PySeq", "SwigPySeq"), ("PySequence_ArrowProxy", "SwigPySequence_ArrowProxy"), ("PySequence_Cont", "SwigPySequence_Cont"), ("PySequence_InputIterator", "SwigPySequence_InputIterator"), ("PySequence_Ref", "SwigPySequence_Ref"), ("PySwigClientData", "SwigPyClientData"), ("PySwigClientData_Del", "SwigPyClientData_Del"), ("PySwigClientData_New", "SwigPyClientData_New"), ("PySwigIterator", "SwigPyIterator"), ("PySwigIteratorClosed_T", "SwigPyIteratorClosed_T"), ("PySwigIteratorOpen_T", 
"SwigPyIteratorOpen_T"), ("PySwigIterator_T", "SwigPyIterator_T"), ("PySwigObject", "SwigPyObject"), ("PySwigObject_Check", "SwigPyObject_Check"), ("PySwigObject_GetDesc", "SwigPyObject_GetDesc"), ("PySwigObject_New", "SwigPyObject_New"), ("PySwigObject_acquire", "SwigPyObject_acquire"), ("PySwigObject_append", "SwigPyObject_append"), ("PySwigObject_as_number", "SwigPyObject_as_number"), ("PySwigObject_compare", "SwigPyObject_compare"), ("PySwigObject_dealloc", "SwigPyObject_dealloc"), ("PySwigObject_disown", "SwigPyObject_disown"), ("PySwigObject_format", "SwigPyObject_format"), ("PySwigObject_getattr", "SwigPyObject_getattr"), ("PySwigObject_hex", "SwigPyObject_hex"), ("PySwigObject_long", "SwigPyObject_long"), ("PySwigObject_next", "SwigPyObject_next"), ("PySwigObject_oct", "SwigPyObject_oct"), ("PySwigObject_own", "SwigPyObject_own"), ("PySwigObject_print", "SwigPyObject_print"), ("PySwigObject_repr", "SwigPyObject_repr"), ("PySwigObject_richcompare", "SwigPyObject_richcompare"), ("PySwigObject_str", "SwigPyObject_str"), ("PySwigObject_type", "SwigPyObject_type"), ("PySwigPacked", "SwigPyPacked"), ("PySwigPacked_Check", "SwigPyPacked_Check"), ("PySwigPacked_New", "SwigPyPacked_New"), ("PySwigPacked_UnpackData", "SwigPyPacked_UnpackData"), ("PySwigPacked_compare", "SwigPyPacked_compare"), ("PySwigPacked_dealloc", "SwigPyPacked_dealloc"), ("PySwigPacked_print", "SwigPyPacked_print"), ("PySwigPacked_repr", "SwigPyPacked_repr"), ("PySwigPacked_str", "SwigPyPacked_str"), ("PySwigPacked_type", "SwigPyPacked_type"), ("pyseq", "swigpyseq"), ("pyswigobject_type", "swigpyobject_type"), ("pyswigpacked_type", "swigpypacked_type"), ] res = [(re.compile("\\b(%s)\\b"%oldname), newname) for oldname, newname in symbols] def patch_file(fn): newf = [] changed = False for line in open(fn): for r, newname in res: line, n = r.subn(newname, line) if n>0: changed = True newf.append(line) if changed: copyfile(fn, fn+".bak") f = open(fn, "w") f.write("".join(newf)) f.close() return 
changed def main(fns): for fn in fns: try: if patch_file(fn): print "Patched file", fn except IOError: print "Error occured during patching", fn return if __name__=="__main__": if len(sys.argv) > 1: main(sys.argv[1:]) else: print "Patch your interface file for SWIG's Py* symbol name deprecation." print "Usage:" print " %s files..."%sys.argv[0]
mit
numerodix/memcache.rs
pyemc/test_stress.py
2
1302
import time

from pyemc.abstractions.test_api import TestCase


class TestStress(TestCase):
    """Throughput benchmarks for the memcache client."""

    def run_bench(self, func, loops, desc):
        """Call ``func`` repeatedly and report the achieved request rate.

        :param func: zero-argument callable issuing one request.
        :param loops: number of timed iterations.
        :param desc: human-readable label used in the report line.
        """
        # Untimed warmup at one third of the measured loop count.
        # Bug fix: the warmup loop previously iterated ``loops`` times,
        # ignoring ``warmup_loops`` entirely and inflating every
        # benchmark's wall-clock time.
        warmup_loops = loops / 3
        for _ in xrange(warmup_loops):
            func()

        # Timed execution.
        start_time = time.time()
        for _ in xrange(loops):
            func()
        end_time = time.time()

        interval = end_time - start_time
        rate = float(loops) / interval
        self.write("Made %d %s requests in %.2f seconds = %.2f requests/sec" %
                   (loops, desc, interval, rate))

    def test_set_const_key_noreply(self):
        def func():
            self.client.set('x', 'abc', noreply=True)

        self.run_bench(func, 700000, 'constant key set+noreply')

    def test_set_const_key(self):
        def func():
            self.client.set('x', 'abc')

        self.run_bench(func, 100000, 'constant key set')

    def test_get_const_key(self):
        self.client.set('x', 'abc')

        def func():
            self.client.get('x')

        self.run_bench(func, 100000, 'constant key get')

    def test_version(self):
        '''Does not even touch the storage layer.'''
        def func():
            self.client.version()

        self.run_bench(func, 100000, 'version')
mit
PaddlePaddle/Paddle
python/paddle/fluid/tests/unittests/test_dist_sparse_tensor_load_ftrl.py
2
1686
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import os import unittest import numpy as np import tempfile import shutil from op_test import OpTest, randomize_probability import paddle import paddle.fluid as fluid import paddle.fluid.layers as layers import paddle.distributed.fleet.base.role_maker as role_maker from paddle.distributed.fleet import fleet from test_dist_sparse_tensor_load_sgd import TestSparseLoadProgram class TestSparseLoadProgramFtrl(TestSparseLoadProgram): """ Test Sparse load operator. """ def test_server_init(self): scope, train_program, startup_program, loss = self.net() with fluid.scope_guard(scope): with fluid.program_guard(train_program, startup_program): optimizer = fluid.optimizer.SGD(1e-3) optimizer = fleet.distributed_optimizer(optimizer, self.strategy) optimizer.minimize(loss) fleet.init_server() if __name__ == "__main__": paddle.enable_static() unittest.main()
apache-2.0
Kiiv/Sick-Beard
sickbeard/clients/requests/packages/charade/escprober.py
206
3273
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from . 
import constants from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, ISO2022KRSMModel) from .charsetprober import CharSetProber from .codingstatemachine import CodingStateMachine from .compat import wrap_ord class EscCharSetProber(CharSetProber): def __init__(self): CharSetProber.__init__(self) self._mCodingSM = [ CodingStateMachine(HZSMModel), CodingStateMachine(ISO2022CNSMModel), CodingStateMachine(ISO2022JPSMModel), CodingStateMachine(ISO2022KRSMModel) ] self.reset() def reset(self): CharSetProber.reset(self) for codingSM in self._mCodingSM: if not codingSM: continue codingSM.active = True codingSM.reset() self._mActiveSM = len(self._mCodingSM) self._mDetectedCharset = None def get_charset_name(self): return self._mDetectedCharset def get_confidence(self): if self._mDetectedCharset: return 0.99 else: return 0.00 def feed(self, aBuf): for c in aBuf: # PY3K: aBuf is a byte array, so c is an int, not a byte for codingSM in self._mCodingSM: if not codingSM: continue if not codingSM.active: continue codingState = codingSM.next_state(wrap_ord(c)) if codingState == constants.eError: codingSM.active = False self._mActiveSM -= 1 if self._mActiveSM <= 0: self._mState = constants.eNotMe return self.get_state() elif codingState == constants.eItsMe: self._mState = constants.eFoundIt self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8 return self.get_state() return self.get_state()
gpl-3.0
nubark/odoo
addons/payment_paypal/controllers/main.py
7
5396
# -*- coding: utf-8 -*- import json import logging import pprint import urllib import urllib2 import werkzeug from openerp import http, SUPERUSER_ID from openerp.addons.payment.models.payment_acquirer import ValidationError from openerp.http import request _logger = logging.getLogger(__name__) class PaypalController(http.Controller): _notify_url = '/payment/paypal/ipn/' _return_url = '/payment/paypal/dpn/' _cancel_url = '/payment/paypal/cancel/' def _get_return_url(self, **post): """ Extract the return URL from the data coming from paypal. """ return_url = post.pop('return_url', '') if not return_url: custom = json.loads(post.pop('custom', False) or post.pop('cm', False) or '{}') return_url = custom.get('return_url', '/') return return_url def _parse_pdt_response(self, response): """ Parse a text response for a PDT verification . :param response str: text response, structured in the following way: STATUS\nkey1=value1\nkey2=value2...\n :rtype tuple(str, dict) :return: tuple containing the STATUS str and the key/value pairs parsed as a dict """ lines = filter(None, response.split('\n')) status = lines.pop(0) pdt_post = dict(line.split('=', 1) for line in lines) # html unescape for post in pdt_post: pdt_post[post] = urllib.unquote_plus(pdt_post[post]).decode('utf8') return status, pdt_post def paypal_validate_data(self, **post): """ Paypal IPN: three steps validation to ensure data correctness - step 1: return an empty HTTP 200 response -> will be done at the end by returning '' - step 2: POST the complete, unaltered message back to Paypal (preceded by cmd=_notify-validate or _notify-synch for PDT), with same encoding - step 3: paypal send either VERIFIED or INVALID (single word) for IPN or SUCCESS or FAIL (+ data) for PDT Once data is validated, process it. 
""" res = False new_post = dict(post, cmd='_notify-validate') cr, uid, context = request.cr, request.uid, request.context reference = post.get('item_number') tx = None if reference: tx_ids = request.registry['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context) if tx_ids: tx = request.registry['payment.transaction'].browse(cr, uid, tx_ids[0], context=context) pdt_request = bool(new_post.get('amt')) # check for spefific pdt param if pdt_request: # this means we are in PDT instead of DPN like before # fetch the PDT token new_post['at'] = request.registry['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'payment_paypal.pdt_token') new_post['cmd'] = '_notify-synch' # command is different in PDT than IPN/DPN paypal_urls = request.registry['payment.acquirer']._get_paypal_urls(cr, uid, tx and tx.acquirer_id and tx.acquirer_id.environment or 'prod', context=context) validate_url = paypal_urls['paypal_form_url'] urequest = urllib2.Request(validate_url, werkzeug.url_encode(new_post)) uopen = urllib2.urlopen(urequest) resp = uopen.read() if pdt_request: resp, post = self._parse_pdt_response(resp) if resp == 'VERIFIED' or pdt_request and resp == 'SUCCESS': _logger.info('Paypal: validated data') res = request.registry['payment.transaction'].form_feedback(cr, SUPERUSER_ID, post, 'paypal', context=context) elif resp == 'INVALID' or pdt_request and resp == 'FAIL': _logger.warning('Paypal: answered INVALID/FAIL on data verification') else: _logger.warning('Paypal: unrecognized paypal answer, received %s instead of VERIFIED/SUCCESS or INVALID/FAIL (validation: %s)' % (resp, 'PDT' if pdt_request else 'IPN/DPN')) return res @http.route('/payment/paypal/ipn/', type='http', auth='none', methods=['POST'], csrf=False) def paypal_ipn(self, **post): """ Paypal IPN. 
""" _logger.info('Beginning Paypal IPN form_feedback with post data %s', pprint.pformat(post)) # debug try: self.paypal_validate_data(**post) except ValidationError: _logger.exception('Unable to validate the Paypal payment') return '' @http.route('/payment/paypal/dpn', type='http', auth="none", methods=['POST', 'GET'], csrf=False) def paypal_dpn(self, **post): """ Paypal DPN """ _logger.info('Beginning Paypal DPN form_feedback with post data %s', pprint.pformat(post)) # debug return_url = self._get_return_url(**post) self.paypal_validate_data(**post) return werkzeug.utils.redirect(return_url) @http.route('/payment/paypal/cancel', type='http', auth="none", csrf=False) def paypal_cancel(self, **post): """ When the user cancels its Paypal payment: GET on this route """ cr, uid, context = request.cr, SUPERUSER_ID, request.context _logger.info('Beginning Paypal cancel with post data %s', pprint.pformat(post)) # debug return_url = self._get_return_url(**post) return werkzeug.utils.redirect(return_url)
gpl-3.0
openhardnudd/QMarkdowner
Cheetah/Servlet.py
16
1261
#!/usr/bin/env python ''' Provides an abstract Servlet baseclass for Cheetah's Template class ''' import sys import os.path class Servlet(object): """ This class is an abstract baseclass for Cheetah.Template.Template. """ transaction = None application = None request = None session = None def respond(self, trans=None): raise NotImplementedError("""\ couldn't find the template's main method. If you are using #extends without #implements, try adding '#implements respond' to your template definition.""") def sleep(self, transaction): super(Servlet, self).sleep(transaction) self.session = None self.request = None self._request = None self.response = None self.transaction = None def shutdown(self): pass def serverSidePath(self, path=None, normpath=os.path.normpath, abspath=os.path.abspath ): if path: return normpath(abspath(path.replace("\\", '/'))) elif hasattr(self, '_filePath') and self._filePath: return normpath(abspath(self._filePath)) else: return None # vim: shiftwidth=4 tabstop=4 expandtab
mit
msegado/edx-platform
openedx/core/djangoapps/user_api/serializers.py
55
2730
""" Django REST Framework serializers for the User API application """ from django.contrib.auth.models import User from rest_framework import serializers from student.models import UserProfile from .models import UserPreference class UserSerializer(serializers.HyperlinkedModelSerializer): """ Serializer that generates a representation of a User entity containing a subset of fields """ name = serializers.SerializerMethodField() preferences = serializers.SerializerMethodField() def get_name(self, user): """ Return the name attribute from the user profile object """ profile = UserProfile.objects.get(user=user) return profile.name def get_preferences(self, user): """ Returns the set of preferences as a dict for the specified user """ return dict([(pref.key, pref.value) for pref in user.preferences.all()]) class Meta(object): model = User # This list is the minimal set required by the notification service fields = ("id", "url", "email", "name", "username", "preferences") read_only_fields = ("id", "email", "username") class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer): """ Serializer that generates a represenation of a UserPreference entity """ user = UserSerializer() class Meta(object): model = UserPreference depth = 1 class RawUserPreferenceSerializer(serializers.ModelSerializer): """ Serializer that generates a raw representation of a user preference. """ user = serializers.PrimaryKeyRelatedField(queryset=User.objects.all()) class Meta(object): model = UserPreference depth = 1 class ReadOnlyFieldsSerializerMixin(object): """ Mixin for use with Serializers that provides a method `get_read_only_fields`, which returns a tuple of all read-only fields on the Serializer. """ @classmethod def get_read_only_fields(cls): """ Return all fields on this Serializer class which are read-only. 
Expects sub-classes implement Meta.explicit_read_only_fields, which is a tuple declaring read-only fields which were declared explicitly and thus could not be added to the usual cls.Meta.read_only_fields tuple. """ return getattr(cls.Meta, 'read_only_fields', '') + getattr(cls.Meta, 'explicit_read_only_fields', '') @classmethod def get_writeable_fields(cls): """ Return all fields on this serializer that are writeable. """ all_fields = getattr(cls.Meta, 'fields', tuple()) return tuple(set(all_fields) - set(cls.get_read_only_fields()))
agpl-3.0
alexmogavero/home-assistant
homeassistant/components/light/rpi_gpio_pwm.py
7
6513
""" Support for LED lights that can be controlled using PWM. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/light.pwm/ """ import logging import voluptuous as vol from homeassistant.const import CONF_NAME, CONF_TYPE from homeassistant.components.light import ( Light, ATTR_BRIGHTNESS, ATTR_RGB_COLOR, ATTR_TRANSITION, SUPPORT_BRIGHTNESS, SUPPORT_RGB_COLOR, SUPPORT_TRANSITION, PLATFORM_SCHEMA) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['pwmled==1.1.1'] _LOGGER = logging.getLogger(__name__) CONF_LEDS = 'leds' CONF_DRIVER = 'driver' CONF_PINS = 'pins' CONF_FREQUENCY = 'frequency' CONF_ADDRESS = 'address' CONF_DRIVER_GPIO = 'gpio' CONF_DRIVER_PCA9685 = 'pca9685' CONF_DRIVER_TYPES = [CONF_DRIVER_GPIO, CONF_DRIVER_PCA9685] CONF_LED_TYPE_SIMPLE = 'simple' CONF_LED_TYPE_RGB = 'rgb' CONF_LED_TYPE_RGBW = 'rgbw' CONF_LED_TYPES = [CONF_LED_TYPE_SIMPLE, CONF_LED_TYPE_RGB, CONF_LED_TYPE_RGBW] DEFAULT_COLOR = [255, 255, 255] SUPPORT_SIMPLE_LED = (SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION) SUPPORT_RGB_LED = (SUPPORT_BRIGHTNESS | SUPPORT_RGB_COLOR | SUPPORT_TRANSITION) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_LEDS): vol.All(cv.ensure_list, [ { vol.Required(CONF_NAME): cv.string, vol.Required(CONF_DRIVER): vol.In(CONF_DRIVER_TYPES), vol.Required(CONF_PINS): vol.All(cv.ensure_list, [cv.positive_int]), vol.Required(CONF_TYPE): vol.In(CONF_LED_TYPES), vol.Optional(CONF_FREQUENCY): cv.positive_int, vol.Optional(CONF_ADDRESS): cv.byte } ]) }) def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the PWM LED lights.""" from pwmled.led import SimpleLed from pwmled.led.rgb import RgbLed from pwmled.led.rgbw import RgbwLed from pwmled.driver.gpio import GpioDriver from pwmled.driver.pca9685 import Pca9685Driver leds = [] for led_conf in config[CONF_LEDS]: driver_type = led_conf[CONF_DRIVER] pins = led_conf[CONF_PINS] opt_args = {} if CONF_FREQUENCY in led_conf: 
opt_args['freq'] = led_conf[CONF_FREQUENCY] if driver_type == CONF_DRIVER_GPIO: driver = GpioDriver(pins, **opt_args) elif driver_type == CONF_DRIVER_PCA9685: if CONF_ADDRESS in led_conf: opt_args['address'] = led_conf[CONF_ADDRESS] driver = Pca9685Driver(pins, **opt_args) else: _LOGGER.error("Invalid driver type") return name = led_conf[CONF_NAME] led_type = led_conf[CONF_TYPE] if led_type == CONF_LED_TYPE_SIMPLE: led = PwmSimpleLed(SimpleLed(driver), name) elif led_type == CONF_LED_TYPE_RGB: led = PwmRgbLed(RgbLed(driver), name) elif led_type == CONF_LED_TYPE_RGBW: led = PwmRgbLed(RgbwLed(driver), name) else: _LOGGER.error("Invalid led type") return leds.append(led) add_devices(leds) class PwmSimpleLed(Light): """Representation of a simple one-color PWM LED.""" def __init__(self, led, name): """Initialize one-color PWM LED.""" self._led = led self._name = name self._is_on = False self._brightness = 255 @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the group.""" return self._name @property def is_on(self): """Return true if device is on.""" return self._is_on @property def brightness(self): """Return the brightness property.""" return self._brightness @property def supported_features(self): """Flag supported features.""" return SUPPORT_SIMPLE_LED def turn_on(self, **kwargs): """Turn on a led.""" if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_TRANSITION in kwargs: transition_time = kwargs[ATTR_TRANSITION] self._led.transition( transition_time, is_on=True, brightness=_from_hass_brightness(self._brightness)) else: self._led.set(is_on=True, brightness=_from_hass_brightness(self._brightness)) self._is_on = True self.schedule_update_ha_state() def turn_off(self, **kwargs): """Turn off a LED.""" if self.is_on: if ATTR_TRANSITION in kwargs: transition_time = kwargs[ATTR_TRANSITION] self._led.transition(transition_time, is_on=False) else: self._led.off() self._is_on = 
False self.schedule_update_ha_state() class PwmRgbLed(PwmSimpleLed): """Representation of a RGB(W) PWM LED.""" def __init__(self, led, name): """Initialize a RGB(W) PWM LED.""" super().__init__(led, name) self._color = DEFAULT_COLOR @property def rgb_color(self): """Return the color property.""" return self._color @property def supported_features(self): """Flag supported features.""" return SUPPORT_RGB_LED def turn_on(self, **kwargs): """Turn on a LED.""" if ATTR_RGB_COLOR in kwargs: self._color = kwargs[ATTR_RGB_COLOR] if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] if ATTR_TRANSITION in kwargs: transition_time = kwargs[ATTR_TRANSITION] self._led.transition( transition_time, is_on=True, brightness=_from_hass_brightness(self._brightness), color=_from_hass_color(self._color)) else: self._led.set(is_on=True, brightness=_from_hass_brightness(self._brightness), color=_from_hass_color(self._color)) self._is_on = True self.schedule_update_ha_state() def _from_hass_brightness(brightness): """Convert Home Assistant brightness units to percentage.""" return brightness / 255 def _from_hass_color(color): """Convert Home Assistant RGB list to Color tuple.""" from pwmled import Color return Color(*tuple(color))
apache-2.0
chrrrles/ansible-modules-extras
cloud/centurylink/clc_publicip.py
60
12269
#!/usr/bin/python # # Copyright (c) 2015 CenturyLink # # This file is part of Ansible. # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/> # DOCUMENTATION = ''' module: clc_publicip short_description: Add and Delete public ips on servers in CenturyLink Cloud. description: - An Ansible module to add or delete public ip addresses on an existing server or servers in CenturyLink Cloud. version_added: "2.0" options: protocol: descirption: - The protocol that the public IP will listen for. default: TCP choices: ['TCP', 'UDP', 'ICMP'] required: False ports: description: - A list of ports to expose. required: True server_ids: description: - A list of servers to create public ips on. required: True state: description: - Determine wheteher to create or delete public IPs. If present module will not create a second public ip if one already exists. default: present choices: ['present', 'absent'] required: False wait: description: - Whether to wait for the tasks to finish before returning. 
choices: [ True, False ] default: True required: False requirements: - python = 2.7 - requests >= 2.5.0 - clc-sdk notes: - To use this module, it is required to set the below environment variables which enables access to the Centurylink Cloud - CLC_V2_API_USERNAME, the account login id for the centurylink cloud - CLC_V2_API_PASSWORD, the account passwod for the centurylink cloud - Alternatively, the module accepts the API token and account alias. The API token can be generated using the CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login - CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login - CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud - Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment. ''' EXAMPLES = ''' # Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples - name: Add Public IP to Server hosts: localhost gather_facts: False connection: local tasks: - name: Create Public IP For Servers clc_publicip: protocol: 'TCP' ports: - 80 server_ids: - UC1ACCTSRVR01 - UC1ACCTSRVR02 state: present register: clc - name: debug debug: var=clc - name: Delete Public IP from Server hosts: localhost gather_facts: False connection: local tasks: - name: Create Public IP For Servers clc_publicip: server_ids: - UC1ACCTSRVR01 - UC1ACCTSRVR02 state: absent register: clc - name: debug debug: var=clc ''' __version__ = '${version}' from distutils.version import LooseVersion try: import requests except ImportError: REQUESTS_FOUND = False else: REQUESTS_FOUND = True # # Requires the clc-python-sdk. 
# sudo pip install clc-sdk # try: import clc as clc_sdk from clc import CLCException except ImportError: CLC_FOUND = False clc_sdk = None else: CLC_FOUND = True class ClcPublicIp(object): clc = clc_sdk module = None group_dict = {} def __init__(self, module): """ Construct module """ self.module = module if not CLC_FOUND: self.module.fail_json( msg='clc-python-sdk required for this module') if not REQUESTS_FOUND: self.module.fail_json( msg='requests library is required for this module') if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'): self.module.fail_json( msg='requests library version should be >= 2.5.0') self._set_user_agent(self.clc) def process_request(self): """ Process the request - Main Code Path :param params: dictionary of module parameters :return: Returns with either an exit_json or fail_json """ self._set_clc_credentials_from_env() params = self.module.params server_ids = params['server_ids'] ports = params['ports'] protocol = params['protocol'] state = params['state'] requests = [] chagned_server_ids = [] changed = False if state == 'present': changed, chagned_server_ids, requests = self.ensure_public_ip_present( server_ids=server_ids, protocol=protocol, ports=ports) elif state == 'absent': changed, chagned_server_ids, requests = self.ensure_public_ip_absent( server_ids=server_ids) else: return self.module.fail_json(msg="Unknown State: " + state) self._wait_for_requests_to_complete(requests) return self.module.exit_json(changed=changed, server_ids=chagned_server_ids) @staticmethod def _define_module_argument_spec(): """ Define the argument spec for the ansible module :return: argument spec dictionary """ argument_spec = dict( server_ids=dict(type='list', required=True), protocol=dict(default='TCP', choices=['TCP', 'UDP', 'ICMP']), ports=dict(type='list', required=True), wait=dict(type='bool', default=True), state=dict(default='present', choices=['present', 'absent']), ) return argument_spec def 
ensure_public_ip_present(self, server_ids, protocol, ports): """ Ensures the given server ids having the public ip available :param server_ids: the list of server ids :param protocol: the ip protocol :param ports: the list of ports to expose :return: (changed, changed_server_ids, results) changed: A flag indicating if there is any change changed_server_ids : the list of server ids that are changed results: The result list from clc public ip call """ changed = False results = [] changed_server_ids = [] servers = self._get_servers_from_clc( server_ids, 'Failed to obtain server list from the CLC API') servers_to_change = [ server for server in servers if len( server.PublicIPs().public_ips) == 0] ports_to_expose = [{'protocol': protocol, 'port': port} for port in ports] for server in servers_to_change: if not self.module.check_mode: result = self._add_publicip_to_server(server, ports_to_expose) results.append(result) changed_server_ids.append(server.id) changed = True return changed, changed_server_ids, results def _add_publicip_to_server(self, server, ports_to_expose): result = None try: result = server.PublicIPs().Add(ports_to_expose) except CLCException, ex: self.module.fail_json(msg='Failed to add public ip to the server : {0}. 
{1}'.format( server.id, ex.response_text )) return result def ensure_public_ip_absent(self, server_ids): """ Ensures the given server ids having the public ip removed if there is any :param server_ids: the list of server ids :return: (changed, changed_server_ids, results) changed: A flag indicating if there is any change changed_server_ids : the list of server ids that are changed results: The result list from clc public ip call """ changed = False results = [] changed_server_ids = [] servers = self._get_servers_from_clc( server_ids, 'Failed to obtain server list from the CLC API') servers_to_change = [ server for server in servers if len( server.PublicIPs().public_ips) > 0] for server in servers_to_change: if not self.module.check_mode: result = self._remove_publicip_from_server(server) results.append(result) changed_server_ids.append(server.id) changed = True return changed, changed_server_ids, results def _remove_publicip_from_server(self, server): try: for ip_address in server.PublicIPs().public_ips: result = ip_address.Delete() except CLCException, ex: self.module.fail_json(msg='Failed to remove public ip from the server : {0}. 
{1}'.format( server.id, ex.response_text )) return result def _wait_for_requests_to_complete(self, requests_lst): """ Waits until the CLC requests are complete if the wait argument is True :param requests_lst: The list of CLC request objects :return: none """ if not self.module.params['wait']: return for request in requests_lst: request.WaitUntilComplete() for request_details in request.requests: if request_details.Status() != 'succeeded': self.module.fail_json( msg='Unable to process public ip request') def _set_clc_credentials_from_env(self): """ Set the CLC Credentials on the sdk by reading environment variables :return: none """ env = os.environ v2_api_token = env.get('CLC_V2_API_TOKEN', False) v2_api_username = env.get('CLC_V2_API_USERNAME', False) v2_api_passwd = env.get('CLC_V2_API_PASSWD', False) clc_alias = env.get('CLC_ACCT_ALIAS', False) api_url = env.get('CLC_V2_API_URL', False) if api_url: self.clc.defaults.ENDPOINT_URL_V2 = api_url if v2_api_token and clc_alias: self.clc._LOGIN_TOKEN_V2 = v2_api_token self.clc._V2_ENABLED = True self.clc.ALIAS = clc_alias elif v2_api_username and v2_api_passwd: self.clc.v2.SetCredentials( api_username=v2_api_username, api_passwd=v2_api_passwd) else: return self.module.fail_json( msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD " "environment variables") def _get_servers_from_clc(self, server_ids, message): """ Gets list of servers form CLC api """ try: return self.clc.v2.Servers(server_ids).servers except CLCException as exception: self.module.fail_json(msg=message + ': %s' % exception) @staticmethod def _set_user_agent(clc): if hasattr(clc, 'SetRequestsSession'): agent_string = "ClcAnsibleModule/" + __version__ ses = requests.Session() ses.headers.update({"Api-Client": agent_string}) ses.headers['User-Agent'] += " " + agent_string clc.SetRequestsSession(ses) def main(): """ The main function. Instantiates the module and calls process_request. 
:return: none """ module = AnsibleModule( argument_spec=ClcPublicIp._define_module_argument_spec(), supports_check_mode=True ) clc_public_ip = ClcPublicIp(module) clc_public_ip.process_request() from ansible.module_utils.basic import * # pylint: disable=W0614 if __name__ == '__main__': main()
gpl-3.0
iw3hxn/server
openerp/pychart/afm/Symbol.py
15
1508
# -*- coding: utf-8 -*- # AFM font Symbol (path: /usr/share/fonts/afms/adobe/psyr.afm). # Derived from Ghostscript distribution. # Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code. import dir dir.afm["Symbol"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 250, 333, 713, 500, 549, 833, 778, 439, 333, 333, 500, 549, 250, 549, 250, 278, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 278, 278, 549, 549, 549, 444, 549, 722, 667, 722, 612, 611, 763, 603, 722, 333, 631, 722, 686, 889, 722, 722, 768, 741, 556, 592, 611, 690, 439, 768, 645, 795, 611, 333, 863, 333, 658, 500, 500, 631, 549, 549, 494, 439, 521, 411, 603, 329, 603, 549, 549, 576, 521, 549, 549, 521, 549, 603, 439, 576, 713, 686, 493, 686, 494, 480, 200, 480, 549, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 620, 247, 549, 167, 713, 500, 753, 753, 753, 753, 1042, 987, 603, 987, 603, 400, 549, 411, 549, 549, 713, 494, 460, 549, 549, 549, 549, 1000, 603, 1000, 658, 823, 686, 795, 987, 768, 768, 823, 768, 768, 713, 713, 713, 713, 713, 713, 713, 768, 713, 790, 790, 890, 823, 549, 250, 713, 603, 603, 1042, 987, 603, 987, 603, 494, 329, 790, 790, 786, 713, 384, 384, 384, 384, 384, 384, 494, 494, 494, 494, 500, 329, 274, 686, 686, 686, 384, 384, 384, 384, 384, 384, 494, 494, 494, )
agpl-3.0
dkmatt0/banking-alpha-angularjs
backend/json-bank.py
1
1645
#!/usr/bin/env python3 import simplejsondb as sjdb import json # user { id, nickname, name, password, address, } # # session { id, fk_user, last_login, last_activity, } # # payee { id, name, desc, } # # category { id, name, desc, parent_id, } # # mode { id, name, } # # bank { id, name, bic_code, sort_code, address, phone, mail, website, # contact_name, contact_mail, contact_phone, contact_fax, note, } # # account { id, bank, user, name, iban, number, # min_authorised, min_desired, note, } # # transaction { id, account, payee, category, mode, date, order, # amount, balance, number, note, scheduled, hidden, } # # scheduler { id, transaction, next, frequency, automatic_add, } with open('data/config.json', 'r') as f: config_data = json.load(f) test = sjdb.SimpleJsonDB('test.json', config_data=config_data) # test.set_indent(2) test.save('transaction', amount='20.65') # 1 test.save_db() # print(test.save('transaction', amount='30.47')) # 2 # print(test.save('transaction', convert=False, amount='40.86')) # 3 # print(test.save('transaction', amount='10.20')) # 4 # print(test.save('transaction', id=4, date='2013-01-05')) # 4 # print('# 1 ################') # print(test.show('transaction')) # print('# 2 ################') # test.move('transaction', 3, 0) # print('# 3 ################') # print(test.show('transaction')) # import time # starttt = time.clock() # for i in range(10000): # start = time.clock() # test.save('transaction') # print(i, time.clock() - start) # print(time.clock() - starttt) test.save_db()
agpl-3.0
pinkavaj/gnuradio
gr-fec/python/fec/polar/channel_construction_bec.py
22
8068
#!/usr/bin/env python # # Copyright 2015 Free Software Foundation, Inc. # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # import numpy as np import helper_functions as hf def bec_channel(eta): ''' binary erasure channel (BEC) for each y e Y W(y|0) * W(y|1) = 0 or W(y|0) = W(y|1) transistions are 1 -> 1 or 0 -> 0 or {0, 1} -> ? (erased symbol) ''' # looks like BSC but should be interpreted differently. w = np.array((1 - eta, eta, 1 - eta), dtype=float) return w def odd_rec(iwn): return iwn ** 2 def even_rec(iwn): return 2 * iwn - iwn ** 2 def calc_one_recursion(iw0): iw1 = np.zeros(2 * len(iw0)) # double values for i in range(len(iw0)): # careful indices screw you because paper is '1' based :( iw1[2 * i] = odd_rec(iw0[i]) iw1[2 * i + 1] = even_rec(iw0[i]) return iw1 def calculate_bec_channel_capacities_loop(initial_channel, block_power): # compare [0, Arikan] eq. 6 iw = np.array([initial_channel, ], dtype=float) for i in range(block_power): iw = calc_one_recursion(iw) return iw def calc_vector_capacities_one_recursion(iw0): degraded = odd_rec(iw0) upgraded = even_rec(iw0) iw1 = np.empty(2 * len(iw0), dtype=degraded.dtype) iw1[0::2] = degraded iw1[1::2] = upgraded return iw1 def calculate_bec_channel_capacities_vector(initial_channel, block_power): # compare [0, Arikan] eq. 
6 # this version is ~ 180 times faster than the loop version with 2**22 synthetic channels iw = np.array([initial_channel, ], dtype=float) for i in range(block_power): iw = calc_vector_capacities_one_recursion(iw) return iw def calculate_bec_channel_capacities(eta, block_size): # compare [0, Arikan] eq. 6 iw = 1 - eta # holds for BEC as stated in paper lw = hf.power_of_2_int(block_size) return calculate_bec_channel_capacities_vector(iw, lw) def calculate_z_parameters_one_recursion(z_params): z_next = np.empty(2 * z_params.size, dtype=z_params.dtype) z_sq = z_params ** 2 z_low = 2 * z_params - z_sq z_next[0::2] = z_low z_next[1::2] = z_sq return z_next def calculate_bec_channel_z_parameters(eta, block_size): # compare [0, Arikan] eq. 38 block_power = hf.power_of_2_int(block_size) z_params = np.array([eta, ], dtype=float) for block_size in range(block_power): z_params = calculate_z_parameters_one_recursion(z_params) return z_params def design_snr_to_bec_eta(design_snr): # minimum design snr = -1.5917 corresponds to BER = 0.5 s = 10. ** (design_snr / 10.) return np.exp(-s) def bhattacharyya_bounds(design_snr, block_size): ''' Harish Vangala, Emanuele Viterbo, Yi Hong: 'A Comparative Study of Polar Code Constructions for the AWGN Channel', 2015 In this paper it is called Bhattacharyya bounds channel construction and is abbreviated PCC-0 Best design SNR for block_size = 2048, R = 0.5, is 0dB. Compare with Arikan: 'Channel Polarization: A Method for Constructing Capacity-Achieving Codes for Symmetric Binary-Input Memoryless Channels. Proposition 5. inequalities turn into equalities for BEC channel. Otherwise they represent an upper bound. Also compare [0, Arikan] eq. 6 and 38 For BEC that translates to capacity(i) = 1 - bhattacharyya(i) :return Z-parameters in natural bit-order. Choose according to desired rate. 
''' eta = design_snr_to_bec_eta(design_snr) return calculate_bec_channel_z_parameters(eta, block_size) def plot_channel_capacities(capacity, save_file=None): block_size = len(capacity) try: import matplotlib.pyplot as plt # FUN with matplotlib LaTeX fonts! http://matplotlib.org/users/usetex.html plt.rc('text', usetex=True) plt.rc('font', family='serif') plt.rc('figure', autolayout=True) plt.plot(capacity) plt.xlim([0, block_size]) plt.ylim([-0.01, 1.01]) plt.xlabel('synthetic channel number') plt.ylabel('channel capacity') # plt.title('BEC channel construction') plt.grid() plt.gcf().set_size_inches(plt.gcf().get_size_inches() * .5) if save_file: plt.savefig(save_file) plt.show() except ImportError: pass # only plot in case matplotlib is installed def plot_average_channel_distance(save_file=None): eta = 0.5 # design_snr_to_bec_eta(-1.5917) powers = np.arange(4, 26) try: import matplotlib.pyplot as plt import matplotlib # FUN with matplotlib LaTeX fonts! http://matplotlib.org/users/usetex.html plt.rc('text', usetex=True) plt.rc('font', family='serif') plt.rc('figure', autolayout=True) dist = [] medians = [] initial_channel = 1 - eta for p in powers: bs = int(2 ** p) capacities = calculate_bec_channel_capacities(eta, bs) avg_capacity = np.repeat(initial_channel, len(capacities)) averages = np.abs(capacities - avg_capacity) avg_distance = np.sum(averages) / float(len(capacities)) dist.append(avg_distance) variance = np.std(averages) medians.append(variance) plt.errorbar(powers, dist, yerr=medians) plt.grid() plt.xlabel(r'block size $N$') plt.ylabel(r'$\frac{1}{N} \sum_i |I(W_N^{(i)}) - 0.5|$') axes = plt.axes() tick_values = np.array(axes.get_xticks().tolist()) tick_labels = np.array(tick_values, dtype=int) tick_labels = ['$2^{' + str(i) + '}$' for i in tick_labels] plt.xticks(tick_values, tick_labels) plt.xlim((powers[0], powers[-1])) plt.ylim((0.2, 0.5001)) plt.gcf().set_size_inches(plt.gcf().get_size_inches() * .5) if save_file: plt.savefig(save_file) plt.show() 
except ImportError: pass def plot_capacity_histogram(design_snr, save_file=None): eta = design_snr_to_bec_eta(design_snr) # capacities = calculate_bec_channel_capacities(eta, block_size) try: import matplotlib.pyplot as plt # FUN with matplotlib LaTeX fonts! http://matplotlib.org/users/usetex.html plt.rc('text', usetex=True) plt.rc('font', family='serif') plt.rc('figure', autolayout=True) block_sizes = [32, 128, 512] for b in block_sizes: capacities = calculate_bec_channel_capacities(eta, b) w = 1. / float(len(capacities)) weights = [w, ] * b plt.hist(capacities, bins=b, weights=weights, range=(0.95, 1.0)) plt.grid() plt.xlabel('synthetic channel capacity') plt.ylabel('normalized item count') print(plt.gcf().get_size_inches()) plt.gcf().set_size_inches(plt.gcf().get_size_inches() * .5) if save_file: plt.savefig(save_file) plt.show() except ImportError: pass def main(): print 'channel construction main' n = 11 block_size = int(2 ** n) design_snr = -1.59 eta = design_snr_to_bec_eta(design_snr) # print(calculate_bec_channel_z_parameters(eta, block_size)) # capacity = calculate_bec_channel_capacities(eta, block_size) # plot_average_channel_distance() calculate_bec_channel_z_parameters(eta, block_size) if __name__ == '__main__': main()
gpl-3.0
sbrunner/QGIS
python/plugins/processing/algs/qgis/UniqueValues.py
5
6800
# -*- coding: utf-8 -*- """ *************************************************************************** UniqueValues.py --------------------- Date : August 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'August 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os import codecs from qgis.PyQt.QtGui import QIcon from qgis.core import (QgsCoordinateReferenceSystem, QgsWkbTypes, QgsFeature, QgsFeatureSink, QgsFeatureRequest, QgsFields, QgsProcessing, QgsProcessingParameterField, QgsProcessingParameterFeatureSource, QgsProcessingParameterFeatureSink, QgsProcessingOutputNumber, QgsProcessingOutputString, QgsProcessingParameterFileDestination, QgsProcessingOutputHtml) from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0] class UniqueValues(QgisAlgorithm): INPUT = 'INPUT' FIELDS = 'FIELDS' TOTAL_VALUES = 'TOTAL_VALUES' UNIQUE_VALUES = 'UNIQUE_VALUES' OUTPUT = 'OUTPUT' OUTPUT_HTML_FILE = 'OUTPUT_HTML_FILE' def icon(self): return QIcon(os.path.join(pluginPath, 'images', 'ftools', 'unique.png')) def group(self): return self.tr('Vector analysis') def groupId(self): return 'vectoranalysis' def __init__(self): super().__init__() def initAlgorithm(self, config=None): self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT, self.tr('Input layer'), types=[QgsProcessing.TypeVector])) 
self.addParameter(QgsProcessingParameterField(self.FIELDS, self.tr('Target field(s)'), parentLayerParameterName=self.INPUT, type=QgsProcessingParameterField.Any, allowMultiple=True)) self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT, self.tr('Unique values'), optional=True, defaultValue='')) self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT_HTML_FILE, self.tr('HTML report'), self.tr('HTML files (*.html)'), None, True)) self.addOutput(QgsProcessingOutputHtml(self.OUTPUT_HTML_FILE, self.tr('HTML report'))) self.addOutput(QgsProcessingOutputNumber(self.TOTAL_VALUES, self.tr('Total unique values'))) self.addOutput(QgsProcessingOutputString(self.UNIQUE_VALUES, self.tr('Unique values'))) def name(self): return 'listuniquevalues' def displayName(self): return self.tr('List unique values') def processAlgorithm(self, parameters, context, feedback): source = self.parameterAsSource(parameters, self.INPUT, context) field_names = self.parameterAsFields(parameters, self.FIELDS, context) fields = QgsFields() field_indices = [] for field_name in field_names: field_index = source.fields().lookupField(field_name) if field_index < 0: feedback.reportError(self.tr('Invalid field name {}').format(field_name)) continue field = source.fields()[field_index] fields.append(field) field_indices.append(field_index) (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context, fields, QgsWkbTypes.NoGeometry, QgsCoordinateReferenceSystem()) results = {} values = set() if len(field_indices) == 1: # one field, can use provider optimised method values = tuple([v] for v in source.uniqueValues(field_indices[0])) else: # have to scan whole table # TODO - add this support to QgsVectorDataProvider so we can run it on # the backend request = QgsFeatureRequest().setFlags(QgsFeatureRequest.NoGeometry) request.setSubsetOfAttributes(field_indices) total = 100.0 / source.featureCount() if source.featureCount() else 0 for current, f in 
enumerate(source.getFeatures(request)): if feedback.isCanceled(): break value = tuple(f.attribute(i) for i in field_indices) values.add(value) feedback.setProgress(int(current * total)) if sink: for value in values: if feedback.isCanceled(): break f = QgsFeature() f.setAttributes([attr for attr in value]) sink.addFeature(f, QgsFeatureSink.FastInsert) results[self.OUTPUT] = dest_id output_file = self.parameterAsFileOutput(parameters, self.OUTPUT_HTML_FILE, context) if output_file: self.createHTML(output_file, values) results[self.OUTPUT_HTML_FILE] = output_file results[self.TOTAL_VALUES] = len(values) results[self.UNIQUE_VALUES] = ';'.join([','.join([str(attr) for attr in v]) for v in values]) return results def createHTML(self, outputFile, algData): with codecs.open(outputFile, 'w', encoding='utf-8') as f: f.write('<html><head>') f.write('<meta http-equiv="Content-Type" content="text/html; \ charset=utf-8" /></head><body>') f.write(self.tr('<p>Total unique values: ') + str(len(algData)) + '</p>') f.write(self.tr('<p>Unique values:</p>')) f.write('<ul>') for s in algData: f.write('<li>' + ','.join([str(attr) for attr in s]) + '</li>') f.write('</ul></body></html>')
gpl-2.0
Gitlab11/odoo
addons/account_payment/__init__.py
436
1279
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## #---------------------------------------------------------- # Init Sales #---------------------------------------------------------- import account_payment import wizard import account_move_line import account_invoice import report # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
nysan/yocto-autobuilder
lib/python2.6/site-packages/buildbot_slave-0.8.4-py2.6.egg/buildslave/monkeypatches/bug4881.py
16
6811
# coding=utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members import os from twisted.internet import process from twisted.python import log def patch(): log.msg("Applying patch for http://twistedmatrix.com/trac/ticket/4881") process._listOpenFDs = _listOpenFDs ############################################################################# # Everything below this line was taken verbatim from Twisted, except as # annotated. ######## # r31474:trunk/LICENSE # Copyright (c) 2001-2010 # Allen Short # Andy Gayton # Andrew Bennetts # Antoine Pitrou # Apple Computer, Inc. # Benjamin Bruheim # Bob Ippolito # Canonical Limited # Christopher Armstrong # David Reid # Donovan Preston # Eric Mangold # Eyal Lotem # Itamar Shtull-Trauring # James Knight # Jason A. Mobarak # Jean-Paul Calderone # Jessica McKellar # Jonathan Jacobs # Jonathan Lange # Jonathan D. Simms # Jürgen Hermann # Kevin Horn # Kevin Turner # Mary Gardiner # Matthew Lefkowitz # Massachusetts Institute of Technology # Moshe Zadka # Paul Swartz # Pavel Pergamenshchik # Ralph Meijer # Sean Riley # Software Freedom Conservancy # Travis B. 
Hartwell # Thijs Triemstra # Thomas Herve # Timothy Allen # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ######## # r31474:trunk/twisted/internet/process.py # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. class _FDDetector(object): """ This class contains the logic necessary to decide which of the available system techniques should be used to detect the open file descriptors for the current process. The chosen technique gets monkey-patched into the _listOpenFDs method of this class so that the detection only needs to occur once. @ivars listdir: The implementation of listdir to use. This gets overwritten by the test cases. @ivars getpid: The implementation of getpid to use, returns the PID of the running process. @ivars openfile: The implementation of open() to use, by default the Python builtin. 
""" # So that we can unit test this listdir = os.listdir getpid = os.getpid openfile = open def _listOpenFDs(self): """ Figure out which implementation to use, then run it. """ self._listOpenFDs = self._getImplementation() return self._listOpenFDs() def _getImplementation(self): """ Check if /dev/fd works, if so, use that. Otherwise, check if /proc/%d/fd exists, if so use that. Otherwise, ask resource.getrlimit, if that throws an exception, then fallback to _fallbackFDImplementation. """ try: self.listdir("/dev/fd") if self._checkDevFDSanity(): # FreeBSD support :-) return self._devFDImplementation else: return self._fallbackFDImplementation except: try: self.listdir("/proc/%d/fd" % (self.getpid(),)) return self._procFDImplementation except: try: self._resourceFDImplementation() # Imports resource return self._resourceFDImplementation except: return self._fallbackFDImplementation def _checkDevFDSanity(self): """ Returns true iff opening a file modifies the fds visible in /dev/fd, as it should on a sane platform. """ start = self.listdir("/dev/fd") self.openfile("/dev/null", "r") # changed in Buildbot to hush pyflakes end = self.listdir("/dev/fd") return start != end def _devFDImplementation(self): """ Simple implementation for systems where /dev/fd actually works. See: http://www.freebsd.org/cgi/man.cgi?fdescfs """ dname = "/dev/fd" result = [int(fd) for fd in os.listdir(dname)] return result def _procFDImplementation(self): """ Simple implementation for systems where /proc/pid/fd exists (we assume it works). """ dname = "/proc/%d/fd" % (os.getpid(),) return [int(fd) for fd in os.listdir(dname)] def _resourceFDImplementation(self): """ Fallback implementation where the resource module can inform us about how many FDs we can expect. Note that on OS-X we expect to be using the /dev/fd implementation. """ import resource maxfds = resource.getrlimit(resource.RLIMIT_NOFILE)[1] + 1 # OS-X reports 9223372036854775808. 
That's a lot of fds # to close if maxfds > 1024: maxfds = 1024 return xrange(maxfds) def _fallbackFDImplementation(self): """ Fallback-fallback implementation where we just assume that we need to close 256 FDs. """ maxfds = 256 return xrange(maxfds) detector = _FDDetector() def _listOpenFDs(): """ Use the global detector object to figure out which FD implementation to use. """ return detector._listOpenFDs()
gpl-2.0
imtapps/django-imt-fork
tests/modeltests/fixtures/tests.py
41
29331
from __future__ import absolute_import from django.contrib.sites.models import Site from django.core import management from django.db import connection, IntegrityError from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature from django.utils import six from .models import Article, Book, Spy, Tag, Visa class TestCaseFixtureLoadingTests(TestCase): fixtures = ['fixture1.json', 'fixture2.json'] def testClassFixtures(self): "Check that test case has installed 3 fixture objects" self.assertEqual(Article.objects.count(), 3) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker has no place on ESPN>', ]) class DumpDataAssertMixin(object): def _dumpdata_assert(self, args, output, format='json', natural_keys=False, use_base_manager=False, exclude_list=[]): new_io = six.StringIO() management.call_command('dumpdata', *args, **{'format': format, 'stdout': new_io, 'stderr': new_io, 'use_natural_keys': natural_keys, 'use_base_manager': use_base_manager, 'exclude': exclude_list}) command_output = new_io.getvalue().strip() if format == "json": self.assertJSONEqual(command_output, output) elif format == "xml": self.assertXMLEqual(command_output, output) else: self.assertEqual(command_output, output) class FixtureLoadingTests(DumpDataAssertMixin, TestCase): def test_initial_data(self): # syncdb introduces 1 initial data object from initial_data.json. self.assertQuerysetEqual(Book.objects.all(), [ '<Book: Achieving self-awareness of Python programs>' ]) def test_loading_and_dumping(self): Site.objects.all().delete() # Load fixture 1. Single JSON file, with two objects. 
management.call_command('loaddata', 'fixture1.json', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]') # Try just dumping the contents of fixtures.Category self._dumpdata_assert(['fixtures.Category'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]') # ...and just fixtures.Article self._dumpdata_assert(['fixtures.Article'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]') # ...and both self._dumpdata_assert(['fixtures.Category', 'fixtures.Article'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]') # Specify a specific model twice self._dumpdata_assert(['fixtures.Article', 'fixtures.Article'], '[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on 
ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]') # Specify a dump that specifies Article both explicitly and implicitly self._dumpdata_assert(['fixtures.Article', 'fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]') # Same again, but specify in the reverse order self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]') # Specify one model from one application, and an entire other application. self._dumpdata_assert(['fixtures.Category', 'sites'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]') # Load fixture 2. JSON file imported by default. 
Overwrites some existing objects management.call_command('loaddata', 'fixture2.json', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker has no place on ESPN>', ]) # Load fixture 3, XML format. management.call_command('loaddata', 'fixture3.xml', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: XML identified as leading cause of cancer>', '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker on TV is great!>', ]) # Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne. management.call_command('loaddata', 'fixture6.json', verbosity=0, commit=False) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">', '<Tag: <Article: Copyright is fine the way it is> tagged "law">' ]) # Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne. management.call_command('loaddata', 'fixture7.xml', verbosity=0, commit=False) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">', '<Tag: <Article: Copyright is fine the way it is> tagged "legal">', '<Tag: <Article: Django conquers world!> tagged "django">', '<Tag: <Article: Django conquers world!> tagged "world domination">' ]) # Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany. management.call_command('loaddata', 'fixture8.json', verbosity=0, commit=False) self.assertQuerysetEqual(Visa.objects.all(), [ '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>', '<Visa: Stephane Grappelli Can add user>', '<Visa: Prince >' ]) # Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany. 
management.call_command('loaddata', 'fixture9.xml', verbosity=0, commit=False) self.assertQuerysetEqual(Visa.objects.all(), [ '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>', '<Visa: Stephane Grappelli Can add user, Can delete user>', '<Visa: Artist formerly known as "Prince" Can change user>' ]) self.assertQuerysetEqual(Book.objects.all(), [ '<Book: Achieving self-awareness of Python programs>', '<Book: Music for all ages by Artist formerly known as "Prince" and Django Reinhardt>' ]) # Load a fixture that doesn't exist management.call_command('loaddata', 'unknown.json', verbosity=0, commit=False) # object list is unaffected self.assertQuerysetEqual(Article.objects.all(), [ '<Article: XML identified as leading cause of cancer>', '<Article: Django conquers world!>', '<Article: Copyright is fine the way it is>', '<Article: Poker on TV is great!>', ]) # By default, you get raw keys on dumpdata self._dumpdata_assert(['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [3, 1]}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]') # But you can get natural keys if you ask for them and they are available self._dumpdata_assert(['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]', natural_keys=True) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker on TV is great!", "pub_date": "2006-06-16T11:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": 
{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}, {"pk": 4, "model": "fixtures.article", "fields": {"headline": "Django conquers world!", "pub_date": "2006-06-16T15:00:00"}}, {"pk": 5, "model": "fixtures.article", "fields": {"headline": "XML identified as leading cause of cancer", "pub_date": "2006-06-16T16:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "legal", "tagged_id": 3}}, {"pk": 3, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "django", "tagged_id": 4}}, {"pk": 4, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "world domination", "tagged_id": 4}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Artist formerly known as \\"Prince\\""}}, {"pk": 1, "model": "fixtures.visa", "fields": {"person": ["Django Reinhardt"], "permissions": [["add_user", "auth", "user"], ["change_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 2, "model": "fixtures.visa", "fields": {"person": ["Stephane Grappelli"], "permissions": [["add_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 3, "model": "fixtures.visa", "fields": {"person": ["Artist formerly known as \\"Prince\\""], "permissions": [["change_user", "auth", "user"]]}}, {"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]', natural_keys=True) # Dump the current contents of the database as an XML fixture 
self._dumpdata_assert(['fixtures'], """<?xml version="1.0" encoding="utf-8"?> <django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker on TV is great!</field><field type="DateTimeField" name="pub_date">2006-06-16T11:00:00</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Copyright is fine the way it is</field><field type="DateTimeField" name="pub_date">2006-06-16T14:00:00</field></object><object pk="4" model="fixtures.article"><field type="CharField" name="headline">Django conquers world!</field><field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field></object><object pk="5" model="fixtures.article"><field type="CharField" name="headline">XML identified as leading cause of cancer</field><field type="DateTimeField" name="pub_date">2006-06-16T16:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">legal</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="3" model="fixtures.tag"><field type="CharField" name="name">django</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="4" model="fixtures.tag"><field 
type="CharField" name="name">world domination</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Artist formerly known as "Prince"</field></object><object pk="1" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Django Reinhardt</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="2" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Stephane Grappelli</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="3" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Artist formerly known as "Prince"</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="1" model="fixtures.book"><field type="CharField" name="name">Music for all ages</field><field to="fixtures.person" name="authors" 
rel="ManyToManyRel"><object><natural>Artist formerly known as "Prince"</natural></object><object><natural>Django Reinhardt</natural></object></field></object><object pk="10" model="fixtures.book"><field type="CharField" name="name">Achieving self-awareness of Python programs</field><field to="fixtures.person" name="authors" rel="ManyToManyRel"></field></object></django-objects>""", format='xml', natural_keys=True) def test_dumpdata_with_excludes(self): # Load fixture1 which has a site, two articles, and a category Site.objects.all().delete() management.call_command('loaddata', 'fixture1.json', verbosity=0, commit=False) # Excluding fixtures app should only leave sites self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]', exclude_list=['fixtures']) # Excluding fixtures.Article/Book should leave fixtures.Category self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, {"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book']) # Excluding fixtures and fixtures.Article/Book should be a no-op self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, {"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book']) # Excluding sites and fixtures.Article/Book should only leave fixtures.Category self._dumpdata_assert( ['sites', 'fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]', exclude_list=['fixtures.Article', 'fixtures.Book', 'sites']) # Excluding a bogus app should throw an error with six.assertRaisesRegex(self, 
management.CommandError, "Unknown app in excludes: foo_app"): self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['foo_app']) # Excluding a bogus model should throw an error with six.assertRaisesRegex(self, management.CommandError, "Unknown model in excludes: fixtures.FooModel"): self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['fixtures.FooModel']) def test_dumpdata_with_filtering_manager(self): spy1 = Spy.objects.create(name='Paul') spy2 = Spy.objects.create(name='Alex', cover_blown=True) self.assertQuerysetEqual(Spy.objects.all(), ['<Spy: Paul>']) # Use the default manager self._dumpdata_assert(['fixtures.Spy'], '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % spy1.pk) # Dump using Django's base manager. Should return all objects, # even those normally filtered by the manager self._dumpdata_assert(['fixtures.Spy'], '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": true}}, {"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % (spy2.pk, spy1.pk), use_base_manager=True) def test_compress_format_loading(self): # Load fixture 4 (compressed), using format specification management.call_command('loaddata', 'fixture4.json', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django pets kitten>', ]) def test_compressed_specified_loading(self): # Load fixture 5 (compressed), using format *and* compression specification management.call_command('loaddata', 'fixture5.json.zip', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: WoW subscribers now outnumber readers>', ]) def test_compressed_loading(self): # Load fixture 5 (compressed), only compression specification management.call_command('loaddata', 'fixture5.zip', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: WoW subscribers now outnumber readers>', ]) def test_ambiguous_compressed_fixture(self): # The name "fixture5" is 
ambigous, so loading it will raise an error with self.assertRaises(management.CommandError) as cm: management.call_command('loaddata', 'fixture5', verbosity=0, commit=False) self.assertIn("Multiple fixtures named 'fixture5'", cm.exception.args[0]) def test_db_loading(self): # Load db fixtures 1 and 2. These will load using the 'default' database identifier implicitly management.call_command('loaddata', 'db_fixture_1', verbosity=0, commit=False) management.call_command('loaddata', 'db_fixture_2', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Who needs more than one database?>', '<Article: Who needs to use compressed data?>', ]) def test_loaddata_error_message(self): """ Verifies that loading a fixture which contains an invalid object outputs an error message which contains the pk of the object that triggered the error. """ # MySQL needs a little prodding to reject invalid data. # This won't affect other tests because the database connection # is closed at the end of each test. if connection.vendor == 'mysql': connection.cursor().execute("SET sql_mode = 'TRADITIONAL'") with self.assertRaises(IntegrityError) as cm: management.call_command('loaddata', 'invalid.json', verbosity=0, commit=False) self.assertIn("Could not load fixtures.Article(pk=1):", cm.exception.args[0]) def test_loading_using(self): # Load db fixtures 1 and 2. These will load using the 'default' database identifier explicitly management.call_command('loaddata', 'db_fixture_1', verbosity=0, using='default', commit=False) management.call_command('loaddata', 'db_fixture_2', verbosity=0, using='default', commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Who needs more than one database?>', '<Article: Who needs to use compressed data?>', ]) def test_unmatched_identifier_loading(self): # Try to load db fixture 3. 
This won't load because the database identifier doesn't match management.call_command('loaddata', 'db_fixture_3', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), []) management.call_command('loaddata', 'db_fixture_3', verbosity=0, using='default', commit=False) self.assertQuerysetEqual(Article.objects.all(), []) def test_output_formats(self): # Load back in fixture 1, we need the articles from it management.call_command('loaddata', 'fixture1', verbosity=0, commit=False) # Try to load fixture 6 using format discovery management.call_command('loaddata', 'fixture6', verbosity=0, commit=False) self.assertQuerysetEqual(Tag.objects.all(), [ '<Tag: <Article: Time to reform copyright> tagged "copyright">', '<Tag: <Article: Time to reform copyright> tagged "law">' ]) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "law", "tagged_id": 3}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]', natural_keys=True) # Dump the current contents of the database as an XML fixture self._dumpdata_assert(['fixtures'], """<?xml 
version="1.0" encoding="utf-8"?> <django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker has no place on ESPN</field><field type="DateTimeField" name="pub_date">2006-06-16T12:00:00</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Time to reform copyright</field><field type="DateTimeField" name="pub_date">2006-06-16T13:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">law</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Prince</field></object><object pk="10" model="fixtures.book"><field type="CharField" name="name">Achieving self-awareness of Python programs</field><field to="fixtures.person" name="authors" rel="ManyToManyRel"></field></object></django-objects>""", format='xml', natural_keys=True) class FixtureTransactionTests(DumpDataAssertMixin, TransactionTestCase): @skipUnlessDBFeature('supports_forward_references') def test_format_discovery(self): # Load fixture 1 again, using format discovery 
management.call_command('loaddata', 'fixture1', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Try to load fixture 2 using format discovery; this will fail # because there are two fixture2's in the fixtures directory with self.assertRaises(management.CommandError) as cm: management.call_command('loaddata', 'fixture2', verbosity=0) self.assertIn("Multiple fixtures named 'fixture2'", cm.exception.args[0]) # object list is unaffected self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ]) # Dump the current contents of the database as a JSON fixture self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 10, "model": "fixtures.book", "fields": {"name": "Achieving self-awareness of Python programs", "authors": []}}]') # Load fixture 4 (compressed), using format discovery management.call_command('loaddata', 'fixture4', verbosity=0, commit=False) self.assertQuerysetEqual(Article.objects.all(), [ '<Article: Django pets kitten>', '<Article: Time to reform copyright>', '<Article: Poker has no place on ESPN>', ])
bsd-3-clause
chaoallsome/pip
pip/_vendor/html5lib/trie/py.py
1323
1775
from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import text_type from bisect import bisect_left from ._base import Trie as ABCTrie class Trie(ABCTrie): def __init__(self, data): if not all(isinstance(x, text_type) for x in data.keys()): raise TypeError("All keys must be strings") self._data = data self._keys = sorted(data.keys()) self._cachestr = "" self._cachepoints = (0, len(data)) def __contains__(self, key): return key in self._data def __len__(self): return len(self._data) def __iter__(self): return iter(self._data) def __getitem__(self, key): return self._data[key] def keys(self, prefix=None): if prefix is None or prefix == "" or not self._keys: return set(self._keys) if prefix.startswith(self._cachestr): lo, hi = self._cachepoints start = i = bisect_left(self._keys, prefix, lo, hi) else: start = i = bisect_left(self._keys, prefix) keys = set() if start == len(self._keys): return keys while self._keys[i].startswith(prefix): keys.add(self._keys[i]) i += 1 self._cachestr = prefix self._cachepoints = (start, i) return keys def has_keys_with_prefix(self, prefix): if prefix in self._data: return True if prefix.startswith(self._cachestr): lo, hi = self._cachepoints i = bisect_left(self._keys, prefix, lo, hi) else: i = bisect_left(self._keys, prefix) if i == len(self._keys): return False return self._keys[i].startswith(prefix)
mit
alexmerser/overholt
overholt/core.py
9
4483
# -*- coding: utf-8 -*- """ overholt.core ~~~~~~~~~~~~~ core module """ from flask_mail import Mail from flask_sqlalchemy import SQLAlchemy from flask_security import Security #: Flask-SQLAlchemy extension instance db = SQLAlchemy() #: Flask-Mail extension instance mail = Mail() #: Flask-Security extension instance security = Security() class OverholtError(Exception): """Base application error class.""" def __init__(self, msg): self.msg = msg class OverholtFormError(Exception): """Raise when an error processing a form occurs.""" def __init__(self, errors=None): self.errors = errors class Service(object): """A :class:`Service` instance encapsulates common SQLAlchemy model operations in the context of a :class:`Flask` application. """ __model__ = None def _isinstance(self, model, raise_error=True): """Checks if the specified model instance matches the service's model. By default this method will raise a `ValueError` if the model is not the expected type. :param model: the model instance to check :param raise_error: flag to raise an error on a mismatch """ rv = isinstance(model, self.__model__) if not rv and raise_error: raise ValueError('%s is not of type %s' % (model, self.__model__)) return rv def _preprocess_params(self, kwargs): """Returns a preprocessed dictionary of parameters. Used by default before creating a new instance or updating an existing instance. :param kwargs: a dictionary of parameters """ kwargs.pop('csrf_token', None) return kwargs def save(self, model): """Commits the model to the database and returns the model :param model: the model to save """ self._isinstance(model) db.session.add(model) db.session.commit() return model def all(self): """Returns a generator containing all instances of the service's model. """ return self.__model__.query.all() def get(self, id): """Returns an instance of the service's model with the specified id. Returns `None` if an instance with the specified id does not exist. 
:param id: the instance id """ return self.__model__.query.get(id) def get_all(self, *ids): """Returns a list of instances of the service's model with the specified ids. :param *ids: instance ids """ return self.__model__.query.filter(self.__model__.id.in_(ids)).all() def find(self, **kwargs): """Returns a list of instances of the service's model filtered by the specified key word arguments. :param **kwargs: filter parameters """ return self.__model__.query.filter_by(**kwargs) def first(self, **kwargs): """Returns the first instance found of the service's model filtered by the specified key word arguments. :param **kwargs: filter parameters """ return self.find(**kwargs).first() def get_or_404(self, id): """Returns an instance of the service's model with the specified id or raises an 404 error if an instance with the specified id does not exist. :param id: the instance id """ return self.__model__.query.get_or_404(id) def new(self, **kwargs): """Returns a new, unsaved instance of the service's model class. :param **kwargs: instance parameters """ return self.__model__(**self._preprocess_params(kwargs)) def create(self, **kwargs): """Returns a new, saved instance of the service's model class. :param **kwargs: instance parameters """ return self.save(self.new(**kwargs)) def update(self, model, **kwargs): """Returns an updated instance of the service's model class. :param model: the model to update :param **kwargs: update parameters """ self._isinstance(model) for k, v in self._preprocess_params(kwargs).items(): setattr(model, k, v) self.save(model) return model def delete(self, model): """Immediately deletes the specified model instance. :param model: the model instance to delete """ self._isinstance(model) db.session.delete(model) db.session.commit()
mit
BeetMan/shadowsocks-android
gfwlist/gen.py
304
1535
#!/usr/bin/python # -*- encoding: utf8 -*- import itertools import math import sys import IPy def main(): china_list_set = IPy.IPSet() for line in sys.stdin: line_params = line.split("|") if len(line_params) < 5 or line_params[2] != "ipv4" or line_params[1] != "CN": continue ip_addr = line_params[3] ip_length = float(line_params[4]) ip_mask = 32 - int(math.ceil(math.log(ip_length, 2))) china_list_set.add(IPy.IP("%s/%d" % (ip_addr, ip_mask))) # 添加内网地址 internal_list = IPy.IPSet(map(IPy.IP, [ "0.0.0.0/8", "10.0.0.0/8", "100.64.0.0/10", "112.124.47.0/24", "114.114.114.0/24", "127.0.0.0/8", "169.254.0.0/16", "172.16.0.0/12", "192.0.0.0/29", "192.0.2.0/24", "192.88.99.0/24", "192.168.0.0/16", "198.18.0.0/15", "198.51.100.0/24", "203.0.113.0/24", "224.0.0.0/4", "240.0.0.0/4", ])) china_list_set += internal_list all = china_list_set # 取反 # all = IPy.IPSet([IPy.IP("0.0.0.0/0")]) # 剔除所有孤立的C段 # for ip in china_list_set: # all.discard(ip) # filter = itertools.ifilter(lambda x: len(x) <= 65536, all) # for ip in filter: # all.discard(ip) # all.add(IPy.IP(ip.strNormal(0)).make_net('255.255.0.0')) # 输出结果 for ip in all: print '<item>' + str(ip) + '</item>' if __name__ == "__main__": main()
gpl-3.0
OkBuilds/buck
third-party/py/unittest2/unittest2/test/test_discovery.py
111
13372
import os import re import sys import unittest2 class TestDiscovery(unittest2.TestCase): # Heavily mocked tests so I can avoid hitting the filesystem def test_get_name_from_path(self): loader = unittest2.TestLoader() loader._top_level_dir = '/foo' name = loader._get_name_from_path('/foo/bar/baz.py') self.assertEqual(name, 'bar.baz') if not __debug__: # asserts are off return self.assertRaises(AssertionError, loader._get_name_from_path, '/bar/baz.py') def test_find_tests(self): loader = unittest2.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = os.path.isdir def restore_isdir(): os.path.isdir = original_isdir path_lists = [['test1.py', 'test2.py', 'not_a_test.py', 'test_dir', 'test.foo', 'test-not-a-module.py', 'another_dir'], ['test3.py', 'test4.py', ]] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) def isdir(path): return path.endswith('dir') os.path.isdir = isdir self.addCleanup(restore_isdir) def isfile(path): # another_dir is not a package and so shouldn't be recursed into return not path.endswith('dir') and not 'another_dir' in path os.path.isfile = isfile self.addCleanup(restore_isfile) loader._get_module_from_name = lambda path: path + ' module' loader.loadTestsFromModule = lambda module: module + ' tests' top_level = os.path.abspath('/foo') loader._top_level_dir = top_level suite = list(loader._find_tests(top_level, 'test*.py')) expected = [name + ' module tests' for name in ('test1', 'test2')] expected.extend([('test_dir.%s' % name) + ' module tests' for name in ('test3', 'test4')]) self.assertEqual(suite, expected) def test_find_tests_with_package(self): loader = unittest2.TestLoader() original_listdir = os.listdir def restore_listdir(): os.listdir = original_listdir original_isfile = os.path.isfile def restore_isfile(): os.path.isfile = original_isfile original_isdir = 
os.path.isdir def restore_isdir(): os.path.isdir = original_isdir directories = ['a_directory', 'test_directory', 'test_directory2'] path_lists = [directories, [], [], []] os.listdir = lambda path: path_lists.pop(0) self.addCleanup(restore_listdir) os.path.isdir = lambda path: True self.addCleanup(restore_isdir) os.path.isfile = lambda path: os.path.basename(path) not in directories self.addCleanup(restore_isfile) class Module(object): paths = [] load_tests_args = [] def __init__(self, path): self.path = path self.paths.append(path) if os.path.basename(path) == 'test_directory': def load_tests(loader, tests, pattern): self.load_tests_args.append((loader, tests, pattern)) return 'load_tests' self.load_tests = load_tests def __eq__(self, other): return self.path == other.path # Silence py3k warning __hash__ = None loader._get_module_from_name = lambda name: Module(name) def loadTestsFromModule(module, use_load_tests): if use_load_tests: raise self.failureException('use_load_tests should be False for packages') return module.path + ' module tests' loader.loadTestsFromModule = loadTestsFromModule loader._top_level_dir = '/foo' # this time no '.py' on the pattern so that it can match # a test package suite = list(loader._find_tests('/foo', 'test*')) # We should have loaded tests from the test_directory package by calling load_tests # and directly from the test_directory2 package self.assertEqual(suite, ['load_tests', 'test_directory2' + ' module tests']) self.assertEqual(Module.paths, ['test_directory', 'test_directory2']) # load_tests should have been called once with loader, tests and pattern self.assertEqual(Module.load_tests_args, [(loader, 'test_directory' + ' module tests', 'test*')]) def test_discover(self): loader = unittest2.TestLoader() original_isfile = os.path.isfile original_isdir = os.path.isdir def restore_isfile(): os.path.isfile = original_isfile os.path.isfile = lambda path: False self.addCleanup(restore_isfile) orig_sys_path = sys.path[:] def 
restore_path(): sys.path[:] = orig_sys_path self.addCleanup(restore_path) full_path = os.path.abspath(os.path.normpath('/foo')) self.assertRaises(ImportError, loader.discover, '/foo/bar', top_level_dir='/foo') self.assertEqual(loader._top_level_dir, full_path) self.assertIn(full_path, sys.path) os.path.isfile = lambda path: True os.path.isdir = lambda path: True def restore_isdir(): os.path.isdir = original_isdir self.addCleanup(restore_isdir) _find_tests_args = [] def _find_tests(start_dir, pattern): _find_tests_args.append((start_dir, pattern)) return ['tests'] loader._find_tests = _find_tests loader.suiteClass = str suite = loader.discover('/foo/bar/baz', 'pattern', '/foo/bar') top_level_dir = os.path.abspath(os.path.normpath('/foo/bar')) start_dir = os.path.abspath(os.path.normpath('/foo/bar/baz')) self.assertEqual(suite, "['tests']") self.assertEqual(loader._top_level_dir, top_level_dir) self.assertEqual(_find_tests_args, [(start_dir, 'pattern')]) self.assertIn(top_level_dir, sys.path) def test_discover_with_modules_that_fail_to_import(self): loader = unittest2.TestLoader() listdir = os.listdir os.listdir = lambda _: ['test_this_does_not_exist.py'] isfile = os.path.isfile os.path.isfile = lambda _: True orig_sys_path = sys.path[:] def restore(): os.path.isfile = isfile os.listdir = listdir sys.path[:] = orig_sys_path self.addCleanup(restore) suite = loader.discover('.') self.assertIn(os.getcwd(), sys.path) self.assertEqual(suite.countTestCases(), 1) test = list(list(suite)[0])[0] # extract test from suite self.assertRaises(ImportError, lambda: test.test_this_does_not_exist()) def test_command_line_handling_parseArgs(self): # Haha - take that uninstantiable class program = object.__new__(unittest2.TestProgram) args = [] def do_discovery(argv): args.extend(argv) program._do_discovery = do_discovery program.parseArgs(['something', 'discover']) self.assertEqual(args, []) program.parseArgs(['something', 'discover', 'foo', 'bar']) self.assertEqual(args, ['foo', 
'bar']) def test_command_line_handling_do_discovery_too_many_arguments(self): class Stop(Exception): pass def usageExit(): raise Stop program = object.__new__(unittest2.TestProgram) program.usageExit = usageExit self.assertRaises(Stop, # too many args lambda: program._do_discovery(['one', 'two', 'three', 'four'])) def test_command_line_handling_do_discovery_calls_loader(self): program = object.__new__(unittest2.TestProgram) class Loader(object): args = [] def discover(self, start_dir, pattern, top_level_dir): self.args.append((start_dir, pattern, top_level_dir)) return 'tests' program._do_discovery(['-v'], Loader=Loader) self.assertEqual(program.verbosity, 2) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['--verbose'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery([], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['fish', 'eggs'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['fish', 'eggs', 'ham'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', 'ham')]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['-s', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') 
self.assertEqual(Loader.args, [('fish', 'test*.py', None)]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['-t', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'test*.py', 'fish')]) Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(['-p', 'fish'], Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('.', 'fish', None)]) self.assertFalse(program.failfast) self.assertFalse(program.catchbreak) args = ['-p', 'eggs', '-s', 'fish', '-v', '-f'] try: import signal except ImportError: signal = None else: args.append('-c') Loader.args = [] program = object.__new__(unittest2.TestProgram) program._do_discovery(args, Loader=Loader) self.assertEqual(program.test, 'tests') self.assertEqual(Loader.args, [('fish', 'eggs', None)]) self.assertEqual(program.verbosity, 2) self.assertTrue(program.failfast) if signal is not None: self.assertTrue(program.catchbreak) def test_detect_module_clash(self): class Module(object): __file__ = 'bar/foo.py' sys.modules['foo'] = Module full_path = os.path.abspath('foo') original_listdir = os.listdir original_isfile = os.path.isfile original_isdir = os.path.isdir def cleanup(): os.listdir = original_listdir os.path.isfile = original_isfile os.path.isdir = original_isdir del sys.modules['foo'] if full_path in sys.path: sys.path.remove(full_path) self.addCleanup(cleanup) def listdir(_): return ['foo.py'] def isfile(_): return True def isdir(_): return True os.listdir = listdir os.path.isfile = isfile os.path.isdir = isdir loader = unittest2.TestLoader() mod_dir = os.path.abspath('bar') expected_dir = os.path.abspath('foo') msg = re.escape(r"'foo' module incorrectly imported from %r. Expected %r. " "Is this module globally installed?" 
% (mod_dir, expected_dir)) self.assertRaisesRegexp( ImportError, '^%s$' % msg, loader.discover, start_dir='foo', pattern='foo.py' ) self.assertEqual(sys.path[0], full_path) def test_discovery_from_dotted_path(self): loader = unittest2.TestLoader() tests = [self] expectedPath = os.path.abspath(os.path.dirname(unittest2.test.__file__)) self.wasRun = False def _find_tests(start_dir, pattern): self.wasRun = True self.assertEqual(start_dir, expectedPath) return tests loader._find_tests = _find_tests suite = loader.discover('unittest2.test') self.assertTrue(self.wasRun) self.assertEqual(suite._tests, tests) if __name__ == '__main__': unittest2.main()
apache-2.0
timlinux/QGIS
python/plugins/processing/algs/gdal/ClipRasterByMask.py
18
12039
# -*- coding: utf-8 -*- """ *************************************************************************** ClipRasterByMask.py --------------------- Date : September 2013 Copyright : (C) 2013 by Alexander Bruy Email : alexander bruy at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Alexander Bruy' __date__ = 'September 2013' __copyright__ = '(C) 2013, Alexander Bruy' import os from qgis.PyQt.QtGui import QIcon from qgis.core import (QgsRasterFileWriter, QgsProcessing, QgsProcessingException, QgsProcessingParameterDefinition, QgsProcessingParameterFeatureSource, QgsProcessingParameterRasterLayer, QgsProcessingParameterCrs, QgsProcessingParameterEnum, QgsProcessingParameterString, QgsProcessingParameterNumber, QgsProcessingParameterBoolean, QgsProcessingParameterRasterDestination) from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm from processing.algs.gdal.GdalUtils import GdalUtils pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0] class ClipRasterByMask(GdalAlgorithm): INPUT = 'INPUT' MASK = 'MASK' SOURCE_CRS = 'SOURCE_CRS' TARGET_CRS = 'TARGET_CRS' NODATA = 'NODATA' ALPHA_BAND = 'ALPHA_BAND' CROP_TO_CUTLINE = 'CROP_TO_CUTLINE' KEEP_RESOLUTION = 'KEEP_RESOLUTION' SET_RESOLUTION = 'SET_RESOLUTION' X_RESOLUTION = 'X_RESOLUTION' Y_RESOLUTION = 'Y_RESOLUTION' OPTIONS = 'OPTIONS' DATA_TYPE = 'DATA_TYPE' MULTITHREADING = 'MULTITHREADING' EXTRA = 'EXTRA' OUTPUT = 'OUTPUT' def __init__(self): super().__init__() def initAlgorithm(self, config=None): self.TYPES = [self.tr('Use Input Layer Data Type'), 'Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 
'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64'] self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT, self.tr('Input layer'))) self.addParameter(QgsProcessingParameterFeatureSource(self.MASK, self.tr('Mask layer'), [QgsProcessing.TypeVectorPolygon])) self.addParameter(QgsProcessingParameterCrs(self.SOURCE_CRS, self.tr('Source CRS'), optional=True)) self.addParameter(QgsProcessingParameterCrs(self.TARGET_CRS, self.tr('Target CRS'), optional=True)) self.addParameter(QgsProcessingParameterNumber(self.NODATA, self.tr('Assign a specified nodata value to output bands'), type=QgsProcessingParameterNumber.Double, defaultValue=None, optional=True)) self.addParameter(QgsProcessingParameterBoolean(self.ALPHA_BAND, self.tr('Create an output alpha band'), defaultValue=False)) self.addParameter(QgsProcessingParameterBoolean(self.CROP_TO_CUTLINE, self.tr('Match the extent of the clipped raster to the extent of the mask layer'), defaultValue=True)) self.addParameter(QgsProcessingParameterBoolean(self.KEEP_RESOLUTION, self.tr('Keep resolution of input raster'), defaultValue=False)) self.addParameter(QgsProcessingParameterBoolean(self.SET_RESOLUTION, self.tr('Set output file resolution'), defaultValue=False)) self.addParameter(QgsProcessingParameterNumber(self.X_RESOLUTION, self.tr('X Resolution to output bands'), type=QgsProcessingParameterNumber.Double, defaultValue=None, optional=True)) self.addParameter(QgsProcessingParameterNumber(self.Y_RESOLUTION, self.tr('Y Resolution to output bands'), type=QgsProcessingParameterNumber.Double, defaultValue=None, optional=True)) multithreading_param = QgsProcessingParameterBoolean(self.MULTITHREADING, self.tr('Use multithreaded warping implementation'), defaultValue=False) multithreading_param.setFlags(multithreading_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced) self.addParameter(multithreading_param) options_param = QgsProcessingParameterString(self.OPTIONS, self.tr('Additional creation 
options'), defaultValue='', optional=True) options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced) options_param.setMetadata({ 'widget_wrapper': { 'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}}) self.addParameter(options_param) dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE, self.tr('Output data type'), self.TYPES, allowMultiple=False, defaultValue=0) dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced) self.addParameter(dataType_param) extra_param = QgsProcessingParameterString(self.EXTRA, self.tr('Additional command-line parameters'), defaultValue=None, optional=True) extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced) self.addParameter(extra_param) self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT, self.tr('Clipped (mask)'))) def name(self): return 'cliprasterbymasklayer' def displayName(self): return self.tr('Clip raster by mask layer') def icon(self): return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'raster-clip.png')) def group(self): return self.tr('Raster extraction') def groupId(self): return 'rasterextraction' def commandName(self): return 'gdalwarp' def getConsoleCommands(self, parameters, context, feedback, executing=True): inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context) if inLayer is None: raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT)) maskLayer, maskLayerName = self.getOgrCompatibleSource(self.MASK, parameters, context, feedback, executing) sourceCrs = self.parameterAsCrs(parameters, self.SOURCE_CRS, context) targetCrs = self.parameterAsCrs(parameters, self.TARGET_CRS, context) if self.NODATA in parameters and parameters[self.NODATA] is not None: nodata = self.parameterAsDouble(parameters, self.NODATA, context) else: nodata = None options = self.parameterAsString(parameters, self.OPTIONS, context) out 
= self.parameterAsOutputLayer(parameters, self.OUTPUT, context) self.setOutputValue(self.OUTPUT, out) arguments = [] if sourceCrs.isValid(): arguments.append('-s_srs') arguments.append(GdalUtils.gdal_crs_string(sourceCrs)) if targetCrs.isValid(): arguments.append('-t_srs') arguments.append(GdalUtils.gdal_crs_string(targetCrs)) data_type = self.parameterAsEnum(parameters, self.DATA_TYPE, context) if data_type: arguments.append('-ot ' + self.TYPES[data_type]) arguments.append('-of') arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1])) if self.parameterAsBoolean(parameters, self.KEEP_RESOLUTION, context): arguments.append('-tr') arguments.append(str(inLayer.rasterUnitsPerPixelX())) arguments.append(str(-inLayer.rasterUnitsPerPixelY())) arguments.append('-tap') if self.parameterAsBoolean(parameters, self.SET_RESOLUTION, context): arguments.append('-tr') if self.X_RESOLUTION in parameters and parameters[self.X_RESOLUTION] is not None: xres = self.parameterAsDouble(parameters, self.X_RESOLUTION, context) arguments.append('{}'.format(xres)) else: arguments.append(str(inLayer.rasterUnitsPerPixelX())) if self.Y_RESOLUTION in parameters and parameters[self.Y_RESOLUTION] is not None: yres = self.parameterAsDouble(parameters, self.Y_RESOLUTION, context) arguments.append('{}'.format(yres)) else: arguments.append(str(-inLayer.rasterUnitsPerPixelY())) arguments.append('-tap') arguments.append('-cutline') arguments.append(maskLayer) arguments.append('-cl') arguments.append(maskLayerName) if self.parameterAsBoolean(parameters, self.CROP_TO_CUTLINE, context): arguments.append('-crop_to_cutline') if self.parameterAsBoolean(parameters, self.ALPHA_BAND, context): arguments.append('-dstalpha') if nodata is not None: arguments.append('-dstnodata {}'.format(nodata)) if self.parameterAsBoolean(parameters, self.MULTITHREADING, context): arguments.append('-multi') if options: arguments.extend(GdalUtils.parseCreationOptions(options)) if self.EXTRA in parameters 
and parameters[self.EXTRA] not in (None, ''): extra = self.parameterAsString(parameters, self.EXTRA, context) arguments.append(extra) arguments.append(inLayer.source()) arguments.append(out) return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
gpl-2.0
h4ck3rm1k3/gcc_py_introspector
gcc/tree/attic/query_function_example.py
1
42931
"""Shape tables describing a GCC tree dump stored as RDF.

Each table maps node type -> field -> observed target node types for that
field.  The string values look like occurrence counts from a survey of the
dump (TODO confirm); the traversal code only tests key membership and
recurses specially when a value is itself a nested tree description (a dict
with 'name'/'exprs' keys).  Commented-out entries are fields that were
deliberately excluded from the walk.
"""
import prefix
import types
import json
import pprint  # restored: pprint.pprint / pprint.pformat are used below
from graphviz import Digraph
from SPARQLWrapper import SPARQLWrapper, XML, N3, JSONLD, JSON, POST, GET, SELECT, CONSTRUCT, ASK, DESCRIBE
from SPARQLWrapper.Wrapper import _SPARQL_DEFAULT, _SPARQL_XML, _SPARQL_JSON, _SPARQL_POSSIBLE, _RDF_XML, _RDF_N3, _RDF_JSONLD, _RDF_POSSIBLE
from SPARQLWrapper.SPARQLExceptions import QueryBadFormed

# special tree, name only: expand a function_decl's name/mangled name/args
# but skip its body.
fdecl = {
    'name': 'function decl tree',
    'exprs': {
        'node:function_decl': {
            'fld:body': {'skip': 'yes'},
            'fld:args': {'node:parm_decl': '45'},
            'fld:mngl': {'node:identifier_node': '528'},
            'fld:name': {'node:identifier_node': '3082'},
        },
    }
}

# One-level fallback tree used by recurse_ref() for unknown fields.
just_vals = {
    'name': 'just values tree',
    'exprs': {
        'node:function_decl': {
            'fld:body': {'skip': 'yes'},
            'fld:args': {'node:parm_decl': '45'},
            'fld:mngl': {'node:identifier_node': '528'},
            'fld:name': {'node:identifier_node': '3082'},
        },
    }
}

# addr_expr whose fld:type is expanded with the fdecl sub-tree above.
stree = {
    'name': 'addr expr tree',
    'exprs': {
        'node:addr_expr': {
            'fld:type': {
                'node:function_decl': fdecl,  # this could contain an entire function
            }
        }
    }
}

# Main traversal table: 'exprs' drives the recursion, 'types' lists the
# node/field/type combinations that are recognised but ignored.
tree = {
    'name': 'main tree',
    'exprs': {
        'node:addr_expr': {
            'fld:OP0': {'node:pointer_type': '90'},
            'fld:type': {
                # u'node:function_decl': u'78', this could contain an entire function
                'node:string_cst': '9',
                'node:var_decl': '3'
            }
        },
        'node:array_ref': {'fld:OP0': {'node:component_ref': '3'},
                           'fld:OP1': {'node:var_decl': '3'}},
        'node:bind_expr': {'fld:body': {'node:return_expr': '30',
                                        'node:statement_list': '24'},
                           'fld:vars': {'node:var_decl': '21'}},
        'node:bit_and_expr': {'fld:OP0': {'node:array_ref': '1',
                                          'node:component_ref': '2',
                                          'node:convert_expr': '4',
                                          'node:nop_expr': '3',
                                          'node:parm_decl': '2',
                                          'node:plus_expr': '3'},
                              'fld:OP1': {'node:bit_not_expr': '1',
                                          'node:integer_cst': '13',
                                          'node:var_decl': '1'}},
        'node:bit_ior_expr': {'fld:OP0': {'node:array_ref': '1',
                                          'node:bit_and_expr': '3',
                                          'node:bit_ior_expr': '1',
                                          'node:nop_expr': '1'},
                              'fld:OP1': {'node:bit_and_expr': '2',
                                          'node:lshift_expr': '3',
                                          'node:var_decl': '1'}},
        'node:bit_not_expr': {'fld:OP0': {'node:var_decl': '1'}},
        'node:call_expr': {'fld:E0': {'node:ge_expr': '6',
                                      'node:integer_cst': '10',
                                      'node:nop_expr': '23',
                                      'node:parm_decl': '18',
                                      'node:var_decl': '7'},
                           'fld:E1': {'node:integer_cst': '12',
                                      'node:nop_expr': '13',
                                      'node:parm_decl': '8',
                                      'node:var_decl': '2'},
                           'fld:E2': {'node:integer_cst': '8',
                                      'node:parm_decl': '6',
                                      'node:var_decl': '2'},
                           'fld:E3': {'node:integer_cst': '5',
                                      'node:parm_decl': '2'},
                           'fld:fn': {'node:addr_expr': '76',
                                      'node:parm_decl': '1'}},
        'node:case_label_expr': {'fld:low': {'node:integer_cst': '4'},
                                 'fld:name': {'node:label_decl': '5'}},
        'node:component_ref': {'fld:OP0': {'node:indirect_ref': '25',
                                           'node:var_decl': '1'},
                               'fld:OP1': {'node:field_decl': '26'}},
        'node:compound_expr': {'fld:OP0': {'node:modify_expr': '2'},
                               'fld:OP1': {'node:integer_cst': '2'}},
        'node:cond_expr': {'fld:OP0': {'node:eq_expr': '12',
                                       'node:gt_expr': '2',
                                       'node:le_expr': '2',
                                       'node:lt_expr': '2',
                                       'node:ne_expr': '28',
                                       'node:truth_andif_expr': '14',
                                       'node:truth_orif_expr': '4'},
                           'fld:OP1': {'node:bind_expr': '2',
                                       'node:call_expr': '16',
                                       'node:cond_expr': '1',
                                       'node:convert_expr': '2',
                                       'node:goto_expr': '12',
                                       'node:modify_expr': '9',
                                       'node:nop_expr': '5',
                                       'node:statement_list': '17'},
                           'fld:OP2': {'node:call_expr': '4',
                                       'node:cond_expr': '3',
                                       'node:goto_expr': '12',
                                       'node:integer_cst': '2',
                                       'node:nop_expr': '6',
                                       'node:parm_decl': '2',
                                       'node:return_expr': '1'}},
        'node:const_decl': {
            # u'fld:chain': {u'node:const_decl': u'462',
            #                u'node:type_decl': u'26'},
            'fld:cnst': {'node:integer_cst': '488'},
            'fld:name': {'node:identifier_node': '488'},
            # u'fld:scpe': {u'node:translation_unit_decl': u'488'}
        },
        'node:convert_expr': {'fld:OP0': {'node:addr_expr': '1',
                                          'node:call_expr': '1',
                                          'node:parm_decl': '9',
                                          'node:rshift_expr': '3'}},
        'node:eq_expr': {'fld:OP0': {'node:call_expr': '2',
                                     'node:nop_expr': '16',
                                     'node:parm_decl': '1',
                                     'node:var_decl': '6'},
                         'fld:OP1': {'node:integer_cst': '12',
                                     'node:nop_expr': '7',
                                     'node:parm_decl': '6'}},
        'node:field_decl': {
            # u'fld:bpos': {u'node:integer_cst': u'562'},
            # u'fld:chain': {u'node:field_decl': u'427'},
            'fld:name': {'node:identifier_node': '545'},
            'fld:orig': {'node:field_decl': '2'},
            # u'fld:size': {u'node:integer_cst': u'562'}
        },
        'node:function_decl': {'fld:args': {'node:parm_decl': '45'},
                               'fld:body': {'node:bind_expr': '51'},
                               # u'fld:chain': {u'node:function_decl': u'3059',
                               #                u'node:type_decl': u'3',
                               #                u'node:var_decl': u'19'},
                               'fld:mngl': {'node:identifier_node': '528'},
                               'fld:name': {'node:identifier_node': '3082'},
                               # u'fld:scpe': {u'node:translation_unit_decl': u'2767'}
                               },
        'node:ge_expr': {'fld:OP0': {'node:component_ref': '6'},
                         'fld:OP1': {'node:component_ref': '6'}},
        'node:goto_expr': {'fld:labl': {'node:label_decl': '46'}},
        'node:gt_expr': {'fld:OP0': {'node:var_decl': '2'},
                         'fld:OP1': {'node:integer_cst': '2'}},
        'node:indirect_ref': {'fld:OP0': {'node:call_expr': '2',
                                          'node:nop_expr': '3',
                                          'node:parm_decl': '38',
                                          'node:pointer_plus_expr': '18',
                                          'node:postincrement_expr': '7',
                                          'node:var_decl': '15'}},
        'node:label_decl': {'fld:name': {'node:identifier_node': '1'},
                            # u'fld:scpe': {u'node:function_decl': u'47'}
                            },
        'node:label_expr': {'fld:name': {'node:label_decl': '42'}},
        'node:le_expr': {'fld:OP0': {'node:nop_expr': '1',
                                     'node:parm_decl': '1',
                                     'node:plus_expr': '2'},
                         'fld:OP1': {'node:integer_cst': '4'}},
        'node:lshift_expr': {'fld:OP0': {'node:bit_and_expr': '3',
                                         'node:integer_cst': '3'},
                             'fld:OP1': {'node:bit_and_expr': '3',
                                         'node:integer_cst': '3'}},
        'node:lt_expr': {'fld:OP0': {'node:var_decl': '2'},
                         'fld:OP1': {'node:integer_cst': '1',
                                     'node:var_decl': '1'}},
        'node:modify_expr': {'fld:OP0': {'node:array_ref': '2',
                                         'node:indirect_ref': '11',
                                         'node:parm_decl': '1',
                                         'node:result_decl': '50',
                                         'node:var_decl': '49'},
                             'fld:OP1': {'node:bit_and_expr': '1',
                                         'node:bit_ior_expr': '4',
                                         'node:call_expr': '18',
                                         'node:compound_expr': '2',
                                         'node:cond_expr': '14',
                                         'node:convert_expr': '4',
                                         'node:indirect_ref': '1',
                                         'node:integer_cst': '34',
                                         'node:modify_expr': '1',
                                         'node:ne_expr': '3',
                                         'node:nop_expr': '6',
                                         'node:parm_decl': '2',
                                         'node:plus_expr': '1',
                                         'node:pointer_plus_expr': '1',
                                         'node:postincrement_expr': '1',
                                         'node:preincrement_expr': '1',
                                         'node:trunc_div_expr': '1',
                                         'node:var_decl': '18'}},
        'node:mult_expr': {'fld:OP0': {'node:nop_expr': '2',
                                       'node:var_decl': '1'},
                           'fld:OP1': {'node:integer_cst': '2',
                                       'node:parm_decl': '1'}},
        'node:ne_expr': {'fld:OP0': {'node:bit_and_expr': '3',
                                     'node:call_expr': '9',
                                     'node:component_ref': '1',
                                     'node:modify_expr': '2',
                                     'node:nop_expr': '25',
                                     'node:parm_decl': '1',
                                     'node:var_decl': '18'},
                         'fld:OP1': {'node:integer_cst': '48',
                                     'node:parm_decl': '11'}},
        'node:nop_expr': {'fld:OP0': {'node:addr_expr': '13',
                                      'node:array_ref': '1',
                                      'node:bit_ior_expr': '1',
                                      'node:call_expr': '7',
                                      'node:component_ref': '2',
                                      'node:convert_expr': '3',
                                      'node:indirect_ref': '40',
                                      'node:modify_expr': '3',
                                      'node:mult_expr': '3',
                                      'node:nop_expr': '3',
                                      'node:parm_decl': '24',
                                      'node:plus_expr': '3',
                                      'node:postincrement_expr': '3',
                                      'node:var_decl': '31'}},
        'node:parm_decl': {'fld:chain': {'node:parm_decl': '48'},
                           'fld:name': {'node:identifier_node': '93'},
                           # u'fld:scpe': {u'node:function_decl': u'93'},
                           # u'fld:size': {u'node:integer_cst': u'93'}
                           },
        'node:plus_expr': {'fld:OP0': {'node:nop_expr': '2',
                                       'node:parm_decl': '6',
                                       'node:var_decl': '2'},
                           'fld:OP1': {'node:integer_cst': '9',
                                       'node:var_decl': '1'}},
        'node:pointer_plus_expr': {'fld:OP0': {'node:indirect_ref': '2',
                                               'node:parm_decl': '17'},
                                   'fld:OP1': {'node:integer_cst': '1',
                                               'node:nop_expr': '18'}},
        'node:postdecrement_expr': {'fld:OP0': {'node:var_decl': '1'},
                                    'fld:OP1': {'node:integer_cst': '1'}},
        'node:postincrement_expr': {'fld:OP0': {'node:component_ref': '6',
                                                'node:indirect_ref': '1',
                                                'node:parm_decl': '2',
                                                'node:var_decl': '3'},
                                    'fld:OP1': {'node:integer_cst': '12'}},
        'node:preincrement_expr': {'fld:OP0': {'node:parm_decl': '3',
                                               'node:var_decl': '9'},
                                   'fld:OP1': {'node:integer_cst': '12'}},
        'node:result_decl': {
            # u'fld:scpe': {u'node:function_decl': u'49'},
            # u'fld:size': {u'node:integer_cst': u'49'}
        },
        'node:return_expr': {'fld:expr': {'node:modify_expr': '50'}},
        'node:rshift_expr': {'fld:OP0': {'node:parm_decl': '3'},
                             'fld:OP1': {'node:integer_cst': '3'}},
        'node:statement_list': {'fld:E0': {'node:call_expr': '4',
                                           'node:case_label_expr': '1',
                                           'node:decl_expr': '21',
                                           'node:goto_expr': '2',
                                           'node:modify_expr': '14'},
                                'fld:E1': {'node:call_expr': '4',
                                           'node:case_label_expr': '1',
                                           'node:cond_expr': '7',
                                           'node:decl_expr': '8',
                                           'node:goto_expr': '12',
                                           'node:label_expr': '4',
                                           'node:modify_expr': '4',
                                           'node:postincrement_expr': '1',
                                           'node:switch_expr': '1'},
                                'fld:E10': {'node:cond_expr': '2',
                                            'node:label_expr': '1',
                                            'node:modify_expr': '2'},
                                'fld:E11': {'node:call_expr': '1',
                                            'node:cond_expr': '1',
                                            'node:modify_expr': '1',
                                            'node:postdecrement_expr': '1',
                                            'node:return_expr': '1'},
                                'fld:E12': {'node:cond_expr': '1',
                                            'node:goto_expr': '1',
                                            'node:modify_expr': '1',
                                            'node:return_expr': '1'},
                                'fld:E13': {'node:case_label_expr': '1',
                                            'node:label_expr': '1',
                                            'node:modify_expr': '1'},
                                'fld:E14': {'node:call_expr': '1',
                                            'node:cond_expr': '2'},
                                'fld:E15': {'node:label_expr': '1',
                                            'node:return_expr': '1'},
                                'fld:E16': {'node:return_expr': '1'},
                                'fld:E2': {'node:call_expr': '2',
                                           'node:case_label_expr': '1',
                                           'node:cond_expr': '3',
                                           'node:convert_expr': '1',
                                           'node:decl_expr': '2',
                                           'node:goto_expr': '2',
                                           'node:label_expr': '8',
                                           'node:modify_expr': '4',
                                           'node:preincrement_expr': '2',
                                           'node:return_expr': '6'},
                                'fld:E3': {'node:call_expr': '2',
                                           'node:cond_expr': '4',
                                           'node:decl_expr': '2',
                                           'node:label_expr': '3',
                                           'node:modify_expr': '4',
                                           'node:preincrement_expr': '6'},
                                'fld:E4': {'node:call_expr': '2',
                                           'node:cond_expr': '6',
                                           'node:decl_expr': '1',
                                           'node:label_expr': '7',
                                           'node:modify_expr': '1',
                                           'node:preincrement_expr': '3',
                                           'node:return_expr': '1'},
                                'fld:E5': {'node:call_expr': '1',
                                           'node:cond_expr': '7',
                                           'node:goto_expr': '3',
                                           'node:label_expr': '4',
                                           'node:modify_expr': '5'},
                                'fld:E6': {'node:call_expr': '1',
                                           'node:cond_expr': '3',
                                           'node:goto_expr': '1',
                                           'node:label_expr': '10',
                                           'node:modify_expr': '3',
                                           'node:return_expr': '2'},
                                'fld:E7': {'node:bind_expr': '1',
                                           'node:case_label_expr': '1',
                                           'node:cond_expr': '3',
                                           'node:goto_expr': '1',
                                           'node:label_expr': '1',
                                           'node:modify_expr': '3',
                                           'node:return_expr': '6'},
                                'fld:E8': {'node:cond_expr': '3',
                                           'node:label_expr': '2',
                                           'node:modify_expr': '2',
                                           'node:return_expr': '1'},
                                'fld:E9': {'node:cond_expr': '4',
                                           'node:modify_expr': '1'}},
        'node:switch_expr': {'fld:body': {'node:statement_list': '1'},
                             'fld:cond': {'node:var_decl': '1'}},
        'node:tree_list': {'fld:chan': {'node:tree_list': '2714'},
                           'fld:purp': {'node:identifier_node': '488'},
                           'fld:valu': {'node:integer_cst': '488'}},
        'node:trunc_div_expr': {'fld:OP0': {'node:nop_expr': '3',
                                            'node:plus_expr': '1'},
                                'fld:OP1': {'node:integer_cst': '4'}},
        'node:truth_andif_expr': {'fld:OP0': {'node:eq_expr': '1',
                                              'node:ne_expr': '13',
                                              'node:truth_andif_expr': '6'},
                                  'fld:OP1': {'node:eq_expr': '2',
                                              'node:le_expr': '2',
                                              'node:ne_expr': '15',
                                              'node:truth_and_expr': '1'}},
        'node:truth_orif_expr': {'fld:OP0': {'node:eq_expr': '4',
                                             'node:truth_orif_expr': '2'},
                                 'fld:OP1': {'node:eq_expr': '6'}},
        'node:type_decl': {
            # u'fld:chain': {u'node:const_decl': u'26',
            #                u'node:function_decl': u'5',
            #                u'node:type_decl': u'460'},
            'fld:name': {'node:identifier_node': '318'},
            # u'fld:scpe': {u'node:translation_unit_decl': u'449'}
        },
        'node:var_decl': {
            # u'fld:chain': {u'node:function_decl': u'18',
            #                u'node:label_decl': u'1',
            #                u'node:var_decl': u'106'},
            'fld:init': {'node:indirect_ref': '3',
                         'node:integer_cst': '6',
                         'node:lshift_expr': '3',
                         'node:trunc_div_expr': '3',
                         'node:var_decl': '2'},
            'fld:name': {'node:identifier_node': '146'},
            # u'fld:scpe': {u'node:function_decl': u'34',
            #               u'node:translation_unit_decl': u'112'},
            # u'fld:size': {u'node:integer_cst': u'134'}
        },
        'node:enumeral_type': {
            # {u'fld:csts': {u'node:tree_list': u'31'},
            'fld:max': {'node:integer_cst': '31'},
            'fld:min': {'node:integer_cst': '31'},
            'fld:name': {'node:identifier_node': '9',
                         'node:type_decl': '5'},
            'fld:size': {'node:integer_cst': '31'},
            # u'fld:unql': {u'node:enumeral_type': u'5'}
        },
        'node:integer_type': {'fld:max': {'node:integer_cst': '188'},
                              'fld:min': {'node:integer_cst': '188'},
                              'fld:name': {'node:identifier_node': '2',
                                           'node:type_decl': '157'},
                              'fld:size': {'node:integer_cst': '188'},
                              # u'fld:unql': {u'node:integer_type': u'144'}
                              },
        'node:pointer_type': {'fld:name': {'node:type_decl': '17'},
                              'fld:ptd': {'node:array_type': '7',
                                          'node:function_type': '77',
                                          'node:integer_type': '40',
                                          'node:pointer_type': '18',
                                          'node:real_type': '6',
                                          'node:record_type': '129',
                                          'node:union_type': '2',
                                          'node:vector_type': '3',
                                          'node:void_type': '9'},
                              'fld:size': {'node:integer_cst': '291'},
                              'fld:unql': {'node:pointer_type': '62'}},
    },
    # here are the types of objects that are ignored
    'types': {
        'node:array_ref': {'fld:type': {'node:integer_type': '3'}},
        'node:array_type': {'fld:domn': {'node:integer_type': '49'},
                            'fld:elts': {'node:integer_type': '36',
                                         'node:pointer_type': '7',
                                         'node:record_type': '14'},
                            'fld:name': {'node:type_decl': '8'},
                            # u'fld:size': {u'node:integer_cst': u'49'},
                            'fld:unql': {'node:array_type': '12'}},
        'node:bind_expr': {'fld:type': {'node:void_type': '54'}},
        'node:bit_and_expr': {'fld:type': {'node:integer_type': '15'}},
        'node:bit_ior_expr': {'fld:type': {'node:integer_type': '6'}},
        'node:bit_not_expr': {'fld:type': {'node:integer_type': '1'}},
        'node:boolean_type': {'fld:name': {'node:type_decl': '1'},
                              'fld:size': {'node:integer_cst': '1'}},
        'node:call_expr': {'fld:type': {'node:integer_type': '46',
                                        'node:pointer_type': '12',
                                        'node:real_type': '1',
                                        'node:void_type': '18'}},
        'node:case_label_expr': {'fld:type': {'node:void_type': '5'}},
        'node:complex_type': {'fld:name': {'node:type_decl': '4'},
                              'fld:size': {'node:integer_cst': '5'}},
        'node:component_ref': {'fld:type': {'node:array_type': '3',
                                            'node:enumeral_type': '1',
                                            'node:integer_type': '2',
                                            'node:pointer_type': '20'}},
        'node:compound_expr': {'fld:type': {'node:integer_type': '2'}},
        'node:cond_expr': {'fld:type': {'node:integer_type': '11',
                                        'node:pointer_type': '3',
                                        'node:void_type': '50'}},
        'node:const_decl': {'fld:type': {'node:enumeral_type': '488'}},
        'node:convert_expr': {'fld:type': {'node:integer_type': '11',
                                           'node:pointer_type': '2',
                                           'node:void_type': '1'}},
        'node:decl_expr': {'fld:type': {'node:void_type': '34'}},
        'node:enumeral_type': {'fld:csts': {'node:tree_list': '31'},
                               # u'fld:max': {u'node:integer_cst': u'31'},
                               # u'fld:min': {u'node:integer_cst': u'31'},
                               # u'fld:name': {u'node:identifier_node': u'9',
                               #               u'node:type_decl': u'5'},
                               # u'fld:size': {u'node:integer_cst': u'31'},
                               'fld:unql': {'node:enumeral_type': '5'}},
        'node:eq_expr': {'fld:type': {'node:integer_type': '25'}},
        'node:pointer_type': {'fld:name': {'node:type_decl': '17'},
                              'fld:ptd': {'node:array_type': '7',
                                          'node:function_type': '77',
                                          'node:integer_type': '40',
                                          'node:pointer_type': '18',
                                          'node:real_type': '6',
                                          'node:record_type': '129',
                                          'node:union_type': '2',
                                          'node:vector_type': '3',
                                          'node:void_type': '9'},
                              'fld:size': {'node:integer_cst': '291'},
                              'fld:unql': {'node:pointer_type': '62'}},
        'node:field_decl': {
            # u'fld:scpe': {u'node:record_type': u'459',
            #               u'node:union_type': u'103'},
            'fld:type': {'node:array_type': '42',
                         'node:enumeral_type': '4',
                         'node:integer_type': '290',
                         'node:pointer_type': '169',
                         'node:real_type': '2',
                         'node:record_type': '29',
                         'node:union_type': '26'}},
        'node:function_decl': {'fld:type': {'node:function_type': '3082'}},
        'node:function_type': {'fld:name': {'node:type_decl': '45'},
                               'fld:prms': {'node:tree_list': '1102'},
                               'fld:retn': {'node:boolean_type': '22',
                                            'node:complex_type': '13',
                                            'node:integer_type': '487',
                                            'node:pointer_type': '310',
                                            'node:real_type': '66',
                                            'node:record_type': '4',
                                            'node:vector_type': '58',
                                            'node:void_type': '154'},
                               'fld:size': {'node:integer_cst': '1114'},
                               'fld:unql': {'node:function_type': '51'}},
        'node:ge_expr': {'fld:type': {'node:integer_type': '6'}},
        'node:goto_expr': {'fld:type': {'node:void_type': '46'}},
        'node:gt_expr': {'fld:type': {'node:integer_type': '2'}},
        'node:indirect_ref': {'fld:type': {'node:integer_type': '47',
                                           'node:pointer_type': '11',
                                           'node:record_type': '25'}},
        'node:integer_cst': {'fld:type': {'node:integer_type': '455',
                                          'node:pointer_type': '12'}},
        'node:integer_type': {'fld:max': {'node:integer_cst': '188'},
                              'fld:min': {'node:integer_cst': '188'},
                              'fld:name': {'node:identifier_node': '2',
                                           'node:type_decl': '157'},
                              'fld:size': {'node:integer_cst': '188'},
                              'fld:unql': {'node:integer_type': '144'}},
        'node:label_decl': {'fld:type': {'node:void_type': '47'}},
        'node:label_expr': {'fld:type': {'node:void_type': '42'}},
        'node:le_expr': {'fld:type': {'node:integer_type': '4'}},
        'node:lshift_expr': {'fld:type': {'node:integer_type': '6'}},
        'node:lt_expr': {'fld:type': {'node:integer_type': '2'}},
        'node:modify_expr': {'fld:type': {'node:integer_type': '76',
                                          'node:pointer_type': '36',
                                          'node:real_type': '1'}},
        'node:mult_expr': {'fld:type': {'node:integer_type': '3'}},
        'node:ne_expr': {'fld:type': {'node:integer_type': '59'}},
        'node:nop_expr': {'fld:type': {'node:integer_type': '103',
                                       'node:pointer_type': '34'}},
        'node:parm_decl': {'fld:argt': {'node:integer_type': '49',
                                        'node:pointer_type': '44'},
                           'fld:type': {'node:integer_type': '49',
                                        'node:pointer_type': '44'}},
        'node:plus_expr': {'fld:type': {'node:integer_type': '10'}},
        'node:pointer_plus_expr': {'fld:type': {'node:pointer_type': '19'}},
        'node:postdecrement_expr': {'fld:type': {'node:integer_type': '1'}},
        'node:postincrement_expr': {'fld:type': {'node:integer_type': '1',
                                                 'node:pointer_type': '11'}},
        'node:preincrement_expr': {'fld:type': {'node:integer_type': '7',
                                                'node:pointer_type': '5'}},
        'node:real_type': {'fld:name': {'node:type_decl': '9'},
                           'fld:size': {'node:integer_cst': '9'},
                           'fld:unql': {'node:real_type': '2'}},
        'node:record_type': {'fld:flds': {'node:field_decl': '177'},
                             'fld:name': {'node:identifier_node': '89',
                                          'node:type_decl': '69'},
                             'fld:size': {'node:integer_cst': '177'},
                             'fld:unql': {'node:record_type': '79'}},
        'node:reference_type': {'fld:refd': {'node:pointer_type': '1'},
                                'fld:size': {'node:integer_cst': '1'}},
        'node:result_decl': {'fld:type': {'node:integer_type': '41',
                                          'node:pointer_type': '7',
                                          'node:real_type': '1'}},
        'node:return_expr': {'fld:type': {'node:void_type': '51'}},
        'node:rshift_expr': {'fld:type': {'node:integer_type': '3'}},
        'node:string_cst': {'fld:type': {'node:array_type': '9'}},
        'node:switch_expr': {'fld:type': {'node:integer_type': '1'}},
        'node:tree_list': {'fld:valu': {'node:boolean_type': '9',
                                        'node:complex_type': '12',
                                        'node:enumeral_type': '15',
                                        'node:integer_type': '811',
                                        'node:pointer_type': '1227',
                                        'node:real_type': '89',
                                        'node:record_type': '3',
                                        'node:reference_type': '3',
                                        'node:union_type': '6',
                                        'node:vector_type': '105',
                                        'node:void_type': '4'}},
        'node:trunc_div_expr': {'fld:type': {'node:integer_type': '4'}},
        'node:truth_and_expr': {'fld:type': {'node:integer_type': '1'}},
        'node:truth_andif_expr': {'fld:type': {'node:integer_type': '20'}},
        'node:truth_orif_expr': {'fld:type': {'node:integer_type': '6'}},
        'node:type_decl': {'fld:type': {'node:array_type': '8',
                                        'node:boolean_type': '1',
                                        'node:complex_type': '5',
                                        'node:enumeral_type': '31',
                                        'node:function_type': '45',
                                        'node:integer_type': '161',
                                        'node:pointer_type': '17',
                                        'node:real_type': '8',
                                        'node:record_type': '167',
                                        'node:union_type': '48',
                                        'node:void_type': '2'}},
        'node:union_type': {'fld:flds': {'node:field_decl': '50'},
                            'fld:name': {'node:identifier_node': '5',
                                         'node:type_decl': '13'},
                            'fld:size': {'node:integer_cst': '50'},
                            'fld:unql': {'node:union_type': '14'}},
        'node:var_decl': {'fld:type': {'node:array_type': '14',
                                       'node:integer_type': '95',
                                       'node:pointer_type': '30',
                                       'node:record_type': '7'}},
        'node:vector_type': {'fld:size': {'node:integer_cst': '12'},
                             'fld:unql': {'node:vector_type': '1'}},
        'node:void_type': {'fld:name': {'node:type_decl': '5'},
                           'fld:unql': {'node:void_type': '4'}}}}

f = {}  # NOTE(review): apparently unused scratch dict — kept for compatibility
import pprint  # fix: pprint is used below but was never imported (NameError)

# Predicates that are never copied into the result document.
skip = {
    'fld:source_file': 1,  # dont need this in the document
}


def query(s):
    """Fetch all predicate/object pairs of node *s* from the SPARQL store.

    Returns a pair (d, dt):
      d  -- {predicate: object-value} plus 'node_id' (cleaned subject URI)
      dt -- {predicate: rdf:type of the object, or None for literals}
    Raises Exception("duplicate") if the same predicate occurs twice.
    """
    results = prefix.q("""
    SELECT ?a ?p ?o ?t
    WHERE {
      <%s> ?p ?o.
      optional { ?o rdf:type ?t. }
    }
    """ % s)
    d = {'node_id': prefix.clean(s)}
    dt = {'node_id': None}  # literal has no type...
    # pprint.pprint(results)
    for x in results['results']['bindings']:
        v = prefix.clean(x['o']['value'])
        t = None
        if 't' in x:
            t = prefix.clean(x['t']['value'])
        else:
            # pprint.pprint(x)
            pass  # have no node type
        k = prefix.clean(x['p']['value'])
        if k not in d:
            if k not in skip:
                d[k] = v    # the value of the field
                dt[k] = t   # the domain type of the field object
        else:
            # d[k]=[d[k],v]
            raise Exception("duplicate")
    pprint.pprint({'query_results': d}, depth=2)
    return d, dt


import types


def recurse_ref(s, subtree):
    """Recursively expand node *s*, steering the walk with *subtree*.

    *subtree* is a {'name': ..., 'exprs': {node-type: {field: {...}}}} shape
    table; fields listed for this node's rdf:type are expanded with the full
    main ``tree``, unknown fields fall back to the one-level ``just_vals``
    table.  Returns the node's field dict with references replaced by
    recursively expanded sub-dicts.
    """
    print("RECURSE for %s\n" % s)
    print("using subtree : %s" % subtree['name'])
    d, dt = query(s)
    pprint.pprint({"Got from db": d})
    if 'rdf:type' not in d:
        return d
    st = d['rdf:type']
    # print "st" + str(st)
    # pprint.pprint(dt)
    found = False
    if 'exprs' not in subtree:
        pprint.pprint({"bad subtree": subtree}, depth=2)
        raise Exception()
    lookup = subtree['exprs']
    for k in d:
        r = None  # result of the field
        ot = dt[k]
        v = d[k]
        u = prefix.tg + v
        if type(st) is dict:
            # fix: 'skip' + st raised TypeError (str + dict); format instead
            print('skip %s' % st)
            pprint.pprint({'case': 'is type', 'k': k, 'ot': ot, 'st': st},
                          depth=2)
            # pprint.pprint(dt)
            # pass # no type
        elif not ot:  # no type, a literal
            if k.startswith('fld:'):
                r = prefix.clean(v)  # just a literal
                pprint.pprint({'case': 'is literal', 'k': k, 'dt': dt,
                               'ot': ot, 'st': st}, depth=2)
                found = True
            else:
                pprint.pprint({'case': 'is no field', 'k': k, 'ot': ot,
                               'st': st, 'r': r, 'v': v}, depth=2)
                r = v  # we need to store the type field
                found = True
        elif st in lookup:
            if k in lookup[st]:
                if ot in lookup[st][k]:
                    # NOTE(review): this rebinds the *subtree* parameter, so
                    # later fields of this node see the narrowed table —
                    # looks intentional, but confirm.
                    subtree = lookup[st][k]
                    if type(subtree) is dict:
                        if 'exprs' in subtree:
                            r = recurse_ref(u, subtree)
                            pprint.pprint({"Subtree": r}, depth=2)
                        else:
                            r = recurse_ref(u, tree)
                            pprint.pprint({"tree": r}, depth=2)
                    else:
                        r = recurse_ref(u, tree)
                        pprint.pprint({"tree2": r}, depth=2)
                    found = True
            else:
                pass  # skip
        if not found:
            r = recurse_ref(u, just_vals)  # just get one level of info for types and such
            pprint.pprint({"missing": True, 'k': k, 'ot': ot, 'st': st,
                           'u': u, 'r': r}, depth=2)
        d[k] = r
    pprint.pprint({"rec found": d}, depth=2)
    return d


# print out what field types occur
def start():
    """Locate `parse_command` (eval.c:216), expand it, dump to data/body2.py."""
    results = prefix.q("""
    SELECT ?a
    WHERE {
      ?a fld:srcp 'eval.c:216'.
      ?a fld:name [ fld:string 'parse_command'].
      ?a rdf:type nt:function_decl.
    }
    """)
    for x in results['results']['bindings']:
        print(x['a']['value'])
        r = recurse_ref(x['a']['value'], tree)
        # fix: use a context manager so the file is closed even on error
        with open("data/body2.py", "w") as o:
            o.write("deep={v2}".format(v2=pprint.pformat(r)))


start()
gpl-2.0