repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
yitian134/chromium | tools/sync-webkit-git.py | 94 | 7648 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Update third_party/WebKit using git.
Under the assumption third_party/WebKit is a clone of git.webkit.org,
we can use git commands to make it match the version requested by DEPS.
See http://code.google.com/p/chromium/wiki/UsingWebKitGit for details on
how to use this.
"""
import logging
import optparse
import os
import re
import subprocess
import sys
import urllib
def RunGit(command):
    """Run a git subcommand and return its stripped stdout as a string."""
    # Prepend the executable; shell=True is needed on Windows so PATH is
    # searched the way the user's shell would.
    full_command = ['git'] + command
    logging.info(' '.join(full_command))
    use_shell = os.name == 'nt'
    process = subprocess.Popen(full_command, shell=use_shell,
                               stdout=subprocess.PIPE)
    output = process.communicate()[0].strip()
    logging.info('Returned "%s"' % output)
    return output
def GetOverrideShortBranchName():
    """Returns the user-configured override branch name, if any.

    An empty string (git config prints nothing) means "no override".
    """
    return RunGit(['config', '--get', 'chromium.sync-branch'])
def GetGClientBranchName():
"""Returns the name of the magic branch that lets us know that DEPS is
managing the update cycle."""
# Is there an override branch specified?
override_branch_name = GetOverrideShortBranchName()
if not override_branch_name:
return 'refs/heads/gclient' # No override, so return the default branch.
# Verify that the branch from config exists.
ref_branch = 'refs/heads/' + override_branch_name
current_head = RunGit(['show-ref', '--hash', ref_branch])
if current_head:
return ref_branch
# Inform the user about the problem and how to fix it.
print ("The specified override branch ('%s') doesn't appear to exist." %
override_branch_name)
print "Please fix your git config value '%s'." % overide_config_name
sys.exit(1)
def GetWebKitRev():
    """Extract the 'webkit_revision' variable out of DEPS.

    DEPS is a Python file; execute it with a scope providing the Var() and
    From() helpers it expects.  Var() resolves lazily against the 'vars'
    dict that DEPS itself defines into this scope.
    """
    # Renamed from 'locals' to avoid shadowing the builtin of that name.
    scope = {'Var': lambda name: scope['vars'][name],
             'From': lambda *args: None}
    execfile('DEPS', {}, scope)
    return scope['vars']['webkit_revision']
def GetWebKitRevFromTarball(version):
    """Extract the WebKit revision out of the DEPS of a tarball release.

    :param version: release version string used to build the DEPS URL.
    :return: the revision string found after '/Source@' in DEPS.
    """
    deps_url = "http://src.chromium.org/svn/releases/" + version + "/DEPS"
    f = urllib.urlopen(deps_url)
    s = f.read()
    # Raw string so the \w escape reaches the regex engine verbatim
    # (the old plain string only worked because \w is not a string escape).
    m = re.search(r'(?<=/Source@)\w+', s)
    # NOTE(review): assumes '/Source@<rev>' is always present in DEPS; if it
    # is missing, m is None and this raises AttributeError (unchanged).
    return m.group(0)
def FindSVNRev(branch_name, target_rev):
    """Map an SVN revision to a git hash.
    Like 'git svn find-rev' but without the git-svn bits."""
    # We iterate through the commit log looking for "git-svn-id" lines,
    # which contain the SVN revision of that commit. We can stop once
    # we've found our target (or hit a revision number lower than what
    # we're looking for, indicating not found).
    target_rev = int(target_rev)
    # regexp matching the "commit" line from the log.
    commit_re = re.compile(r'^commit ([a-f\d]{40})$')
    # regexp matching the git-svn line from the log.
    git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ')
    if not branch_name:
        branch_name = 'origin/master'
    # --first-parent keeps us on the mainline so SVN revisions decrease
    # monotonically as we walk the log.
    cmd = ['git', 'log', '--no-color', '--first-parent', '--pretty=medium',
           branch_name]
    logging.info(' '.join(cmd))
    log = subprocess.Popen(cmd, shell=(os.name == 'nt'), stdout=subprocess.PIPE)
    # Track whether we saw a revision *later* than the one we're seeking.
    saw_later = False
    for line in log.stdout:
        match = commit_re.match(line)
        if match:
            # Remember the hash; the matching git-svn-id line follows later
            # in the same commit's log entry.
            commit = match.group(1)
            continue
        match = git_svn_re.match(line)
        if match:
            rev = int(match.group(1))
            if rev <= target_rev:
                # Close stdout to break the pipe; we do not need the rest of
                # the (potentially huge) log.
                log.stdout.close()  # Break pipe.
                if rev < target_rev:
                    if not saw_later:
                        return None  # Can't be sure whether this rev is ok.
                    # Only safe to approximate when we've already walked past
                    # newer revisions, i.e. target falls in a gap.
                    print ("WARNING: r%d not found, so using next nearest earlier r%d" %
                           (target_rev, rev))
                return commit
            else:
                saw_later = True
    print "Error: reached end of log without finding commit info."
    print "Something has likely gone horribly wrong."
    return None
def GetRemote():
    """Return the remote configured for the sync branch, or 'origin'."""
    # Fall back to the default 'gclient' branch when no override is set.
    branch = GetOverrideShortBranchName() or 'gclient'
    remote = RunGit(['config', '--get', 'branch.' + branch + '.remote'])
    return remote or 'origin'
def UpdateGClientBranch(branch_name, webkit_rev, magic_gclient_branch):
    """Update the magic gclient branch to point at |webkit_rev|.

    Returns: True if the branch was moved; False if it already pointed at
    the right commit.  (The previous docstring had this inverted.)
    """
    target = FindSVNRev(branch_name, webkit_rev)
    if not target:
        # Revision not in local history yet; fetch from the remote and retry.
        print "r%s not available; fetching." % webkit_rev
        subprocess.check_call(['git', 'fetch', GetRemote()],
                              shell=(os.name == 'nt'))
        target = FindSVNRev(branch_name, webkit_rev)
    if not target:
        print "ERROR: Couldn't map r%s to a git revision." % webkit_rev
        sys.exit(1)
    current = RunGit(['show-ref', '--hash', magic_gclient_branch])
    if current == target:
        return False  # No change necessary.
    # Move the ref directly without a checkout; '-m' records a reflog message.
    subprocess.check_call(['git', 'update-ref', '-m', 'gclient sync',
                           magic_gclient_branch, target],
                          shell=(os.name == 'nt'))
    return True
def UpdateCurrentCheckoutIfAppropriate(magic_gclient_branch):
    """Reset the current gclient branch if that's what we have checked out.

    Returns 1 (usable as an exit code) when some other branch is checked
    out, leaving the user's work untouched; otherwise hard-resets the tree
    if it differs from the freshly-updated ref.
    """
    branch = RunGit(['symbolic-ref', '-q', 'HEAD'])
    if branch != magic_gclient_branch:
        print "We have now updated the 'gclient' branch, but third_party/WebKit"
        print "has some other branch ('%s') checked out." % branch
        print "Run 'git checkout gclient' under third_party/WebKit if you want"
        print "to switch it to the version requested by DEPS."
        return 1
    # diff-index exits non-zero when the working tree differs from HEAD.
    if subprocess.call(['git', 'diff-index', '--exit-code', '--shortstat',
                        'HEAD'], shell=(os.name == 'nt')):
        print "Resetting tree state to new revision."
        subprocess.check_call(['git', 'reset', '--hard'], shell=(os.name == 'nt'))
def main():
    """Parse options and sync third_party/WebKit to the requested revision."""
    parser = optparse.OptionParser()
    parser.add_option('-v', '--verbose', action='store_true')
    parser.add_option('-r', '--revision', help="switch to desired revision")
    parser.add_option('-t', '--tarball', help="switch to desired tarball release")
    parser.add_option('-b', '--branch', help="branch name that gclient generate")
    options, args = parser.parse_args()
    if options.verbose:
        logging.basicConfig(level=logging.INFO)
    # Sanity-check that we're at the root of a checkout with a git-based
    # WebKit before touching anything.
    if not os.path.exists('third_party/WebKit/.git'):
        if os.path.exists('third_party/WebKit'):
            print "ERROR: third_party/WebKit appears to not be under git control."
        else:
            print "ERROR: third_party/WebKit could not be found."
            print "Did you run this script from the right directory?"
        print "See http://code.google.com/p/chromium/wiki/UsingWebKitGit for"
        print "setup instructions."
        return 1
    # Pick the target revision: --revision wins over --tarball, which wins
    # over reading the local DEPS file.
    if options.revision:
        webkit_rev = options.revision
        if options.tarball:
            print "WARNING: --revision is given, so ignore --tarball"
    else:
        if options.tarball:
            webkit_rev = GetWebKitRevFromTarball(options.tarball)
        else:
            webkit_rev = GetWebKitRev()
    print 'Desired revision: r%s.' % webkit_rev
    os.chdir('third_party/WebKit')
    magic_gclient_branch = GetGClientBranchName()
    changed = UpdateGClientBranch(options.branch, webkit_rev,
                                  magic_gclient_branch)
    if changed:
        # May return 1 if the user has a different branch checked out.
        return UpdateCurrentCheckoutIfAppropriate(magic_gclient_branch)
    else:
        print "Already on correct revision."
    return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
jairot/meliscore | meliscore/front/dataset.py | 1 | 4373 | import pandas as pd
import requests
import json
import collections
from datetime import datetime
from queries import *
import numpy as np
from pandas import DataFrame
import os
URL_BASE = "https://api.mercadolibre.com/"
def get_selling_speeds(itemids):
    """
    Given a list of itemids it calculates
    the number of items sold by hour since
    the beginning of the sale

    NOTE(review): despite the wording above, the computed 'speed' is items
    sold per *day* (elapsed time is divided by np.timedelta64(1, 'D')).
    Returns a DataFrame with just the 'price' and 'speed' columns.
    """
    data = get_items(itemids, ["id","start_time","sold_quantity", "price"])
    # Round-trip through JSON so pandas parses start_time as a datetime.
    data = pd.read_json(json.dumps(data))
    data['elapsed_time'] = datetime.now() - data.start_time
    # data['elapsed_hours'] = data.elapsed_time / np.timedelta64(1,'h')
    data['elapsed_days'] = data.elapsed_time / np.timedelta64(1,'D')
    data['speed'] = data.sold_quantity / data.elapsed_days
    return data[['price', 'speed']]
def simplify_item(item, prefix, sep):
    """
    Flatten a (possibly nested) item dict from the API into a single-level,
    DataFrame-friendly dict whose keys are the nested key paths joined
    with `sep`.

    :param item: dict returned by the API (values may be nested dicts).
    :param prefix: key prefix accumulated so far ('' at the top level).
    :param sep: separator used to join nested key names.
    :return: a flat dict.
    """
    # BUG FIX / compat: MutableMapping moved to collections.abc in Python 3
    # and was removed from the collections top level in Python 3.10.
    try:
        from collections.abc import MutableMapping
    except ImportError:  # Python 2
        from collections import MutableMapping
    items = []
    for k, v in item.items():
        new_key = prefix + sep + k if prefix else k
        if isinstance(v, MutableMapping):
            # Recurse into nested dicts and splice in their flattened pairs.
            items.extend(simplify_item(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)
def price_quantiles(df):
    """Return the 25/50/75 percent quantiles of df['price'] as a dict."""
    if 'price' not in df.columns:
        raise NameError('price column does not exist')
    q25, q50, q75 = df['price'].quantile([.25, .5, .75])
    return {'first quantile': q25,
            'second quantile': q50,
            'third quantile': q75}
def find_seller_score(users):
    """Return a Series with each user's power_seller_status."""
    return pd.Series([user["seller_reputation"]["power_seller_status"]
                      for user in users])
def find_imgcount(items):
    """Return a Series with the image count of each item (fetched per id)."""
    return pd.Series([get_imgcount(item['id']) for item in items])
def find_item_score(items):
    """Return a Series with each item's listing_type_id."""
    return pd.Series([item["listing_type_id"] for item in items])
def create_dataset(item, reduced=False, extra_features=False):
    """Build (and cache to CSV) a DataFrame of listings in *item*'s category.

    :param item: item dict with at least 'category_id', 'condition' and 'id'.
    :param reduced: if True, keep only rows with stock (plus *item* itself)
        and add a 'speed' (items sold per day) column.
    :param extra_features: if True, add seller score, listing score and
        image-count columns via extra API calls.
    :return: the assembled DataFrame (also written to *fname*).
    """
    category_id = item.get('category_id')
    condition = item.get('condition')
    fname = '%s_%s_%s.csv' % (category_id, condition, 'red' if reduced else 'full')
    # TODO: careful with the False!!!!
    # NOTE(review): 'and False' disables the CSV cache, so data is always
    # re-fetched from the API; drop it to re-enable caching.
    if os.path.exists(fname) and False:
        df = pd.read_csv(fname, encoding='utf-8')
    else:
        response = requests.get(URL_BASE + 'sites/MLA/search?category={}&condition={}'.format(category_id, condition))
        data = response.json()
        limit = data['paging']['limit']
        offset = 0
        # Fetch at most 500 listings, paging by the API-reported limit.
        items_number = min(data['paging']['total'], 500)
        while offset < items_number:
            print offset
            response = requests.get(URL_BASE + 'sites/MLA/search?category=' + category_id + '&offset=' + str(offset))
            data = response.json()
            # Flatten nested API dicts so they load cleanly into a DataFrame.
            items = [simplify_item(i, '', '_') for i in data['results']]
            page_df = pd.read_json(json.dumps(items))
            if offset == 0:
                df = page_df
            else:
                df = df.append(page_df)
            offset += limit
    if reduced:
        # reduce dataFrame to items with stock
        # (from which we can calculate a selling price)
        df = df[(df.available_quantity > 5) | (df.id == item['id'])]
        df_speeds = get_selling_speeds(list(df.id))
        df['speed'] = df_speeds.speed
    if extra_features:
        items = get_items(list(df['id']), ['id',"listing_type_id"])
        users = get_users(list(df['id']), ['seller_reputation'])
        df['seller_score'] = find_seller_score(users)
        df['item_score'] = find_item_score(items)
        df['n_images'] = find_imgcount(items)
    df.to_csv(fname, encoding='utf-8')
    return df
def create_dataset_from_item(item):
    """
    Create the dataset from an item dict.
    :param item: the item dict.
    :return: the dataset DataFrame.
    """
    # BUG FIX: create_dataset() expects the item dict itself (it reads
    # 'category_id' and 'condition' from it via .get()); passing only the
    # category-id string crashed with AttributeError on str.get.
    return create_dataset(item)
if __name__ == '__main__':
    # iPhone 5 16gb
    category_id = 'MLA121408'
    # NOTE(review): create_dataset() expects an item dict (it calls
    # item.get('category_id')); passing a bare string here will raise
    # AttributeError — confirm intended usage.
    create_dataset(category_id)
| bsd-3-clause |
sameetb-cuelogic/edx-platform-test | common/djangoapps/student/migrations/0017_rename_date_to_created.py | 188 | 10469 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Rename 'date' field to 'created'
db.rename_column('student_courseenrollment', 'date', 'created')
def backwards(self, orm):
# Rename 'created' field to 'date'
db.rename_column('student_courseenrollment', 'created', 'date')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.courseenrollment': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'telephone_number': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
| agpl-3.0 |
ntddk/pemu | scripts/qapi-commands.py | 73 | 12372 | #
# QAPI command marshaller generator
#
# Copyright IBM, Corp. 2011
# Copyright (C) 2014 Red Hat, Inc.
#
# Authors:
# Anthony Liguori <aliguori@us.ibm.com>
# Michael Roth <mdroth@linux.vnet.ibm.com>
# Markus Armbruster <armbru@redhat.com>
#
# This work is licensed under the terms of the GNU GPL, version 2.
# See the COPYING file in the top-level directory.
from ordereddict import OrderedDict
from qapi import *
import re
import sys
import os
import getopt
import errno
def type_visitor(name):
    """Return the visit_type_* function name for a schema type.

    A one-element list denotes an array type and maps to the FooList
    visitor; a plain string maps directly.
    """
    # isinstance instead of 'type(name) == list': idiomatic and also
    # accepts list subclasses.
    if isinstance(name, list):
        return 'visit_type_%sList' % name[0]
    return 'visit_type_%s' % name
def generate_command_decl(name, args, ret_type):
    """Generate the C prototype for the qmp_<name>() handler.

    Optional arguments get a preceding 'bool has_<arg>' parameter; a
    trailing 'Error **errp' is always appended by the template.
    """
    arglist=""
    for argname, argtype, optional, structured in parse_args(args):
        argtype = c_type(argtype, is_param=True)
        if optional:
            arglist += "bool has_%s, " % c_var(argname)
        arglist += "%s %s, " % (argtype, c_var(argname))
    return mcgen('''
%(ret_type)s qmp_%(name)s(%(args)sError **errp);
''',
                 ret_type=c_type(ret_type), name=c_fun(name), args=arglist).strip()
def gen_err_check(errvar):
    """Generate a C snippet that jumps to 'out' when local_err is set.

    NOTE(review): the generated code always tests 'local_err'; the errvar
    argument only decides whether any check is emitted at all.
    """
    if errvar:
        return mcgen('''
if (local_err) {
    goto out;
}
''')
    return ''
def gen_sync_call(name, args, ret_type, indent=0):
    """Generate the C code that calls qmp_<name>() and, for commands with
    a return value, checks local_err and marshals the result.

    :param indent: number of columns to indent the generated code.
    """
    ret = ""
    arglist=""
    retval=""
    if ret_type:
        retval = "retval = "
    for argname, argtype, optional, structured in parse_args(args):
        if optional:
            arglist += "has_%s, " % c_var(argname)
        arglist += "%s, " % (c_var(argname))
    push_indent(indent)
    ret = mcgen('''
%(retval)sqmp_%(name)s(%(args)s&local_err);
''',
                name=c_fun(name), args=arglist, retval=retval).rstrip()
    if ret_type:
        ret += "\n" + gen_err_check('local_err')
        # BUG FIX: this template previously opened with four quote
        # characters (''''), which made the string start with a stray
        # apostrophe that was emitted into the generated C code.
        ret += "\n" + mcgen('''
%(marshal_output_call)s
''',
                            marshal_output_call=gen_marshal_output_call(name, ret_type)).rstrip()
    pop_indent(indent)
    return ret.rstrip()
def gen_marshal_output_call(name, ret_type):
    """Return the C call to the output marshaller, or '' for void commands."""
    if not ret_type:
        return ""
    return "qmp_marshal_output_%s(retval, ret, &local_err);" % c_fun(name)
def gen_visitor_input_containers_decl(args, obj):
ret = ""
push_indent()
if len(args) > 0:
ret += mcgen('''
QmpInputVisitor *mi = qmp_input_visitor_new_strict(%(obj)s);
QapiDeallocVisitor *md;
Visitor *v;
''',
obj=obj)
pop_indent()
return ret.rstrip()
def gen_visitor_input_vars_decl(args):
ret = ""
push_indent()
for argname, argtype, optional, structured in parse_args(args):
if optional:
ret += mcgen('''
bool has_%(argname)s = false;
''',
argname=c_var(argname))
if is_c_ptr(argtype):
ret += mcgen('''
%(argtype)s %(argname)s = NULL;
''',
argname=c_var(argname), argtype=c_type(argtype))
else:
ret += mcgen('''
%(argtype)s %(argname)s = {0};
''',
argname=c_var(argname), argtype=c_type(argtype))
pop_indent()
return ret.rstrip()
def gen_visitor_input_block(args, dealloc=False):
ret = ""
errparg = '&local_err'
errarg = 'local_err'
if len(args) == 0:
return ret
push_indent()
if dealloc:
errparg = 'NULL'
errarg = None;
ret += mcgen('''
qmp_input_visitor_cleanup(mi);
md = qapi_dealloc_visitor_new();
v = qapi_dealloc_get_visitor(md);
''')
else:
ret += mcgen('''
v = qmp_input_get_visitor(mi);
''')
for argname, argtype, optional, structured in parse_args(args):
if optional:
ret += mcgen('''
visit_optional(v, &has_%(c_name)s, "%(name)s", %(errp)s);
''',
c_name=c_var(argname), name=argname, errp=errparg)
ret += gen_err_check(errarg)
ret += mcgen('''
if (has_%(c_name)s) {
''',
c_name=c_var(argname))
push_indent()
ret += mcgen('''
%(visitor)s(v, &%(c_name)s, "%(name)s", %(errp)s);
''',
c_name=c_var(argname), name=argname, argtype=argtype,
visitor=type_visitor(argtype), errp=errparg)
ret += gen_err_check(errarg)
if optional:
pop_indent()
ret += mcgen('''
}
''')
if dealloc:
ret += mcgen('''
qapi_dealloc_visitor_cleanup(md);
''')
pop_indent()
return ret.rstrip()
def gen_marshal_output(name, args, ret_type, middle_mode):
if not ret_type:
return ""
ret = mcgen('''
static void qmp_marshal_output_%(c_name)s(%(c_ret_type)s ret_in, QObject **ret_out, Error **errp)
{
Error *local_err = NULL;
QmpOutputVisitor *mo = qmp_output_visitor_new();
QapiDeallocVisitor *md;
Visitor *v;
v = qmp_output_get_visitor(mo);
%(visitor)s(v, &ret_in, "unused", &local_err);
if (local_err) {
goto out;
}
*ret_out = qmp_output_get_qobject(mo);
out:
error_propagate(errp, local_err);
qmp_output_visitor_cleanup(mo);
md = qapi_dealloc_visitor_new();
v = qapi_dealloc_get_visitor(md);
%(visitor)s(v, &ret_in, "unused", NULL);
qapi_dealloc_visitor_cleanup(md);
}
''',
c_ret_type=c_type(ret_type), c_name=c_fun(name),
visitor=type_visitor(ret_type))
return ret
def gen_marshal_input_decl(name, args, ret_type, middle_mode):
    """Return the C declaration of the input marshaller for *name*.

    Middle mode emits the legacy monitor-compatible signature (public, int
    return); otherwise a static QMP-only marshaller is declared.
    """
    if middle_mode:
        return 'int qmp_marshal_input_%s(Monitor *mon, const QDict *qdict, QObject **ret)' % c_fun(name)
    else:
        return 'static void qmp_marshal_input_%s(QDict *args, QObject **ret, Error **errp)' % c_fun(name)
def gen_marshal_input(name, args, ret_type, middle_mode):
hdr = gen_marshal_input_decl(name, args, ret_type, middle_mode)
ret = mcgen('''
%(header)s
{
Error *local_err = NULL;
''',
header=hdr)
if middle_mode:
ret += mcgen('''
QDict *args = (QDict *)qdict;
''')
if ret_type:
if is_c_ptr(ret_type):
retval = " %s retval = NULL;" % c_type(ret_type)
else:
retval = " %s retval;" % c_type(ret_type)
ret += mcgen('''
%(retval)s
''',
retval=retval)
if len(args) > 0:
ret += mcgen('''
%(visitor_input_containers_decl)s
%(visitor_input_vars_decl)s
%(visitor_input_block)s
''',
visitor_input_containers_decl=gen_visitor_input_containers_decl(args, "QOBJECT(args)"),
visitor_input_vars_decl=gen_visitor_input_vars_decl(args),
visitor_input_block=gen_visitor_input_block(args))
else:
ret += mcgen('''
(void)args;
''')
ret += mcgen('''
%(sync_call)s
''',
sync_call=gen_sync_call(name, args, ret_type, indent=4))
if re.search('^ *goto out\\;', ret, re.MULTILINE):
ret += mcgen('''
out:
''')
if not middle_mode:
ret += mcgen('''
error_propagate(errp, local_err);
''')
ret += mcgen('''
%(visitor_input_block_cleanup)s
''',
visitor_input_block_cleanup=gen_visitor_input_block(args,
dealloc=True))
if middle_mode:
ret += mcgen('''
if (local_err) {
qerror_report_err(local_err);
error_free(local_err);
return -1;
}
return 0;
''')
else:
ret += mcgen('''
return;
''')
ret += mcgen('''
}
''')
return ret
def option_value_matches(opt, val, cmd):
    """Return True if command expression *cmd* has option *opt* set to *val*."""
    # Idiom: a single boolean expression replaces the if/return-True/
    # return-False ladder; behavior is identical.
    return opt in cmd and cmd[opt] == val
def gen_registry(commands):
    """Generate qmp_init_marshal(), which registers every command's input
    marshaller with the QMP dispatcher at startup."""
    registry=""
    push_indent()
    for cmd in commands:
        options = 'QCO_NO_OPTIONS'
        # 'success-response': 'no' marks commands that send no success reply.
        if option_value_matches('success-response', 'no', cmd):
            options = 'QCO_NO_SUCCESS_RESP'
        registry += mcgen('''
qmp_register_command("%(name)s", qmp_marshal_input_%(c_name)s, %(opts)s);
''',
                          name=cmd['command'], c_name=c_fun(cmd['command']),
                          opts=options)
    pop_indent()
    ret = mcgen('''
static void qmp_init_marshal(void)
{
%(registry)s
}
qapi_init(qmp_init_marshal);
''',
                registry=registry.rstrip())
    return ret
def gen_command_decl_prologue(header, guard, prefix=""):
ret = mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI function prototypes
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "%(prefix)sqapi-types.h"
#include "qapi/qmp/qdict.h"
#include "qapi/error.h"
''',
header=basename(header), guard=guardname(header), prefix=prefix)
return ret
def gen_command_def_prologue(prefix="", proxy=False):
ret = mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QMP->QAPI command dispatch
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "qemu-common.h"
#include "qemu/module.h"
#include "qapi/qmp/qerror.h"
#include "qapi/qmp/types.h"
#include "qapi/qmp/dispatch.h"
#include "qapi/visitor.h"
#include "qapi/qmp-output-visitor.h"
#include "qapi/qmp-input-visitor.h"
#include "qapi/dealloc-visitor.h"
#include "%(prefix)sqapi-types.h"
#include "%(prefix)sqapi-visit.h"
''',
prefix=prefix)
if not proxy:
ret += '#include "%sqmp-commands.h"' % prefix
return ret + "\n\n"
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "chp:i:o:m",
["source", "header", "prefix=",
"input-file=", "output-dir=",
"type=", "middle"])
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
output_dir = ""
prefix = ""
dispatch_type = "sync"
c_file = 'qmp-marshal.c'
h_file = 'qmp-commands.h'
middle_mode = False
do_c = False
do_h = False
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-i", "--input-file"):
input_file = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
elif o in ("-t", "--type"):
dispatch_type = a
elif o in ("-m", "--middle"):
middle_mode = True
elif o in ("-c", "--source"):
do_c = True
elif o in ("-h", "--header"):
do_h = True
if not do_c and not do_h:
do_c = True
do_h = True
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
def maybe_open(really, name, opt):
    """Open file *name* with mode *opt*, or hand back an in-memory stub.

    The stub lets callers unconditionally write even when output of this
    kind was not requested.
    """
    if not really:
        import StringIO
        return StringIO.StringIO()
    return open(name, opt)
try:
os.makedirs(output_dir)
except os.error, e:
if e.errno != errno.EEXIST:
raise
exprs = parse_schema(input_file)
commands = filter(lambda expr: expr.has_key('command'), exprs)
commands = filter(lambda expr: not expr.has_key('gen'), commands)
if dispatch_type == "sync":
fdecl = maybe_open(do_h, h_file, 'w')
fdef = maybe_open(do_c, c_file, 'w')
ret = gen_command_decl_prologue(header=basename(h_file), guard=guardname(h_file), prefix=prefix)
fdecl.write(ret)
ret = gen_command_def_prologue(prefix=prefix)
fdef.write(ret)
for cmd in commands:
arglist = []
ret_type = None
if cmd.has_key('data'):
arglist = cmd['data']
if cmd.has_key('returns'):
ret_type = cmd['returns']
ret = generate_command_decl(cmd['command'], arglist, ret_type) + "\n"
fdecl.write(ret)
if ret_type:
ret = gen_marshal_output(cmd['command'], arglist, ret_type, middle_mode) + "\n"
fdef.write(ret)
if middle_mode:
fdecl.write('%s;\n' % gen_marshal_input_decl(cmd['command'], arglist, ret_type, middle_mode))
ret = gen_marshal_input(cmd['command'], arglist, ret_type, middle_mode) + "\n"
fdef.write(ret)
fdecl.write("\n#endif\n");
if not middle_mode:
ret = gen_registry(commands)
fdef.write(ret)
fdef.flush()
fdef.close()
fdecl.flush()
fdecl.close()
| gpl-2.0 |
MakersLab/Farm-server | server/lib/utils.py | 1 | 1752 | import yaml
import json
import os
def loadFromFile(file_name):
    """Read *file_name*, relative to the package root, and return its text."""
    # Package root = parent directory of the directory containing this module.
    path = '/'.join(os.path.dirname(__file__).split('/')[0:-1])
    with open(os.path.join(path, file_name), 'r') as handle:
        return handle.read()
def loadConfig(file_name):
    """Parse a YAML config file (relative to the package root) into Python
    objects."""
    # SECURITY/DEPRECATION FIX: plain yaml.load() can instantiate arbitrary
    # Python objects from YAML tags and is deprecated without an explicit
    # Loader; config files only need standard YAML types, so safe_load is
    # the right call.
    return yaml.safe_load(loadFromFile(file_name))
def loadJsonObject(file_name):
    """Parse a JSON file (relative to the package root) into Python objects."""
    return json.loads(loadFromFile(file_name))
def writeFile(file_name, content):
    """Write *content* to *file_name*, relative to the package root."""
    root = '/'.join(os.path.dirname(__file__).split('/')[0:-1])
    with open(os.path.join(root, file_name), 'w') as handle:
        handle.write(content)
def writeJsonObject(file_name, object):
    """Serialize *object* as JSON and write it to *file_name*."""
    writeFile(file_name, json.dumps(object))
def removeUnnecessaryData(config):
    """Strip each printer entry down to its 'name' and 'url' keys.

    Mutates *config* in place and returns it for convenience.
    """
    printers = config['printers']
    for key in printers:
        entry = printers[key]
        printers[key] = {'name': entry['name'], 'url': entry['url']}
    return config
def getOfflinePrinterDictionary():
    """Status placeholder reported when the printer itself is unreachable."""
    status = {key: 0 for key in ('progress', 'nozzleTemperature',
                                 'bedTemperature', 'timePrinting',
                                 'timeRemaining')}
    status['state'] = 'Printer is unreachable'
    status['fileName'] = ''
    return status
def getUnreachablePrinterDictionary():
    """Status placeholder reported when the OctoPrint host is unreachable."""
    status = {key: 0 for key in ('progress', 'nozzleTemperature',
                                 'bedTemperature', 'timePrinting',
                                 'timeRemaining')}
    status['state'] = 'Octoprint is unreachable'
    status['fileName'] = ''
    return status
def translatePrinterNamesToPrinterObjects(printerNames, printersConfig):
    """Return a dict mapping each name in *printerNames* to its entry in
    printersConfig['printers'].  Raises KeyError for unknown names."""
    return {name: printersConfig['printers'][name] for name in printerNames}
evernote/evernote-sdk-python3 | lib/thrift/TSerialization.py | 111 | 1389 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from .protocol import TBinaryProtocol
from .transport import TTransport
def serialize(thrift_object,
              protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Serialise *thrift_object* to a byte string using *protocol_factory*.

    NOTE(review): the default factory instance is created once at import
    time and shared across all calls; assumed stateless — confirm before
    relying on a stateful factory here.
    """
    buffer_transport = TTransport.TMemoryBuffer()
    proto = protocol_factory.getProtocol(buffer_transport)
    thrift_object.write(proto)
    return buffer_transport.getvalue()
def deserialize(base,
                buf,
                protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()):
    """Populate the Thrift struct *base* from serialised bytes *buf*.

    Returns *base* (mutated in place) for convenience.
    """
    buffer_transport = TTransport.TMemoryBuffer(buf)
    proto = protocol_factory.getProtocol(buffer_transport)
    base.read(proto)
    return base
| bsd-2-clause |
itsvetkov/pyqtgraph | pyqtgraph/dockarea/Container.py | 2 | 8553 | # -*- coding: utf-8 -*-
from ..Qt import QtCore, QtGui
import weakref
class Container(object):
    """Abstract mix-in for DockArea containers (splitters and tab stacks).

    Handles parent bookkeeping, child insertion, stretch propagation and
    self-removal ("apoptosis") when a container becomes redundant.
    Concrete subclasses are also QWidgets and must supply Qt-style
    count()/widget()/indexOf()/setParent(), an _insertItem(item, index)
    hook, and a sigStretchChanged signal.
    """
    #sigStretchChanged = QtCore.Signal() ## can't do this here; not a QObject.
    def __init__(self, area):
        object.__init__(self)
        # The DockArea this container belongs to.
        self.area = area
        # Parent Container when nested, else None.
        self._container = None
        # (x, y) stretch factors advertised to the parent container.
        self._stretch = (10, 10)
        self.stretches = weakref.WeakKeyDictionary()
    def container(self):
        """Return the parent container (None at the top level)."""
        return self._container
    def containerChanged(self, c):
        """Record *c* as the new parent container."""
        self._container = c
    def type(self):
        """Return 'horizontal', 'vertical', 'tab' or None (abstract)."""
        return None
    def insert(self, new, pos=None, neighbor=None):
        """Insert widget(s) *new* before/after *neighbor*.

        With no neighbor, pos == 'before' inserts at index 0 and anything
        else appends.  Each inserted child is re-parented to this container
        and its stretch signal is connected.
        """
        if not isinstance(new, list):
            new = [new]
        if neighbor is None:
            if pos == 'before':
                index = 0
            else:
                index = self.count()
        else:
            index = self.indexOf(neighbor)
            if index == -1:
                # Neighbor not found; fall back to inserting at the front.
                index = 0
            if pos == 'after':
                index += 1
        for n in new:
            #print "change container", n, " -> ", self
            n.containerChanged(self)
            #print "insert", n, " -> ", self, index
            self._insertItem(n, index)
            index += 1
            n.sigStretchChanged.connect(self.childStretchChanged)
            #print "child added", self
        self.updateStretch()
    def apoptose(self, propagate=True):
        ##if there is only one (or zero) item in this container, disappear.
        cont = self._container
        c = self.count()
        if c > 1:
            return
        if self.count() == 1:  ## if there is one item, give it to the parent container (unless this is the top)
            if self is self.area.topContainer:
                return
            self.container().insert(self.widget(0), 'before', self)
        #print "apoptose:", self
        self.close()
        if propagate and cont is not None:
            # The parent may itself have become redundant now.
            cont.apoptose()
    def close(self):
        """Drop references and detach from the Qt parent."""
        self.area = None
        self._container = None
        self.setParent(None)
    def childEvent(self, ev):
        """Qt child-event hook: when a stretch-aware child is removed,
        disconnect its stretch signal and recompute our own stretch."""
        ch = ev.child()
        if ev.removed() and hasattr(ch, 'sigStretchChanged'):
            #print "Child", ev.child(), "removed, updating", self
            try:
                ch.sigStretchChanged.disconnect(self.childStretchChanged)
            except:
                # Signal may already be disconnected; ignore.
                pass
            self.updateStretch()
    def childStretchChanged(self):
        """Slot: a child's stretch changed, so recompute ours."""
        #print "child", QtCore.QObject.sender(self), "changed shape, updating", self
        self.updateStretch()
    def setStretch(self, x=None, y=None):
        """Store new stretch factors and notify listeners via the signal."""
        #print "setStretch", self, x, y
        self._stretch = (x, y)
        self.sigStretchChanged.emit()
    def updateStretch(self):
        ###Set the stretch values for this container to reflect its contents
        # Abstract: overridden by H/V/TContainer.
        pass
    def stretch(self):
        """Return the stretch factors for this container"""
        return self._stretch
class SplitContainer(Container, QtGui.QSplitter):
    """Horizontal or vertical splitter with some changes:
    - save/restore works correctly
    """
    sigStretchChanged = QtCore.Signal()
    def __init__(self, area, orientation):
        QtGui.QSplitter.__init__(self)
        self.setOrientation(orientation)
        Container.__init__(self, area)
        #self.splitterMoved.connect(self.restretchChildren)
    def _insertItem(self, item, index):
        # Container.insert() hook: place the child widget in the splitter.
        self.insertWidget(index, item)
        item.show() ## need to show since it may have been previously hidden by tab
    def saveState(self):
        """Return a dict describing the splitter's pane sizes."""
        sizes = self.sizes()
        # If the splitter has never been shown, all sizes are 0; store
        # equal placeholder sizes so restoreState produces a sane layout.
        if all([x == 0 for x in sizes]):
            sizes = [10] * len(sizes)
        return {'sizes': sizes}
    def restoreState(self, state):
        """Apply pane sizes previously returned by saveState()."""
        sizes = state['sizes']
        self.setSizes(sizes)
        for i in range(len(sizes)):
            self.setStretchFactor(i, sizes[i])
    def childEvent(self, ev):
        # Forward to both bases: Qt processes the event, Container keeps
        # the stretch bookkeeping in sync.
        QtGui.QSplitter.childEvent(self, ev)
        Container.childEvent(self, ev)
    #def restretchChildren(self):
        #sizes = self.sizes()
        #tot = sum(sizes)
class HContainer(SplitContainer):
    """Horizontal splitter container: children are laid out side by side."""
    def __init__(self, area):
        SplitContainer.__init__(self, area, QtCore.Qt.Horizontal)
    def type(self):
        return 'horizontal'
    def updateStretch(self):
        ##Set the stretch values for this container to reflect its contents
        #print "updateStretch", self
        # Horizontal layout: x stretches add up, y is the max of children.
        x = 0
        y = 0
        sizes = []
        for i in range(self.count()):
            wx, wy = self.widget(i).stretch()
            x += wx
            y = max(y, wy)
            sizes.append(wx)
            #print "  child", self.widget(i), wx, wy
        self.setStretch(x, y)
        #print sizes
        tot = float(sum(sizes))
        if tot == 0:
            scale = 1.0
        else:
            # Distribute the splitter width proportionally to child stretch.
            scale = self.width() / tot
        self.setSizes([int(s*scale) for s in sizes])
class VContainer(SplitContainer):
    """Vertical splitter container: children are stacked top to bottom."""
    def __init__(self, area):
        SplitContainer.__init__(self, area, QtCore.Qt.Vertical)
    def type(self):
        return 'vertical'
    def updateStretch(self):
        ##Set the stretch values for this container to reflect its contents
        #print "updateStretch", self
        # Vertical layout: y stretches add up, x is the max of children.
        x = 0
        y = 0
        sizes = []
        for i in range(self.count()):
            wx, wy = self.widget(i).stretch()
            y += wy
            x = max(x, wx)
            sizes.append(wy)
            #print "  child", self.widget(i), wx, wy
        self.setStretch(x, y)
        #print sizes
        tot = float(sum(sizes))
        if tot == 0:
            scale = 1.0
        else:
            # Distribute the splitter height proportionally to child stretch.
            scale = self.height() / tot
        self.setSizes([int(s*scale) for s in sizes])
class TContainer(Container, QtGui.QWidget):
    """Tab container: stacks docks with a row of clickable tab labels above."""
    sigStretchChanged = QtCore.Signal()
    def __init__(self, area):
        QtGui.QWidget.__init__(self)
        Container.__init__(self, area)
        self.layout = QtGui.QGridLayout()
        self.layout.setSpacing(0)
        self.layout.setContentsMargins(0,0,0,0)
        self.setLayout(self.layout)
        # Horizontal strip holding the clickable tab labels.
        self.hTabLayout = QtGui.QHBoxLayout()
        self.hTabBox = QtGui.QWidget()
        self.hTabBox.setLayout(self.hTabLayout)
        self.hTabLayout.setSpacing(2)
        self.hTabLayout.setContentsMargins(0,0,0,0)
        self.layout.addWidget(self.hTabBox, 0, 1)
        # Stacked widget showing only the currently selected dock.
        self.stack = QtGui.QStackedWidget()
        self.layout.addWidget(self.stack, 1, 1)
        self.stack.childEvent = self.stackChildEvent
        self.setLayout(self.layout)
        # Delegate Qt container queries directly to the stack.
        for n in ['count', 'widget', 'indexOf']:
            setattr(self, n, getattr(self.stack, n))
    def _insertItem(self, item, index):
        # Container.insert() hook; tab containers accept Docks only.
        if not isinstance(item, Dock.Dock):
            raise Exception("Tab containers may hold only docks, not other containers.")
        self.stack.insertWidget(index, item)
        self.hTabLayout.insertWidget(index, item.label)
        #QtCore.QObject.connect(item.label, QtCore.SIGNAL('clicked'), self.tabClicked)
        item.label.sigClicked.connect(self.tabClicked)
        # The newly inserted dock becomes the visible tab.
        self.tabClicked(item.label)
    def tabClicked(self, tab, ev=None):
        """Raise the dock whose label was clicked (left button, or any
        programmatic call with ev=None) and dim all other labels."""
        if ev is None or ev.button() == QtCore.Qt.LeftButton:
            for i in range(self.count()):
                w = self.widget(i)
                if w is tab.dock:
                    w.label.setDim(False)
                    self.stack.setCurrentIndex(i)
                else:
                    w.label.setDim(True)
    def raiseDock(self, dock):
        """Move *dock* to the top of the stack"""
        self.stack.currentWidget().label.setDim(True)
        self.stack.setCurrentWidget(dock)
        dock.label.setDim(False)
    def type(self):
        return 'tab'
    def saveState(self):
        """Return a dict recording which tab is currently selected."""
        return {'index': self.stack.currentIndex()}
    def restoreState(self, state):
        """Re-select the tab recorded by saveState()."""
        self.stack.setCurrentIndex(state['index'])
    def updateStretch(self):
        ##Set the stretch values for this container to reflect its contents
        # Tabs overlap, so both axes take the max over all children.
        x = 0
        y = 0
        for i in range(self.count()):
            wx, wy = self.widget(i).stretch()
            x = max(x, wx)
            y = max(y, wy)
        self.setStretch(x, y)
    def stackChildEvent(self, ev):
        # Bound to self.stack.childEvent in __init__: keeps Container's
        # bookkeeping in sync with the stack's children.
        QtGui.QStackedWidget.childEvent(self.stack, ev)
        Container.childEvent(self, ev)
from . import Dock
| mit |
mtorluemke/traffic_control | misc/kickstart_create_network_line.py | 13 | 11116 | #!/usr/bin/python
# Copyright 2015 Comcast Cable Communications Management, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
This reads a configuration file and checks for functioning
network links in /sys/class/net/*, then emits a ks.cfg network line.
'''
import os
import re
# Module-wide debug/log flags.  NOTE: the original declared these with
# module-scope ``global DEBUG`` / ``global TO_LOG`` statements; ``global``
# is a no-op outside a function, so those statements are dropped.
DEBUG = True
# This "logs" to stdout which is captured during kickstart
TO_LOG = True
# This is the standard interface we install to. It is set to a speed value of
# 5 (vice 100,1000 or 10000) later on and any other interface will override it
# if you've got something faster installed.
standard_interface=['p4p1']
## These are configuration settings:
# Name of Configuration file:
cfg_file = "network.cfg"
# Where linux is putting the interface stuff:
iface_dir = '/sys/class/net/'
# Interfaces we never configure (loopback, existing bonds).
ignore_interfaces = ['lo','bond']
# Where we kickstart mounts the ISO, and our config directory:
base_cfg_dir = '/mnt/stage2/ks_scripts/'
# Parses KEY=VALUE lines in network.cfg.
# Remember the ? makes the match non-greedy. This is important.
cfg_line = re.compile("\s*(?P<key>.*?)=(?P<value>.*)\s*$")
# Pick the interface speed for bonding, or "Auto".
# Auto assumes you want the fastest connections with more than 1 interface,
# Or if there's not 2 interfaces at the same speed you want the fastest.
# auto is expected to be a string, otherwise use integers:
# Speed is in megs. 1000 is 1 gig, 10000 is 10G.
iface_speed = 'auto'
# Extracts the interface name from a /sys/class/net/<iface>/speed path.
restring="%(iface_dir)s(?P<iface>.*)/speed" %vars()
iface_search = re.compile(restring)
def read_config(config_file):
    """Read the network config file and return a dict of its KEY=VALUE
    pairs (parsed with the module-level cfg_line regex)."""
    net_cfg = {}
    with open(config_file, 'r') as cfg:
        # Iterate the file directly instead of readlines(); match each
        # line once (the original ran the regex three times per line).
        for line in cfg:
            match = cfg_line.match(line)
            if match:
                net_cfg[match.group('key')] = match.group('value')
    return net_cfg
def find_usable_net_devs(location):
    ''' Scan *location* (e.g. /sys/class/net/) for interfaces with link.

    Returns a dict mapping link speed (in Mb/s) to the list of interface
    names detected at that speed.  The dict is pre-seeded with the
    standard install interface at a fake speed of 5 so that something is
    always available to configure later.  Interfaces matching
    ignore_interfaces, or with no carrier, are excluded. '''
    # Pre-seed with the standard interface names at speed 5 so that if
    # there's nothing else we still set that up; makes reconfiguration
    # easier later.
    ifaces = {5:standard_interface}
    bad_ifaces={}
    devs = os.listdir(location)
    for dev in devs:
        dev_path = os.path.join(location,dev,'speed')
        add=True
        if os.path.isfile(dev_path):
            with open(dev_path,'r') as iface:
                try:
                    speed = iface.readlines()
                    # speed should only have one line:
                    speed = int(speed[0])
                # if there is no link some drivers/cards/whatever will
                # throw an IOError when you try to read the speed file.
                except IOError:
                    speed = 0
            # Other cards will return a -1, which is fine, but *some* of them
            # return a 65535. Those we set to 0 as well.
            if speed == 65535:
                speed = 0
            # Reject ignored interface names and anything without link.
            for i_face in ignore_interfaces:
                if i_face in dev:
                    add = False
            if speed <= 0:
                add = False
            if TO_LOG: print add, dev
            if add:
                # Group accepted devices into the per-speed bucket.
                if speed in ifaces:
                    this_speed = ifaces[speed]
                    this_speed.append(dev)
                    ifaces[speed]=this_speed
                else:
                    ifaces[speed]=[dev]
            else:
                bad_ifaces[dev] = speed
    print "We find these interfaces have link and might be useful:", ifaces
    if TO_LOG: print "And these aren't useful:", bad_ifaces
    return ifaces
def useable_interfaces(net_devs, nc, iface_speed):
''' This takes a go at figuring out which interfaces to use.'''
iface_list = False
notes = False
if TO_LOG: print "in usable interfaces"
if not ("bond" in nc['BOND_DEVICE'].lower()):
if TO_LOG: print "useable interfaces if not", nc['BOND_DEVICE']
# Not doing a bond, so we check to make sure the requested device,
# nc['BOND_DEVICE'], is in the list of devices with carrier:
if nc['BOND_DEVICE'] == '""':
# In this case we have no network interface in the configuration but we
# network settings.
# First we check how many net_devs we have:
if TO_LOG: print "nc['BOND_DEVICE']=''", len(net_devs), net_devs
if len(net_devs) == 1: # This is a dict of speed: devices
speeds = net_devs.keys()
speeds.sort(reverse=True)
speed = speeds[0]
possibles = net_devs[speed]
if TO_LOG: print possibles
# At this point we have options, but no information, so:
notes = "No device in the configuration file and multiple devices found. Picking the first"
iface_list = [possibles[0]]
else:
if TO_LOG: print "inner else"
for speed in net_devs:
if nc['BOND_DEVICE'] in net_devs[speed]:
iface_list = [nc['BOND_DEVICE']]
else:
iface_list = [nc['BOND_DEVICE']]
notes = "{0} did not have carrier at install time, and may not work".format(nc['BOND_DEVICE'])
elif iface_speed is not 'auto':
if len(net_devs[iface_speed]) > 0:
iface_list = net_devs[iface_speed]
else:
notes = "no devices set to {0}".format(iface_speed)
else: # This SHOULD be iface_speed == auto, and nc['BOND_DEVCE'] containing bond.
# if not it is anyway.
# Thus we are doing a bond of some sort.
# This gives us the fastest interfaces first:
speeds = net_devs.keys()
speeds.sort(reverse=True)
fastest = speeds[0]
# Walk through "speeds" and take the first one that has more than one
# interface. This will only set iface_list if there are 2 or more interfaces:
# previous_speed = 0
for i in speeds:
if len(net_devs[i]) > 1:
iface_list = net_devs[i]
break
if TO_LOG: print "iface list:", iface_list
# if iface_list is still false, and we are requesting a bond, we will
# want the fastest interface with link:
if (iface_list == False) and ("bond" in nc['BOND_DEVICE'].lower()):
if TO_LOG: print len(net_devs), net_devs, i
if len(net_devs) == 0:
iface_list = net_devs
notes = "no devices found for the bond. Will not have network after reboot"
else:
iface_list = net_devs[fastest] # This is assuming that we'll want to bond the fastest interaface.
if TO_LOG: print "dev:", net_devs[fastest]
if TO_LOG: print iface_list, notes
return iface_list, notes
# Find our network configuration file:
if os.path.isfile(os.path.join(base_cfg_dir,cfg_file)):
cfg_path = os.path.join(base_cfg_dir,cfg_file)
elif os.path.isfile(cfg_file):
cfg_path = cfg_file
else:
cfg_path = ''
if cfg_path:
nc = read_config(cfg_path)
else:
# if we don't have a working config file we use this
# The IPs and hostnames are bad.
nc = { IPADDR:"10.0.0.2",
NETMASK:"255.255.255.252",
GATEWAY:"10.0.0.1",
BOND_DEVICE:"bond0",
MTU:"9000",
NAMESERVER:"192.168.0.1",
HOSTNAME:"bad.example.com",
NETWORKING_IPV6:"yes",
IPV6ADDR:" 2001:0db8:0a0b:12f0:0000:0000:0000:0002/64",
IPV6_DEFAULTGW:" 2001:0db8:0a0b:12f0:0000:0000:0000:0001",
BONDING_OPTS:"miimon=100 mode=4 lacp_rate=fast xmit_hash_policy=layer3+4",
DHCP:"no" }
# This should be set to no in the config file, but that could change:
if not nc.has_key('DHCP'):
nc['DHCP']='no'
# This should be set to no in the config file, but that could change:
if not nc.has_key('DHCP'):
nc['DHCP']='no'
net_devs = find_usable_net_devs(iface_dir)
bondable_iface, iface_problems = useable_interfaces(net_devs, nc, iface_speed)
# turn bondable_iface into a string for the network line:
if bondable_iface and len(bondable_iface) > 1:
dev_list = bondable_iface
dev_str = dev_list.pop()
for d in dev_list:
dev_str="%(dev_str)s,%(d)s" %vars()
else:
dev_str = bondable_iface[0]
if ('y' in nc['NETWORKING_IPV6'].lower()) and re.search(":",nc['IPV6ADDR']):
temp_var = nc['IPV6ADDR']
IPV6 = "--ipv6=%(temp_var)s" %vars()
else:
if 'y' in nc['NETWORKING_IPV6'].lower():
if iface_problems is False:
iface_problems = "IPv6 enabled but no address provided"
else:
iface_problems = "{0} and IPv6 enabled but no address provided".format(iface_problems)
if re.search(":",nc['IPV6ADDR']):
if iface_problems is False:
iface_problems = "IPv6 is disabled, but IPV6ADDR was set to {0}".format(nc['IPV6ADDR'])
else:
iface_problems = "{0} and IPv6 is disabled, but IPV6ADDR was set to {1}".format(iface_problems,nc['IPV6ADDR'])
IPV6 = "--noipv6"
if "bond" in nc['BOND_DEVICE'].lower():
bond_stuff = "--device=%s --bondslaves=%s --bondopts=%s" %(nc['BOND_DEVICE'] , dev_str, nc['BONDING_OPTS'])
elif nc['BOND_DEVICE'] in dev_str:
bond_stuff = "--device={0}".format(nc["BOND_DEVICE"])
elif bondable_iface and nc['BOND_DEVICE'] == '""' :
print "**\nNo device (BOND_DEVICE) specified it he config, found {0} with link, using it. \n**\n".format(bondable_iface);
bond_stuff = "--device={0}".format(bondable_iface[0])
else:
print "**\n{0} not found within $usable_devices, setting anyway, this probably won't work\n**\n".format(nc["BOND_DEVICE"]);
bond_stuff = "--device={0}".format(nc["BOND_DEVICE"])
if 'yes' in nc['DHCP'].lower() or not bondable_iface:
network_line = "network --bootproto=dhcp --device={0} --hostname={1}".format(nc['BOND_DEVICE'], nc['HOSTNAME'])
else:
network_line = "network --bootproto=static {0} --activate {1} --ip={2} --netmask={3} --gateway={4} --nameserver={5} --mtu={6} --hostname={7} \n".format(
bond_stuff, IPV6, nc['IPADDR'], nc['NETMASK'], nc['GATEWAY'], nc['NAMESERVER'], nc['MTU'], nc['HOSTNAME'] )
if iface_problems:
network_line = "# Problems found: {0}\n{1}".format(iface_problems,network_line)
with open('/tmp/network_line','w') as OUT:
OUT.write(network_line)
| apache-2.0 |
InstitutoDOr/BROCCOLI | code/Python_Wrapper/broccoli/broccoli_common.py | 2 | 7605 | from broccoli_base import *
import numpy
from nibabel import nifti1
import os
import site
import shutil
BROCCOLI_LIB_BASE = BROCCOLI_LIB
# DONE: Check that passing arrays to C method as 1D packed arrays is the same as passing arays using the 3D array wrappers
# DONE: Check that packing (packVolume) and unpacking (unpackOutputVolume) results in the original input array
# DONE: Transpose and reshape until the two conditions above are not met
def load_MNI_templates(mni_file, mni_brain_file = None, mni_brain_mask_file = None):
    """Load the MNI template, its skull-stripped version and the brain mask.

    When the brain / mask filenames are omitted they are derived from
    *mni_file* by inserting '_brain' / '_brain_mask' before '.nii'.
    Returns (MNI, MNI_brain, MNI_brain_mask, voxel_sizes).
    """
    mni_brain_file = mni_brain_file or mni_file.replace('.nii', '_brain.nii')
    mni_brain_mask_file = mni_brain_mask_file or mni_file.replace('.nii', '_brain_mask.nii')
    template_img = nifti1.load(mni_file)
    brain = nifti1.load(mni_brain_file).get_data()
    brain_mask = nifti1.load(mni_brain_mask_file).get_data()
    # pixdim[1:4] holds the spatial voxel sizes (x, y, z).
    voxel_sizes = template_img.get_header()['pixdim'][1:4]
    return template_img.get_data(), brain, brain_mask, voxel_sizes
def load_T1(t1_file):
    """Load a T1 volume; return (data, spatial voxel sizes from pixdim[1:4])."""
    img = nifti1.load(t1_file)
    return img.get_data(), img.get_header()['pixdim'][1:4]
def load_EPI(epi_file, only_volume=True):
    """Load an EPI image; return (data, voxel sizes).

    If *only_volume* is true, only the first timepoint of the 4D series
    is kept.  NOTE(review): voxel sizes are taken from pixdim[2:5] here,
    not [1:4] as for T1/MNI — presumably deliberate for this data layout,
    but verify.
    """
    img = nifti1.load(epi_file)
    data = img.get_data()
    if only_volume:
        data = data[...,0]
    return data, img.get_header()['pixdim'][2:5]
_pack_permutation = (2, 0, 1)
_pack_permutation_4d = (3, 2, 0, 1)
def _permute(permutation, array):
n = len(array)
return [array[permutation[i]] for i in range(n)]
class BROCCOLI_LIB(BROCCOLI_LIB_BASE):
    """Python-friendly wrapper around the generated BROCCOLI library.

    Adds setters that accept numpy volumes, handles (de)packing of arrays
    into the flat float32 layout the C/OpenCL code expects, and exposes
    OpenCL error-report helpers.
    """
    def __init__(self, *args):
        BROCCOLI_LIB_BASE.__init__(self)
        """
        This is a hack to prevent Python from free()-ing arrays
        that have been packed and then passed to C
        """
        self._input_arrays = []
        # With (platform, device) arguments, initialise OpenCL immediately.
        if len(args) == 2:
            self.OpenCLInitiate(*args)
    def OpenCLInitiate(self, platform, device):
        # The OpenCL kernel source must exist in the working directory;
        # copy it out of site-packages if it is missing.
        if not os.path.exists('broccoli_lib_kernel.cpp'):
            for s in site.getsitepackages():
                if os.path.exists(os.path.join(s, 'broccoli/broccoli_lib_kernel.cpp')):
                    shutil.copy(os.path.join(s, 'broccoli/broccoli_lib_kernel.cpp'), os.getcwd())
                    break
        if os.path.exists('broccoli_lib_kernel.cpp'):
            BROCCOLI_LIB_BASE.OpenCLInitiate(self, platform, device)
        else:
            raise RuntimeError('could not find broccoli_lib_kernel.cpp in current directory or in site-packages')
    def SetEPIData(self, array, voxel_sizes):
        """Upload a single 3D EPI volume plus its voxel sizes (x, y, z)."""
        self.SetEPIHeight(array.shape[0])
        self.SetEPIWidth(array.shape[1])
        self.SetEPIDepth(array.shape[2])
        t = self.packVolume(array)
        self.SetInputEPIVolume(t)
        self.SetEPIVoxelSizeX(voxel_sizes[0])
        self.SetEPIVoxelSizeY(voxel_sizes[1])
        self.SetEPIVoxelSizeZ(voxel_sizes[2])
    def SetT1Data(self, array, voxel_sizes):
        """Upload a 3D T1 volume plus its voxel sizes (x, y, z)."""
        self.SetT1Height(array.shape[0])
        self.SetT1Width(array.shape[1])
        self.SetT1Depth(array.shape[2])
        t = self.packVolume(array)
        self.SetInputT1Volume(t)
        self.SetT1VoxelSizeX(voxel_sizes[0])
        self.SetT1VoxelSizeY(voxel_sizes[1])
        self.SetT1VoxelSizeZ(voxel_sizes[2])
    def SetMNIData(self, array, voxel_sizes):
        """Upload a 3D MNI template volume plus its voxel sizes (x, y, z)."""
        self.SetMNIHeight(array.shape[0])
        self.SetMNIWidth(array.shape[1])
        self.SetMNIDepth(array.shape[2])
        t = self.packVolume(array)
        self.SetInputMNIVolume(t)
        self.SetMNIVoxelSizeX(voxel_sizes[0])
        self.SetMNIVoxelSizeY(voxel_sizes[1])
        self.SetMNIVoxelSizeZ(voxel_sizes[2])
    def SetfMRIData(self, array, voxel_sizes):
        """Upload a 4D fMRI series (x, y, z, t) plus its voxel sizes."""
        self.SetEPIHeight(array.shape[0])
        self.SetEPIWidth(array.shape[1])
        self.SetEPIDepth(array.shape[2])
        self.SetEPITimepoints(array.shape[3])
        t = self.packVolume(array)
        self.SetInputfMRIVolumes(t)
        self.SetEPIVoxelSizeX(voxel_sizes[0])
        self.SetEPIVoxelSizeY(voxel_sizes[1])
        self.SetEPIVoxelSizeZ(voxel_sizes[2])
    def SetParametricImageRegistrationFilters(self, filters):
        """Upload 3 complex quadrature filters as (real, imag) pairs."""
        args = []
        for i in range(3):
            args.append(self.packVolume(numpy.real(filters[i])))
            args.append(self.packVolume(numpy.imag(filters[i])))
        BROCCOLI_LIB_BASE.SetParametricImageRegistrationFilters(self, *args)
    def SetNonParametricImageRegistrationFilters(self, filters):
        """Upload 6 complex quadrature filters as (real, imag) pairs."""
        args = []
        for i in range(6):
            args.append(self.packVolume(numpy.real(filters[i])))
            args.append(self.packVolume(numpy.imag(filters[i])))
        BROCCOLI_LIB_BASE.SetNonParametricImageRegistrationFilters(self, *args)
    def SetProjectionTensorMatrixFilters(self, filters):
        """Unpack the six projection-tensor filter tuples into the
        corresponding base-class setters."""
        self.SetProjectionTensorMatrixFirstFilter(*filters[0])
        self.SetProjectionTensorMatrixSecondFilter(*filters[1])
        self.SetProjectionTensorMatrixThirdFilter(*filters[2])
        self.SetProjectionTensorMatrixFourthFilter(*filters[3])
        self.SetProjectionTensorMatrixFifthFilter(*filters[4])
        self.SetProjectionTensorMatrixSixthFilter(*filters[5])
    def packArray(self, array):
        """Return *array* as a contiguous float32 ndarray (the layout the
        C side expects)."""
        return numpy.ascontiguousarray(array, dtype=numpy.float32)
    def packVolume(self, array):
        """Flip/permute a 3D or 4D volume into the library's axis order,
        flatten it to float32, and keep a reference so the buffer is not
        garbage-collected while C holds a pointer to it."""
        if len(array.shape) == 3:
            array = numpy.flipud(array)
            t = array.transpose(_pack_permutation)
        elif len(array.shape) == 4:
            array = numpy.flipud(array)
            t = array.transpose(_pack_permutation_4d)
        else:
            # Non-volume arrays (e.g. 1D filters) are passed through as-is.
            t = array
        t = self.packArray(t.flatten())
        self._input_arrays.append(t)
        return t
    def createOutputArray(self, shape, dtype=numpy.float32):
        """Allocate a flat output buffer for the C side to fill."""
        return numpy.empty(shape=shape, dtype=dtype).flatten()
    def unpackOutputArray(self, array, shape):
        """Reshape a flat output buffer to *shape*."""
        return array.reshape(shape)
    def unpackOutputVolume(self, array, shape = None):
        """Invert packVolume: reshape a flat output buffer into the packed
        axis order, undo the flip, and permute back to (x, y, z[, t]).

        NOTE(review): packVolume flips with flipud while this uses fliplr
        on the permuted array; for 3D the permutation makes these inverse
        operations, but for 4D the flipped axes appear to differ — verify
        against the C library's layout.
        """
        unpack = None
        if shape:
            if len(shape) == 3:
                t_shape = _permute(_pack_permutation, shape)
                unpack = numpy.argsort(_pack_permutation)
            elif len(shape) == 4:
                t_shape = _permute(_pack_permutation_4d, shape)
                unpack = numpy.argsort(_pack_permutation_4d)
            else:
                t_shape = shape
            array = self.unpackOutputArray(array, t_shape)
        array = numpy.fliplr(array)
        if unpack is not None:
            return array.transpose(unpack)
        else:
            return array
    def printSetupErrors(self):
        """Print the OpenCL initialisation error codes collected by the
        base library (0 means success)."""
        print("Get platform IDs error is %d" % self.GetOpenCLPlatformIDsError())
        print("Get device IDs error is %d" % self.GetOpenCLDeviceIDsError())
        print("Create context error is %d" % self.GetOpenCLCreateContextError())
        print("Get create context info error is %d" % self.GetOpenCLContextInfoError())
        print("Create command queue error is %d" % self.GetOpenCLCreateCommandQueueError())
        print("Create program error is %d" % self.GetOpenCLCreateProgramError())
        print("Build program error is %d" % self.GetOpenCLBuildProgramError())
        print("Get program build info error is %d" % self.GetOpenCLProgramBuildInfoError())
        numOpenKernels = self.GetNumberOfOpenCLKernels()
        createKernelErrors = self.GetOpenCLCreateKernelErrors()
        for i in range(numOpenKernels):
            error = createKernelErrors[i]
            if error:
                print("Run kernel error %d is %d" % (i, error))
    def printRunErrors(self):
        """Print per-kernel buffer-creation and run error codes (only
        non-zero codes are shown)."""
        numOpenKernels = self.GetNumberOfOpenCLKernels()
        createBufferErrors = self.GetOpenCLCreateBufferErrors()
        runKernelErrors = self.GetOpenCLRunKernelErrors()
        for i in range(numOpenKernels):
            if createBufferErrors[i]:
                print("Create buffer error %d is %d" % (i, createBufferErrors[i]))
            if runKernelErrors[i]:
                print("Run kernel error %d is %d" % (i, runKernelErrors[i]))
| gpl-3.0 |
driftx/Telephus | telephus/cassandra/ttypes.py | 1 | 127886 | #
# Autogenerated by Thrift Compiler (0.7.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class ConsistencyLevel:
  """Consistency levels controlling read/write blocking behaviour.

  With W replicas blocked on write and R on read, W + R > ReplicationFactor
  yields strongly consistent behaviour (readers always see the most recent
  write); QUORUM for both reads and writes is the usual choice.
  ONE/TWO/THREE name fixed replica counts rather than a fraction of the
  replication factor, and are mostly useful for durability guarantees.

  Writes: ANY accepts a hinted write anywhere; ONE/TWO/THREE require that
  many commit-log + memtable writes; QUORUM requires ReplicationFactor/2 + 1
  replicas; LOCAL_QUORUM / EACH_QUORUM require a quorum in the local /
  every datacenter (NetworkTopologyStrategy); ALL requires every replica.

  Reads: ANY is not supported; ONE returns a single replica's record;
  TWO/THREE/QUORUM/ALL return the record with the newest timestamp once
  that many (or a quorum of / all) replicas reply; LOCAL_QUORUM /
  EACH_QUORUM are the per-datacenter quorum variants.
  """
  ONE = 1
  QUORUM = 2
  LOCAL_QUORUM = 3
  EACH_QUORUM = 4
  ALL = 5
  ANY = 6
  TWO = 7
  THREE = 8

  # Canonical value -> name table; the reverse map is derived from it.
  _VALUES_TO_NAMES = {
    1: "ONE", 2: "QUORUM", 3: "LOCAL_QUORUM", 4: "EACH_QUORUM",
    5: "ALL", 6: "ANY", 7: "TWO", 8: "THREE",
  }
  _NAMES_TO_VALUES = dict((name, value) for value, name in _VALUES_TO_NAMES.items())
class IndexOperator:
  """Comparison operators usable in secondary-index query expressions."""
  EQ = 0
  GTE = 1
  GT = 2
  LTE = 3
  LT = 4

  # Canonical value -> name table; the reverse map is derived from it.
  _VALUES_TO_NAMES = {
    0: "EQ", 1: "GTE", 2: "GT", 3: "LTE", 4: "LT",
  }
  _NAMES_TO_VALUES = dict((name, value) for value, name in _VALUES_TO_NAMES.items())
class IndexType:
  """Kinds of secondary index that can be attached to a column."""
  KEYS = 0
  CUSTOM = 1
  COMPOSITES = 2

  # Canonical value -> name table; the reverse map is derived from it.
  _VALUES_TO_NAMES = {
    0: "KEYS", 1: "CUSTOM", 2: "COMPOSITES",
  }
  _NAMES_TO_VALUES = dict((name, value) for value, name in _VALUES_TO_NAMES.items())
class Compression:
  """
  CQL query compression
  """
  GZIP = 1
  NONE = 2

  # Canonical value -> name table; the reverse map is derived from it.
  _VALUES_TO_NAMES = {
    1: "GZIP", 2: "NONE",
  }
  _NAMES_TO_VALUES = dict((name, value) for value, name in _VALUES_TO_NAMES.items())
class CqlResultType:
  """Shape of a CQL result: rows, nothing (void), or a single integer."""
  ROWS = 1
  VOID = 2
  INT = 3

  # Canonical value -> name table; the reverse map is derived from it.
  _VALUES_TO_NAMES = {
    1: "ROWS", 2: "VOID", 3: "INT",
  }
  _NAMES_TO_VALUES = dict((name, value) for value, name in _VALUES_TO_NAMES.items())
class Column:
  """
  Basic unit of data within a ColumnFamily.
  @param name, the name by which this column is set and retrieved. Maximum 64KB long.
  @param value. The data associated with the name. Maximum 2GB long, but in practice you should limit it to small numbers of MB (since Thrift must read the full value into memory to operate on it).
  @param timestamp. The timestamp is used for conflict detection/resolution when two columns with same name need to be compared.
  @param ttl. An optional, positive delay (in seconds) after which the column will be automatically deleted.

  Attributes:
   - name
   - value
   - timestamp
   - ttl
  """

  # Field metadata tuple (indexed by field id) consumed by the optional
  # C-accelerated fastbinary codec: (id, type, name, extra, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.STRING, 'value', None, None, ), # 2
    (3, TType.I64, 'timestamp', None, None, ), # 3
    (4, TType.I32, 'ttl', None, None, ), # 4
  )

  def __init__(self, name=None, value=None, timestamp=None, ttl=None,):
    self.name = name
    self.value = value
    self.timestamp = timestamp
    self.ttl = ttl

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: use the C decoder when the protocol and transport allow.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic pure-Python decode: read fields until STOP, skipping any
    # field whose id or wire type does not match the spec.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.value = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I64:
          self.timestamp = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.ttl = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialise this struct to the protocol *oprot*; None fields are
    omitted from the wire format."""
    # Fast path: use the C encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('Column')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 2)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    if self.timestamp is not None:
      oprot.writeFieldBegin('timestamp', TType.I64, 3)
      oprot.writeI64(self.timestamp)
      oprot.writeFieldEnd()
    if self.ttl is not None:
      oprot.writeFieldBegin('ttl', TType.I32, 4)
      oprot.writeI32(self.ttl)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if required fields are unset."""
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structs compare by full attribute-dict equality.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class SuperColumn:
  """A named list of standard Columns.

  The columns inside a super column are ad-hoc: they need not share a
  structure or naming scheme (similarly named child columns) with each other.

  Attributes:
   - name: binary name of the super column (see Column.name)
   - columns: list of Column instances
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.LIST, 'columns', (TType.STRUCT,(Column, Column.thrift_spec)), None, ), # 2
  )

  def __init__(self, name=None, columns=None,):
    self.name = name
    self.columns = columns

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.name = iprot.readString()
      elif fid == 2 and ftype == TType.LIST:
        self.columns = []
        (_etype, _size) = iprot.readListBegin()
        for _ in xrange(_size):
          col = Column()
          col.read(iprot)
          self.columns.append(col)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('SuperColumn')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for col in self.columns:
        col.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field was never set."""
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class CounterColumn:
  """A counter column: a binary name plus a 64-bit counter value.

  Attributes:
   - name
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.I64, 'value', None, None, ), # 2
  )

  def __init__(self, name=None, value=None,):
    self.name = name
    self.value = value

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.name = iprot.readString()
      elif fid == 2 and ftype == TType.I64:
        self.value = iprot.readI64()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CounterColumn')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I64, 2)
      oprot.writeI64(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field was never set."""
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    if self.value is None:
      raise TProtocol.TProtocolException(message='Required field value is unset!')

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class CounterSuperColumn:
  """A named list of CounterColumns.

  Attributes:
   - name: binary name of the super column
   - columns: list of CounterColumn instances
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.LIST, 'columns', (TType.STRUCT,(CounterColumn, CounterColumn.thrift_spec)), None, ), # 2
  )

  def __init__(self, name=None, columns=None,):
    self.name = name
    self.columns = columns

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.name = iprot.readString()
      elif fid == 2 and ftype == TType.LIST:
        self.columns = []
        (_etype, _size) = iprot.readListBegin()
        for _ in xrange(_size):
          col = CounterColumn()
          col.read(iprot)
          self.columns.append(col)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CounterSuperColumn')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for col in self.columns:
        col.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field was never set."""
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class ColumnOrSuperColumn:
  """Union-like wrapper returned by row/record fetch methods.

  Methods such as get() and get_slice() return either one of these or a list
  of them. Exactly one attribute is populated, depending on what was queried:
  a SuperColumn lookup fills `super_column`, a Column lookup fills `column`.
  (This shape was introduced between 0.3 and 0.4 to standardize on single
  query methods that may return either kind.) For counter column families the
  result arrives in `counter_column` or `counter_super_column` instead.

  Attributes:
   - column: the Column returned by get() or get_slice()
   - super_column: the SuperColumn returned by get() or get_slice()
   - counter_column: the CounterColumn returned by get() or get_slice()
   - counter_super_column: the CounterSuperColumn returned by get() or get_slice()
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'column', (Column, Column.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'super_column', (SuperColumn, SuperColumn.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'counter_column', (CounterColumn, CounterColumn.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'counter_super_column', (CounterSuperColumn, CounterSuperColumn.thrift_spec), None, ), # 4
  )

  def __init__(self, column=None, super_column=None, counter_column=None, counter_super_column=None,):
    self.column = column
    self.super_column = super_column
    self.counter_column = counter_column
    self.counter_super_column = counter_super_column

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRUCT:
        self.column = Column()
        self.column.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.super_column = SuperColumn()
        self.super_column.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.counter_column = CounterColumn()
        self.counter_column.read(iprot)
      elif fid == 4 and ftype == TType.STRUCT:
        self.counter_super_column = CounterSuperColumn()
        self.counter_super_column.read(iprot)
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ColumnOrSuperColumn')
    if self.column is not None:
      oprot.writeFieldBegin('column', TType.STRUCT, 1)
      self.column.write(oprot)
      oprot.writeFieldEnd()
    if self.super_column is not None:
      oprot.writeFieldBegin('super_column', TType.STRUCT, 2)
      self.super_column.write(oprot)
      oprot.writeFieldEnd()
    if self.counter_column is not None:
      oprot.writeFieldBegin('counter_column', TType.STRUCT, 3)
      self.counter_column.write(oprot)
      oprot.writeFieldEnd()
    if self.counter_super_column is not None:
      oprot.writeFieldBegin('counter_super_column', TType.STRUCT, 4)
      self.counter_super_column.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Nothing to validate: all fields are optional."""
    pass

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class NotFoundException(Exception):
  """Raised when a specific column was requested that does not exist."""

  thrift_spec = (
  )

  def read(self, iprot):
    """Populate this exception from the protocol *iprot* (no declared fields)."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      # No fields are declared; skip whatever the peer sent.
      iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('NotFoundException')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Nothing to validate: there are no required fields."""
    pass

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class InvalidRequestException(Exception):
  """Raised for malformed requests.

  An invalid request can mean the keyspace or column family does not exist,
  required parameters are missing, or a parameter is malformed. `why`
  carries the associated error message.

  Attributes:
   - why: human-readable explanation of the failure
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'why', None, None, ), # 1
  )

  def __init__(self, why=None,):
    self.why = why

  def read(self, iprot):
    """Populate this exception from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.why = iprot.readString()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('InvalidRequestException')
    if self.why is not None:
      oprot.writeFieldBegin('why', TType.STRING, 1)
      oprot.writeString(self.why)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'why' field was never set."""
    if self.why is None:
      raise TProtocol.TProtocolException(message='Required field why is unset!')

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class UnavailableException(Exception):
  """Raised when not all required replicas could be created and/or read."""

  thrift_spec = (
  )

  def read(self, iprot):
    """Populate this exception from the protocol *iprot* (no declared fields)."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      # No fields are declared; skip whatever the peer sent.
      iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('UnavailableException')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Nothing to validate: there are no required fields."""
    pass

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class TimedOutException(Exception):
  """Raised when the RPC timeout was exceeded.

  Either a node failed mid-operation, load was too high, or the requested
  operation was too large.

  Attributes:
   - acknowledged_by: if a write was acknowledged by some replicas but not by
     enough to satisfy the required ConsistencyLevel, the number of successful
     replies is given here. For atomic_batch_mutate this is -1 if the batch
     was written to the batchlog and 0 if it was not.
   - acknowledged_by_batchlog: for atomic_batch_mutate, whether the batch was
     written to the batchlog.
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'acknowledged_by', None, None, ), # 1
    (2, TType.BOOL, 'acknowledged_by_batchlog', None, None, ), # 2
  )

  def __init__(self, acknowledged_by=None, acknowledged_by_batchlog=None,):
    self.acknowledged_by = acknowledged_by
    self.acknowledged_by_batchlog = acknowledged_by_batchlog

  def read(self, iprot):
    """Populate this exception from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.acknowledged_by = iprot.readI32()
      elif fid == 2 and ftype == TType.BOOL:
        self.acknowledged_by_batchlog = iprot.readBool()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TimedOutException')
    if self.acknowledged_by is not None:
      oprot.writeFieldBegin('acknowledged_by', TType.I32, 1)
      oprot.writeI32(self.acknowledged_by)
      oprot.writeFieldEnd()
    if self.acknowledged_by_batchlog is not None:
      oprot.writeFieldBegin('acknowledged_by_batchlog', TType.BOOL, 2)
      oprot.writeBool(self.acknowledged_by_batchlog)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Nothing to validate: all fields are optional."""
    pass

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class AuthenticationException(Exception):
  """Raised for an invalid authentication request.

  Covers an invalid keyspace, a user that does not exist, or invalid
  credentials.

  Attributes:
   - why: human-readable explanation of the failure
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'why', None, None, ), # 1
  )

  def __init__(self, why=None,):
    self.why = why

  def read(self, iprot):
    """Populate this exception from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.why = iprot.readString()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('AuthenticationException')
    if self.why is not None:
      oprot.writeFieldBegin('why', TType.STRING, 1)
      oprot.writeString(self.why)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'why' field was never set."""
    if self.why is None:
      raise TProtocol.TProtocolException(message='Required field why is unset!')

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class AuthorizationException(Exception):
  """Raised for an invalid authorization request.

  For example, the user does not have access to the keyspace.

  Attributes:
   - why: human-readable explanation of the failure
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'why', None, None, ), # 1
  )

  def __init__(self, why=None,):
    self.why = why

  def read(self, iprot):
    """Populate this exception from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.why = iprot.readString()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('AuthorizationException')
    if self.why is not None:
      oprot.writeFieldBegin('why', TType.STRING, 1)
      oprot.writeString(self.why)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'why' field was never set."""
    if self.why is None:
      raise TProtocol.TProtocolException(message='Required field why is unset!')

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class SchemaDisagreementException(Exception):
  """Raised when schemas are not in agreement across all nodes.

  NOTE: this is an outdated exception kept only for backward compatibility;
  no actual schema agreement validation is done starting from Cassandra 1.2.
  """

  thrift_spec = (
  )

  def read(self, iprot):
    """Populate this exception from the protocol *iprot* (no declared fields)."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      # No fields are declared; skip whatever the peer sent.
      iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('SchemaDisagreementException')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Nothing to validate: there are no required fields."""
    pass

  def __str__(self):
    return repr(self)

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class ColumnParent:
  """Addresses a group of columns within one ColumnFamily.

  In directory-structure terms, think of ColumnParent as ColumnPath + '/../'.
  See also <a href="cassandra.html#Struct_ColumnPath">ColumnPath</a>.

  Attributes:
   - column_family: name of the column family (required)
   - super_column: optional super column name
  """

  thrift_spec = (
    None, # 0
    None, # 1
    None, # 2
    (3, TType.STRING, 'column_family', None, None, ), # 3
    (4, TType.STRING, 'super_column', None, None, ), # 4
  )

  def __init__(self, column_family=None, super_column=None,):
    self.column_family = column_family
    self.super_column = super_column

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 3 and ftype == TType.STRING:
        self.column_family = iprot.readString()
      elif fid == 4 and ftype == TType.STRING:
        self.super_column = iprot.readString()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ColumnParent')
    if self.column_family is not None:
      oprot.writeFieldBegin('column_family', TType.STRING, 3)
      oprot.writeString(self.column_family)
      oprot.writeFieldEnd()
    if self.super_column is not None:
      oprot.writeFieldBegin('super_column', TType.STRING, 4)
      oprot.writeString(self.super_column)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'column_family' field was never set."""
    if self.column_family is None:
      raise TProtocol.TProtocolException(message='Required field column_family is unset!')

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class ColumnPath:
  """Path to a single column in Cassandra.

  It can help to think of ColumnPath and ColumnParent as a directory
  structure; ColumnPath is used to look up a single column.

  Attributes:
   - column_family: name of the CF of the column being looked up (required)
   - super_column: optional super column name
   - column: optional column name
  """

  thrift_spec = (
    None, # 0
    None, # 1
    None, # 2
    (3, TType.STRING, 'column_family', None, None, ), # 3
    (4, TType.STRING, 'super_column', None, None, ), # 4
    (5, TType.STRING, 'column', None, None, ), # 5
  )

  def __init__(self, column_family=None, super_column=None, column=None,):
    self.column_family = column_family
    self.super_column = super_column
    self.column = column

  def read(self, iprot):
    """Populate this struct from the protocol *iprot*."""
    # Fast path: the accelerated C decoder handles the whole struct at once.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 3 and ftype == TType.STRING:
        self.column_family = iprot.readString()
      elif fid == 4 and ftype == TType.STRING:
        self.super_column = iprot.readString()
      elif fid == 5 and ftype == TType.STRING:
        self.column = iprot.readString()
      else:
        # Unknown or mistyped field: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    accelerated = (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ColumnPath')
    if self.column_family is not None:
      oprot.writeFieldBegin('column_family', TType.STRING, 3)
      oprot.writeString(self.column_family)
      oprot.writeFieldEnd()
    if self.super_column is not None:
      oprot.writeFieldBegin('super_column', TType.STRING, 4)
      oprot.writeString(self.super_column)
      oprot.writeFieldEnd()
    if self.column is not None:
      oprot.writeFieldBegin('column', TType.STRING, 5)
      oprot.writeString(self.column)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required 'column_family' field was never set."""
    if self.column_family is None:
      raise TProtocol.TProtocolException(message='Required field column_family is unset!')

  def __repr__(self):
    pairs = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not self.__eq__(other)
class SliceRange:
"""
A slice range is a structure that stores basic range, ordering and limit information for a query that will return
multiple columns. It could be thought of as Cassandra's version of LIMIT and ORDER BY
@param start. The column name to start the slice with. This attribute is not required, though there is no default value,
and can be safely set to '', i.e., an empty byte array, to start with the first column name. Otherwise, it
must a valid value under the rules of the Comparator defined for the given ColumnFamily.
@param finish. The column name to stop the slice at. This attribute is not required, though there is no default value,
and can be safely set to an empty byte array to not stop until 'count' results are seen. Otherwise, it
must also be a valid value to the ColumnFamily Comparator.
@param reversed. Whether the results should be ordered in reversed order. Similar to ORDER BY blah DESC in SQL.
@param count. How many columns to return. Similar to LIMIT in SQL. May be arbitrarily large, but Thrift will
materialize the whole result into memory before returning it to the client, so be aware that you may
be better served by iterating through slices by passing the last value of one call in as the 'start'
of the next instead of increasing 'count' arbitrarily large.
Attributes:
- start
- finish
- reversed
- count
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'start', None, None, ), # 1
(2, TType.STRING, 'finish', None, None, ), # 2
(3, TType.BOOL, 'reversed', None, False, ), # 3
(4, TType.I32, 'count', None, 100, ), # 4
)
def __init__(self, start=None, finish=None, reversed=thrift_spec[3][4], count=thrift_spec[4][4],):
self.start = start
self.finish = finish
self.reversed = reversed
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.start = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.finish = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.reversed = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SliceRange')
if self.start is not None:
oprot.writeFieldBegin('start', TType.STRING, 1)
oprot.writeString(self.start)
oprot.writeFieldEnd()
if self.finish is not None:
oprot.writeFieldBegin('finish', TType.STRING, 2)
oprot.writeString(self.finish)
oprot.writeFieldEnd()
if self.reversed is not None:
oprot.writeFieldBegin('reversed', TType.BOOL, 3)
oprot.writeBool(self.reversed)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 4)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.start is None:
raise TProtocol.TProtocolException(message='Required field start is unset!')
if self.finish is None:
raise TProtocol.TProtocolException(message='Required field finish is unset!')
if self.reversed is None:
raise TProtocol.TProtocolException(message='Required field reversed is unset!')
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
  def __repr__(self):
    """Debug representation listing every instance attribute (Python 2 iteritems)."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Structs are equal when they are the same class with identical attributes."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    """Inverse of __eq__ (required explicitly on Python 2)."""
    return not (self == other)
class SlicePredicate:
  """
  A SlicePredicate is similar to a mathematic predicate (see http://en.wikipedia.org/wiki/Predicate_(mathematical_logic)),
  which is described as "a property that the elements of a set have in common."
  SlicePredicate's in Cassandra are described with either a list of column_names or a SliceRange. If column_names is
  specified, slice_range is ignored.
  @param column_name. A list of column names to retrieve. This can be used similar to Memcached's "multi-get" feature
                      to fetch N known column names. For instance, if you know you wish to fetch columns 'Joe', 'Jack',
                      and 'Jim' you can pass those column names as a list to fetch all three at once.
  @param slice_range. A SliceRange describing how to range, order, and/or limit the slice.
  Attributes:
   - column_names
   - slice_range
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'column_names', (TType.STRING,None), None, ), # 1
    (2, TType.STRUCT, 'slice_range', (SliceRange, SliceRange.thrift_spec), None, ), # 2
  )

  def __init__(self, column_names=None, slice_range=None,):
    self.column_names = column_names
    self.slice_range = slice_range

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.column_names = []
          (_etype17, _size14) = iprot.readListBegin()
          for _i18 in xrange(_size14):
            _elem19 = iprot.readString();
            self.column_names.append(_elem19)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.slice_range = SliceRange()
          self.slice_range.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('SlicePredicate')
    if self.column_names is not None:
      oprot.writeFieldBegin('column_names', TType.LIST, 1)
      oprot.writeListBegin(TType.STRING, len(self.column_names))
      for iter20 in self.column_names:
        oprot.writeString(iter20)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.slice_range is not None:
      oprot.writeFieldBegin('slice_range', TType.STRUCT, 2)
      self.slice_range.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required fields; always valid."""
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class IndexExpression:
  """
  A single comparison ("column op value") used inside an IndexClause / row filter.
  Attributes:
   - column_name
   - op: an IndexOperator enum value (I32 on the wire)
   - value
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'column_name', None, None, ), # 1
    (2, TType.I32, 'op', None, None, ), # 2
    (3, TType.STRING, 'value', None, None, ), # 3
  )

  def __init__(self, column_name=None, op=None, value=None,):
    self.column_name = column_name
    self.op = op
    self.value = value

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.column_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.op = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.value = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('IndexExpression')
    if self.column_name is not None:
      oprot.writeFieldBegin('column_name', TType.STRING, 1)
      oprot.writeString(self.column_name)
      oprot.writeFieldEnd()
    if self.op is not None:
      oprot.writeFieldBegin('op', TType.I32, 2)
      oprot.writeI32(self.op)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 3)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset."""
    if self.column_name is None:
      raise TProtocol.TProtocolException(message='Required field column_name is unset!')
    if self.op is None:
      raise TProtocol.TProtocolException(message='Required field op is unset!')
    if self.value is None:
      raise TProtocol.TProtocolException(message='Required field value is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class IndexClause:
  """
  @deprecated use a KeyRange with row_filter in get_range_slices instead
  Attributes:
   - expressions
   - start_key
   - count: page size; defaults to 100 (thrift_spec[3][4])
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'expressions', (TType.STRUCT,(IndexExpression, IndexExpression.thrift_spec)), None, ), # 1
    (2, TType.STRING, 'start_key', None, None, ), # 2
    (3, TType.I32, 'count', None, 100, ), # 3
  )

  def __init__(self, expressions=None, start_key=None, count=thrift_spec[3][4],):
    self.expressions = expressions
    self.start_key = start_key
    self.count = count

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.expressions = []
          (_etype24, _size21) = iprot.readListBegin()
          for _i25 in xrange(_size21):
            _elem26 = IndexExpression()
            _elem26.read(iprot)
            self.expressions.append(_elem26)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.start_key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.count = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('IndexClause')
    if self.expressions is not None:
      oprot.writeFieldBegin('expressions', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.expressions))
      for iter27 in self.expressions:
        iter27.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.start_key is not None:
      oprot.writeFieldBegin('start_key', TType.STRING, 2)
      oprot.writeString(self.start_key)
      oprot.writeFieldEnd()
    if self.count is not None:
      oprot.writeFieldBegin('count', TType.I32, 3)
      oprot.writeI32(self.count)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset."""
    if self.expressions is None:
      raise TProtocol.TProtocolException(message='Required field expressions is unset!')
    if self.start_key is None:
      raise TProtocol.TProtocolException(message='Required field start_key is unset!')
    if self.count is None:
      raise TProtocol.TProtocolException(message='Required field count is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class KeyRange:
  """
  The semantics of start keys and tokens are slightly different.
  Keys are start-inclusive; tokens are start-exclusive. Token
  ranges may also wrap -- that is, the end token may be less
  than the start one. Thus, a range from keyX to keyX is a
  one-element range, but a range from tokenY to tokenY is the
  full ring.
  Attributes:
   - start_key
   - end_key
   - start_token
   - end_token
   - row_filter
   - count: page size; defaults to 100 (thrift_spec[5][4])
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'start_key', None, None, ), # 1
    (2, TType.STRING, 'end_key', None, None, ), # 2
    (3, TType.STRING, 'start_token', None, None, ), # 3
    (4, TType.STRING, 'end_token', None, None, ), # 4
    (5, TType.I32, 'count', None, 100, ), # 5
    (6, TType.LIST, 'row_filter', (TType.STRUCT,(IndexExpression, IndexExpression.thrift_spec)), None, ), # 6
  )

  def __init__(self, start_key=None, end_key=None, start_token=None, end_token=None, row_filter=None, count=thrift_spec[5][4],):
    self.start_key = start_key
    self.end_key = end_key
    self.start_token = start_token
    self.end_token = end_token
    self.row_filter = row_filter
    self.count = count

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.start_key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.end_key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.start_token = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.end_token = iprot.readString();
        else:
          iprot.skip(ftype)
      # NOTE: fields may arrive in any order; fid 6 is simply checked before fid 5 here.
      elif fid == 6:
        if ftype == TType.LIST:
          self.row_filter = []
          (_etype31, _size28) = iprot.readListBegin()
          for _i32 in xrange(_size28):
            _elem33 = IndexExpression()
            _elem33.read(iprot)
            self.row_filter.append(_elem33)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I32:
          self.count = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('KeyRange')
    if self.start_key is not None:
      oprot.writeFieldBegin('start_key', TType.STRING, 1)
      oprot.writeString(self.start_key)
      oprot.writeFieldEnd()
    if self.end_key is not None:
      oprot.writeFieldBegin('end_key', TType.STRING, 2)
      oprot.writeString(self.end_key)
      oprot.writeFieldEnd()
    if self.start_token is not None:
      oprot.writeFieldBegin('start_token', TType.STRING, 3)
      oprot.writeString(self.start_token)
      oprot.writeFieldEnd()
    if self.end_token is not None:
      oprot.writeFieldBegin('end_token', TType.STRING, 4)
      oprot.writeString(self.end_token)
      oprot.writeFieldEnd()
    if self.count is not None:
      oprot.writeFieldBegin('count', TType.I32, 5)
      oprot.writeI32(self.count)
      oprot.writeFieldEnd()
    if self.row_filter is not None:
      oprot.writeFieldBegin('row_filter', TType.LIST, 6)
      oprot.writeListBegin(TType.STRUCT, len(self.row_filter))
      for iter34 in self.row_filter:
        iter34.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required count field is unset."""
    if self.count is None:
      raise TProtocol.TProtocolException(message='Required field count is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class KeySlice:
  """
  A KeySlice is key followed by the data it maps to. A collection of KeySlice is returned by the get_range_slice operation.
  @param key. a row key
  @param columns. List of data represented by the key. Typically, the list is pared down to only the columns specified by
                  a SlicePredicate.
  Attributes:
   - key
   - columns
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'key', None, None, ), # 1
    (2, TType.LIST, 'columns', (TType.STRUCT,(ColumnOrSuperColumn, ColumnOrSuperColumn.thrift_spec)), None, ), # 2
  )

  def __init__(self, key=None, columns=None,):
    self.key = key
    self.columns = columns

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.columns = []
          (_etype38, _size35) = iprot.readListBegin()
          for _i39 in xrange(_size35):
            _elem40 = ColumnOrSuperColumn()
            _elem40.read(iprot)
            self.columns.append(_elem40)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('KeySlice')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 1)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for iter41 in self.columns:
        iter41.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset."""
    if self.key is None:
      raise TProtocol.TProtocolException(message='Required field key is unset!')
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class KeyCount:
  """
  A (row key, column count) pair, as returned by multiget_count.
  Attributes:
   - key
   - count
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'key', None, None, ), # 1
    (2, TType.I32, 'count', None, None, ), # 2
  )

  def __init__(self, key=None, count=None,):
    self.key = key
    self.count = count

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.count = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('KeyCount')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 1)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.count is not None:
      oprot.writeFieldBegin('count', TType.I32, 2)
      oprot.writeI32(self.count)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset."""
    if self.key is None:
      raise TProtocol.TProtocolException(message='Required field key is unset!')
    if self.count is None:
      raise TProtocol.TProtocolException(message='Required field count is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class Deletion:
  """
  Note that the timestamp is only optional in case of counter deletion.
  Attributes:
   - timestamp
   - super_column
   - predicate
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.I64, 'timestamp', None, None, ), # 1
    (2, TType.STRING, 'super_column', None, None, ), # 2
    (3, TType.STRUCT, 'predicate', (SlicePredicate, SlicePredicate.thrift_spec), None, ), # 3
  )

  def __init__(self, timestamp=None, super_column=None, predicate=None,):
    self.timestamp = timestamp
    self.super_column = super_column
    self.predicate = predicate

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I64:
          self.timestamp = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.super_column = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.predicate = SlicePredicate()
          self.predicate.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('Deletion')
    if self.timestamp is not None:
      oprot.writeFieldBegin('timestamp', TType.I64, 1)
      oprot.writeI64(self.timestamp)
      oprot.writeFieldEnd()
    if self.super_column is not None:
      oprot.writeFieldBegin('super_column', TType.STRING, 2)
      oprot.writeString(self.super_column)
      oprot.writeFieldEnd()
    if self.predicate is not None:
      oprot.writeFieldBegin('predicate', TType.STRUCT, 3)
      self.predicate.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required fields; always valid."""
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class Mutation:
  """
  A Mutation is either an insert (represented by filling column_or_supercolumn) or a deletion (represented by filling the deletion attribute).
  @param column_or_supercolumn. An insert to a column or supercolumn (possibly counter column or supercolumn)
  @param deletion. A deletion of a column or supercolumn
  Attributes:
   - column_or_supercolumn
   - deletion
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'column_or_supercolumn', (ColumnOrSuperColumn, ColumnOrSuperColumn.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'deletion', (Deletion, Deletion.thrift_spec), None, ), # 2
  )

  def __init__(self, column_or_supercolumn=None, deletion=None,):
    self.column_or_supercolumn = column_or_supercolumn
    self.deletion = deletion

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.column_or_supercolumn = ColumnOrSuperColumn()
          self.column_or_supercolumn.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.deletion = Deletion()
          self.deletion.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('Mutation')
    if self.column_or_supercolumn is not None:
      oprot.writeFieldBegin('column_or_supercolumn', TType.STRUCT, 1)
      self.column_or_supercolumn.write(oprot)
      oprot.writeFieldEnd()
    if self.deletion is not None:
      oprot.writeFieldBegin('deletion', TType.STRUCT, 2)
      self.deletion.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required fields; always valid."""
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class EndpointDetails:
  """
  Host/datacenter/rack details for one endpoint of a TokenRange.
  Attributes:
   - host
   - datacenter
   - rack
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'host', None, None, ), # 1
    (2, TType.STRING, 'datacenter', None, None, ), # 2
    (3, TType.STRING, 'rack', None, None, ), # 3
  )

  def __init__(self, host=None, datacenter=None, rack=None,):
    self.host = host
    self.datacenter = datacenter
    self.rack = rack

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.host = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.datacenter = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.rack = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('EndpointDetails')
    if self.host is not None:
      oprot.writeFieldBegin('host', TType.STRING, 1)
      oprot.writeString(self.host)
      oprot.writeFieldEnd()
    if self.datacenter is not None:
      oprot.writeFieldBegin('datacenter', TType.STRING, 2)
      oprot.writeString(self.datacenter)
      oprot.writeFieldEnd()
    if self.rack is not None:
      oprot.writeFieldBegin('rack', TType.STRING, 3)
      oprot.writeString(self.rack)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required fields; always valid."""
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TokenRange:
  """
  A TokenRange describes part of the Cassandra ring, it is a mapping from a range to
  endpoints responsible for that range.
  @param start_token The first token in the range
  @param end_token The last token in the range
  @param endpoints The endpoints responsible for the range (listed by their configured listen_address)
  @param rpc_endpoints The endpoints responsible for the range (listed by their configured rpc_address)
  Attributes:
   - start_token
   - end_token
   - endpoints
   - rpc_endpoints
   - endpoint_details
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'start_token', None, None, ), # 1
    (2, TType.STRING, 'end_token', None, None, ), # 2
    (3, TType.LIST, 'endpoints', (TType.STRING,None), None, ), # 3
    (4, TType.LIST, 'rpc_endpoints', (TType.STRING,None), None, ), # 4
    (5, TType.LIST, 'endpoint_details', (TType.STRUCT,(EndpointDetails, EndpointDetails.thrift_spec)), None, ), # 5
  )

  def __init__(self, start_token=None, end_token=None, endpoints=None, rpc_endpoints=None, endpoint_details=None,):
    self.start_token = start_token
    self.end_token = end_token
    self.endpoints = endpoints
    self.rpc_endpoints = rpc_endpoints
    self.endpoint_details = endpoint_details

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.start_token = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.end_token = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.endpoints = []
          (_etype45, _size42) = iprot.readListBegin()
          for _i46 in xrange(_size42):
            _elem47 = iprot.readString();
            self.endpoints.append(_elem47)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.rpc_endpoints = []
          (_etype51, _size48) = iprot.readListBegin()
          for _i52 in xrange(_size48):
            _elem53 = iprot.readString();
            self.rpc_endpoints.append(_elem53)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.endpoint_details = []
          (_etype57, _size54) = iprot.readListBegin()
          for _i58 in xrange(_size54):
            _elem59 = EndpointDetails()
            _elem59.read(iprot)
            self.endpoint_details.append(_elem59)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TokenRange')
    if self.start_token is not None:
      oprot.writeFieldBegin('start_token', TType.STRING, 1)
      oprot.writeString(self.start_token)
      oprot.writeFieldEnd()
    if self.end_token is not None:
      oprot.writeFieldBegin('end_token', TType.STRING, 2)
      oprot.writeString(self.end_token)
      oprot.writeFieldEnd()
    if self.endpoints is not None:
      oprot.writeFieldBegin('endpoints', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.endpoints))
      for iter60 in self.endpoints:
        oprot.writeString(iter60)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.rpc_endpoints is not None:
      oprot.writeFieldBegin('rpc_endpoints', TType.LIST, 4)
      oprot.writeListBegin(TType.STRING, len(self.rpc_endpoints))
      for iter61 in self.rpc_endpoints:
        oprot.writeString(iter61)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.endpoint_details is not None:
      oprot.writeFieldBegin('endpoint_details', TType.LIST, 5)
      oprot.writeListBegin(TType.STRUCT, len(self.endpoint_details))
      for iter62 in self.endpoint_details:
        iter62.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset."""
    if self.start_token is None:
      raise TProtocol.TProtocolException(message='Required field start_token is unset!')
    if self.end_token is None:
      raise TProtocol.TProtocolException(message='Required field end_token is unset!')
    if self.endpoints is None:
      raise TProtocol.TProtocolException(message='Required field endpoints is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class AuthenticationRequest:
  """
  Authentication requests can contain any data, dependent on the IAuthenticator used
  Attributes:
   - credentials: string-to-string map of authenticator-specific data
  """

  # Per-field spec tuples: (field id, type, name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'credentials', (TType.STRING,None,TType.STRING,None), None, ), # 1
  )

  def __init__(self, credentials=None,):
    self.credentials = credentials

  def read(self, iprot):
    """Populate this struct from *iprot*, skipping unknown fields."""
    # Fast path: use the C-accelerated decoder when the transport supports it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          self.credentials = {}
          (_ktype64, _vtype65, _size63 ) = iprot.readMapBegin()
          for _i67 in xrange(_size63):
            _key68 = iprot.readString();
            _val69 = iprot.readString();
            self.credentials[_key68] = _val69
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; field order defines the wire layout."""
    # Fast path: use the C-accelerated encoder when the protocol supports it.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('AuthenticationRequest')
    if self.credentials is not None:
      oprot.writeFieldBegin('credentials', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.credentials))
      for kiter70,viter71 in self.credentials.items():
        oprot.writeString(kiter70)
        oprot.writeString(viter71)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required credentials field is unset."""
    if self.credentials is None:
      raise TProtocol.TProtocolException(message='Required field credentials is unset!')
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ColumnDef:
"""
Attributes:
- name
- validation_class
- index_type
- index_name
- index_options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'validation_class', None, None, ), # 2
(3, TType.I32, 'index_type', None, None, ), # 3
(4, TType.STRING, 'index_name', None, None, ), # 4
(5, TType.MAP, 'index_options', (TType.STRING,None,TType.STRING,None), None, ), # 5
)
  def __init__(self, name=None, validation_class=None, index_type=None, index_name=None, index_options=None,):
    """Initialize a ColumnDef; all fields default to None (unset)."""
    self.name = name
    self.validation_class = validation_class
    self.index_type = index_type
    self.index_name = index_name
    self.index_options = index_options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.validation_class = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.index_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.index_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.MAP:
self.index_options = {}
(_ktype73, _vtype74, _size72 ) = iprot.readMapBegin()
for _i76 in xrange(_size72):
_key77 = iprot.readString();
_val78 = iprot.readString();
self.index_options[_key77] = _val78
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ColumnDef')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.validation_class is not None:
oprot.writeFieldBegin('validation_class', TType.STRING, 2)
oprot.writeString(self.validation_class)
oprot.writeFieldEnd()
if self.index_type is not None:
oprot.writeFieldBegin('index_type', TType.I32, 3)
oprot.writeI32(self.index_type)
oprot.writeFieldEnd()
if self.index_name is not None:
oprot.writeFieldBegin('index_name', TType.STRING, 4)
oprot.writeString(self.index_name)
oprot.writeFieldEnd()
if self.index_options is not None:
oprot.writeFieldBegin('index_options', TType.MAP, 5)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.index_options))
for kiter79,viter80 in self.index_options.items():
oprot.writeString(kiter79)
oprot.writeString(viter80)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.validation_class is None:
raise TProtocol.TProtocolException(message='Required field validation_class is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CfDef:
  """
  Thrift-generated struct: full definition of a column family, including
  comparator/validation classes, compaction, compression and caching
  settings.  Fields tagged @deprecated are retained only for wire
  compatibility with older servers.

  Attributes:
   - keyspace
   - name
   - column_type
   - comparator_type
   - subcomparator_type
   - comment
   - read_repair_chance
   - column_metadata
   - gc_grace_seconds
   - default_validation_class
   - id
   - min_compaction_threshold
   - max_compaction_threshold
   - replicate_on_write
   - key_validation_class
   - key_alias
   - compaction_strategy
   - compaction_strategy_options
   - compression_options
   - bloom_filter_fp_chance
   - caching
   - dclocal_read_repair_chance
   - row_cache_size: @deprecated
   - key_cache_size: @deprecated
   - row_cache_save_period_in_seconds: @deprecated
   - key_cache_save_period_in_seconds: @deprecated
   - memtable_flush_after_mins: @deprecated
   - memtable_throughput_in_mb: @deprecated
   - memtable_operations_in_millions: @deprecated
   - merge_shards_chance: @deprecated
   - row_cache_provider: @deprecated
   - row_cache_keys_to_save: @deprecated
  """

  # Per-field wire metadata: (field id, TType, name, nested type spec, default).
  # Tuple index equals the Thrift field id; None marks retired/unused ids.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'keyspace', None, None, ), # 1
    (2, TType.STRING, 'name', None, None, ), # 2
    (3, TType.STRING, 'column_type', None, "Standard", ), # 3
    None, # 4
    (5, TType.STRING, 'comparator_type', None, "BytesType", ), # 5
    (6, TType.STRING, 'subcomparator_type', None, None, ), # 6
    None, # 7
    (8, TType.STRING, 'comment', None, None, ), # 8
    (9, TType.DOUBLE, 'row_cache_size', None, None, ), # 9
    None, # 10
    (11, TType.DOUBLE, 'key_cache_size', None, None, ), # 11
    (12, TType.DOUBLE, 'read_repair_chance', None, None, ), # 12
    (13, TType.LIST, 'column_metadata', (TType.STRUCT,(ColumnDef, ColumnDef.thrift_spec)), None, ), # 13
    (14, TType.I32, 'gc_grace_seconds', None, None, ), # 14
    (15, TType.STRING, 'default_validation_class', None, None, ), # 15
    (16, TType.I32, 'id', None, None, ), # 16
    (17, TType.I32, 'min_compaction_threshold', None, None, ), # 17
    (18, TType.I32, 'max_compaction_threshold', None, None, ), # 18
    (19, TType.I32, 'row_cache_save_period_in_seconds', None, None, ), # 19
    (20, TType.I32, 'key_cache_save_period_in_seconds', None, None, ), # 20
    (21, TType.I32, 'memtable_flush_after_mins', None, None, ), # 21
    (22, TType.I32, 'memtable_throughput_in_mb', None, None, ), # 22
    (23, TType.DOUBLE, 'memtable_operations_in_millions', None, None, ), # 23
    (24, TType.BOOL, 'replicate_on_write', None, None, ), # 24
    (25, TType.DOUBLE, 'merge_shards_chance', None, None, ), # 25
    (26, TType.STRING, 'key_validation_class', None, None, ), # 26
    (27, TType.STRING, 'row_cache_provider', None, None, ), # 27
    (28, TType.STRING, 'key_alias', None, None, ), # 28
    (29, TType.STRING, 'compaction_strategy', None, None, ), # 29
    (30, TType.MAP, 'compaction_strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 30
    (31, TType.I32, 'row_cache_keys_to_save', None, None, ), # 31
    (32, TType.MAP, 'compression_options', (TType.STRING,None,TType.STRING,None), None, ), # 32
    (33, TType.DOUBLE, 'bloom_filter_fp_chance', None, None, ), # 33
    (34, TType.STRING, 'caching', None, "keys_only", ), # 34
    None, # 35
    None, # 36
    (37, TType.DOUBLE, 'dclocal_read_repair_chance', None, 0, ), # 37
  )

  # Defaults for column_type, comparator_type, caching and
  # dclocal_read_repair_chance are pulled from thrift_spec above.
  def __init__(self, keyspace=None, name=None, column_type=thrift_spec[3][4], comparator_type=thrift_spec[5][4], subcomparator_type=None, comment=None, read_repair_chance=None, column_metadata=None, gc_grace_seconds=None, default_validation_class=None, id=None, min_compaction_threshold=None, max_compaction_threshold=None, replicate_on_write=None, key_validation_class=None, key_alias=None, compaction_strategy=None, compaction_strategy_options=None, compression_options=None, bloom_filter_fp_chance=None, caching=thrift_spec[34][4], dclocal_read_repair_chance=thrift_spec[37][4], row_cache_size=None, key_cache_size=None, row_cache_save_period_in_seconds=None, key_cache_save_period_in_seconds=None, memtable_flush_after_mins=None, memtable_throughput_in_mb=None, memtable_operations_in_millions=None, merge_shards_chance=None, row_cache_provider=None, row_cache_keys_to_save=None,):
    self.keyspace = keyspace
    self.name = name
    self.column_type = column_type
    self.comparator_type = comparator_type
    self.subcomparator_type = subcomparator_type
    self.comment = comment
    self.read_repair_chance = read_repair_chance
    self.column_metadata = column_metadata
    self.gc_grace_seconds = gc_grace_seconds
    self.default_validation_class = default_validation_class
    self.id = id
    self.min_compaction_threshold = min_compaction_threshold
    self.max_compaction_threshold = max_compaction_threshold
    self.replicate_on_write = replicate_on_write
    self.key_validation_class = key_validation_class
    self.key_alias = key_alias
    self.compaction_strategy = compaction_strategy
    self.compaction_strategy_options = compaction_strategy_options
    self.compression_options = compression_options
    self.bloom_filter_fp_chance = bloom_filter_fp_chance
    self.caching = caching
    self.dclocal_read_repair_chance = dclocal_read_repair_chance
    self.row_cache_size = row_cache_size
    self.key_cache_size = key_cache_size
    self.row_cache_save_period_in_seconds = row_cache_save_period_in_seconds
    self.key_cache_save_period_in_seconds = key_cache_save_period_in_seconds
    self.memtable_flush_after_mins = memtable_flush_after_mins
    self.memtable_throughput_in_mb = memtable_throughput_in_mb
    self.memtable_operations_in_millions = memtable_operations_in_millions
    self.merge_shards_chance = merge_shards_chance
    self.row_cache_provider = row_cache_provider
    self.row_cache_keys_to_save = row_cache_keys_to_save

  def read(self, iprot):
    """Populate this struct from the given input protocol."""
    # Fast path: C-accelerated decoder when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    # Generic path: consume fields until STOP; unknown ids/types are skipped.
    # Note the branches are not in numeric order: deprecated field ids
    # (9, 11, 19-23, 25, 27, 31) are handled after the current ones.
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.keyspace = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.column_type = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.comparator_type = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRING:
          self.subcomparator_type = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 8:
        if ftype == TType.STRING:
          self.comment = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 12:
        if ftype == TType.DOUBLE:
          self.read_repair_chance = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 13:
        if ftype == TType.LIST:
          # List of nested ColumnDef structs.
          self.column_metadata = []
          (_etype84, _size81) = iprot.readListBegin()
          for _i85 in xrange(_size81):
            _elem86 = ColumnDef()
            _elem86.read(iprot)
            self.column_metadata.append(_elem86)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 14:
        if ftype == TType.I32:
          self.gc_grace_seconds = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 15:
        if ftype == TType.STRING:
          self.default_validation_class = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 16:
        if ftype == TType.I32:
          self.id = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 17:
        if ftype == TType.I32:
          self.min_compaction_threshold = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 18:
        if ftype == TType.I32:
          self.max_compaction_threshold = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 24:
        if ftype == TType.BOOL:
          self.replicate_on_write = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 26:
        if ftype == TType.STRING:
          self.key_validation_class = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 28:
        if ftype == TType.STRING:
          self.key_alias = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 29:
        if ftype == TType.STRING:
          self.compaction_strategy = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 30:
        if ftype == TType.MAP:
          # string->string map of compaction strategy options.
          self.compaction_strategy_options = {}
          (_ktype88, _vtype89, _size87 ) = iprot.readMapBegin()
          for _i91 in xrange(_size87):
            _key92 = iprot.readString();
            _val93 = iprot.readString();
            self.compaction_strategy_options[_key92] = _val93
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 32:
        if ftype == TType.MAP:
          # string->string map of compression options.
          self.compression_options = {}
          (_ktype95, _vtype96, _size94 ) = iprot.readMapBegin()
          for _i98 in xrange(_size94):
            _key99 = iprot.readString();
            _val100 = iprot.readString();
            self.compression_options[_key99] = _val100
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 33:
        if ftype == TType.DOUBLE:
          self.bloom_filter_fp_chance = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 34:
        if ftype == TType.STRING:
          self.caching = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 37:
        if ftype == TType.DOUBLE:
          self.dclocal_read_repair_chance = iprot.readDouble();
        else:
          iprot.skip(ftype)
      # Deprecated fields below are still read for backward compatibility.
      elif fid == 9:
        if ftype == TType.DOUBLE:
          self.row_cache_size = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 11:
        if ftype == TType.DOUBLE:
          self.key_cache_size = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 19:
        if ftype == TType.I32:
          self.row_cache_save_period_in_seconds = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 20:
        if ftype == TType.I32:
          self.key_cache_save_period_in_seconds = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 21:
        if ftype == TType.I32:
          self.memtable_flush_after_mins = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 22:
        if ftype == TType.I32:
          self.memtable_throughput_in_mb = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 23:
        if ftype == TType.DOUBLE:
          self.memtable_operations_in_millions = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 25:
        if ftype == TType.DOUBLE:
          self.merge_shards_chance = iprot.readDouble();
        else:
          iprot.skip(ftype)
      elif fid == 27:
        if ftype == TType.STRING:
          self.row_cache_provider = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 31:
        if ftype == TType.I32:
          self.row_cache_keys_to_save = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct onto the given output protocol.

    Fields are written in ascending field-id order (including deprecated
    ids interleaved), exactly as generated.
    """
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CfDef')
    if self.keyspace is not None:
      oprot.writeFieldBegin('keyspace', TType.STRING, 1)
      oprot.writeString(self.keyspace)
      oprot.writeFieldEnd()
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 2)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.column_type is not None:
      oprot.writeFieldBegin('column_type', TType.STRING, 3)
      oprot.writeString(self.column_type)
      oprot.writeFieldEnd()
    if self.comparator_type is not None:
      oprot.writeFieldBegin('comparator_type', TType.STRING, 5)
      oprot.writeString(self.comparator_type)
      oprot.writeFieldEnd()
    if self.subcomparator_type is not None:
      oprot.writeFieldBegin('subcomparator_type', TType.STRING, 6)
      oprot.writeString(self.subcomparator_type)
      oprot.writeFieldEnd()
    if self.comment is not None:
      oprot.writeFieldBegin('comment', TType.STRING, 8)
      oprot.writeString(self.comment)
      oprot.writeFieldEnd()
    if self.row_cache_size is not None:
      oprot.writeFieldBegin('row_cache_size', TType.DOUBLE, 9)
      oprot.writeDouble(self.row_cache_size)
      oprot.writeFieldEnd()
    if self.key_cache_size is not None:
      oprot.writeFieldBegin('key_cache_size', TType.DOUBLE, 11)
      oprot.writeDouble(self.key_cache_size)
      oprot.writeFieldEnd()
    if self.read_repair_chance is not None:
      oprot.writeFieldBegin('read_repair_chance', TType.DOUBLE, 12)
      oprot.writeDouble(self.read_repair_chance)
      oprot.writeFieldEnd()
    if self.column_metadata is not None:
      oprot.writeFieldBegin('column_metadata', TType.LIST, 13)
      oprot.writeListBegin(TType.STRUCT, len(self.column_metadata))
      for iter101 in self.column_metadata:
        iter101.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.gc_grace_seconds is not None:
      oprot.writeFieldBegin('gc_grace_seconds', TType.I32, 14)
      oprot.writeI32(self.gc_grace_seconds)
      oprot.writeFieldEnd()
    if self.default_validation_class is not None:
      oprot.writeFieldBegin('default_validation_class', TType.STRING, 15)
      oprot.writeString(self.default_validation_class)
      oprot.writeFieldEnd()
    if self.id is not None:
      oprot.writeFieldBegin('id', TType.I32, 16)
      oprot.writeI32(self.id)
      oprot.writeFieldEnd()
    if self.min_compaction_threshold is not None:
      oprot.writeFieldBegin('min_compaction_threshold', TType.I32, 17)
      oprot.writeI32(self.min_compaction_threshold)
      oprot.writeFieldEnd()
    if self.max_compaction_threshold is not None:
      oprot.writeFieldBegin('max_compaction_threshold', TType.I32, 18)
      oprot.writeI32(self.max_compaction_threshold)
      oprot.writeFieldEnd()
    if self.row_cache_save_period_in_seconds is not None:
      oprot.writeFieldBegin('row_cache_save_period_in_seconds', TType.I32, 19)
      oprot.writeI32(self.row_cache_save_period_in_seconds)
      oprot.writeFieldEnd()
    if self.key_cache_save_period_in_seconds is not None:
      oprot.writeFieldBegin('key_cache_save_period_in_seconds', TType.I32, 20)
      oprot.writeI32(self.key_cache_save_period_in_seconds)
      oprot.writeFieldEnd()
    if self.memtable_flush_after_mins is not None:
      oprot.writeFieldBegin('memtable_flush_after_mins', TType.I32, 21)
      oprot.writeI32(self.memtable_flush_after_mins)
      oprot.writeFieldEnd()
    if self.memtable_throughput_in_mb is not None:
      oprot.writeFieldBegin('memtable_throughput_in_mb', TType.I32, 22)
      oprot.writeI32(self.memtable_throughput_in_mb)
      oprot.writeFieldEnd()
    if self.memtable_operations_in_millions is not None:
      oprot.writeFieldBegin('memtable_operations_in_millions', TType.DOUBLE, 23)
      oprot.writeDouble(self.memtable_operations_in_millions)
      oprot.writeFieldEnd()
    if self.replicate_on_write is not None:
      oprot.writeFieldBegin('replicate_on_write', TType.BOOL, 24)
      oprot.writeBool(self.replicate_on_write)
      oprot.writeFieldEnd()
    if self.merge_shards_chance is not None:
      oprot.writeFieldBegin('merge_shards_chance', TType.DOUBLE, 25)
      oprot.writeDouble(self.merge_shards_chance)
      oprot.writeFieldEnd()
    if self.key_validation_class is not None:
      oprot.writeFieldBegin('key_validation_class', TType.STRING, 26)
      oprot.writeString(self.key_validation_class)
      oprot.writeFieldEnd()
    if self.row_cache_provider is not None:
      oprot.writeFieldBegin('row_cache_provider', TType.STRING, 27)
      oprot.writeString(self.row_cache_provider)
      oprot.writeFieldEnd()
    if self.key_alias is not None:
      oprot.writeFieldBegin('key_alias', TType.STRING, 28)
      oprot.writeString(self.key_alias)
      oprot.writeFieldEnd()
    if self.compaction_strategy is not None:
      oprot.writeFieldBegin('compaction_strategy', TType.STRING, 29)
      oprot.writeString(self.compaction_strategy)
      oprot.writeFieldEnd()
    if self.compaction_strategy_options is not None:
      oprot.writeFieldBegin('compaction_strategy_options', TType.MAP, 30)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compaction_strategy_options))
      for kiter102,viter103 in self.compaction_strategy_options.items():
        oprot.writeString(kiter102)
        oprot.writeString(viter103)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.row_cache_keys_to_save is not None:
      oprot.writeFieldBegin('row_cache_keys_to_save', TType.I32, 31)
      oprot.writeI32(self.row_cache_keys_to_save)
      oprot.writeFieldEnd()
    if self.compression_options is not None:
      oprot.writeFieldBegin('compression_options', TType.MAP, 32)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compression_options))
      for kiter104,viter105 in self.compression_options.items():
        oprot.writeString(kiter104)
        oprot.writeString(viter105)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.bloom_filter_fp_chance is not None:
      oprot.writeFieldBegin('bloom_filter_fp_chance', TType.DOUBLE, 33)
      oprot.writeDouble(self.bloom_filter_fp_chance)
      oprot.writeFieldEnd()
    if self.caching is not None:
      oprot.writeFieldBegin('caching', TType.STRING, 34)
      oprot.writeString(self.caching)
      oprot.writeFieldEnd()
    if self.dclocal_read_repair_chance is not None:
      oprot.writeFieldBegin('dclocal_read_repair_chance', TType.DOUBLE, 37)
      oprot.writeDouble(self.dclocal_read_repair_chance)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field (keyspace, name) is unset."""
    if self.keyspace is None:
      raise TProtocol.TProtocolException(message='Required field keyspace is unset!')
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    return

  def __repr__(self):
    """Constructor-like debug representation of all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class KsDef:
  """
  Thrift-generated struct: definition of a keyspace — replication strategy,
  its options, and the contained column-family definitions.

  Attributes:
   - name
   - strategy_class
   - strategy_options
   - replication_factor: @deprecated ignored
   - cf_defs
   - durable_writes
  """

  # Per-field wire metadata: (field id, TType, name, nested type spec, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.STRING, 'strategy_class', None, None, ), # 2
    (3, TType.MAP, 'strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 3
    (4, TType.I32, 'replication_factor', None, None, ), # 4
    (5, TType.LIST, 'cf_defs', (TType.STRUCT,(CfDef, CfDef.thrift_spec)), None, ), # 5
    (6, TType.BOOL, 'durable_writes', None, True, ), # 6
  )

  # durable_writes defaults to True, taken from thrift_spec above.
  def __init__(self, name=None, strategy_class=None, strategy_options=None, replication_factor=None, cf_defs=None, durable_writes=thrift_spec[6][4],):
    self.name = name
    self.strategy_class = strategy_class
    self.strategy_options = strategy_options
    self.replication_factor = replication_factor
    self.cf_defs = cf_defs
    self.durable_writes = durable_writes

  def read(self, iprot):
    """Populate this struct from the given input protocol."""
    # Fast path: C-accelerated decoder when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    # Generic path: consume fields until STOP; unknown ids/types are skipped.
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.strategy_class = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.MAP:
          # string->string map of replication strategy options.
          self.strategy_options = {}
          (_ktype107, _vtype108, _size106 ) = iprot.readMapBegin()
          for _i110 in xrange(_size106):
            _key111 = iprot.readString();
            _val112 = iprot.readString();
            self.strategy_options[_key111] = _val112
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.replication_factor = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          # List of nested CfDef structs.
          self.cf_defs = []
          (_etype116, _size113) = iprot.readListBegin()
          for _i117 in xrange(_size113):
            _elem118 = CfDef()
            _elem118.read(iprot)
            self.cf_defs.append(_elem118)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.durable_writes = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct onto the given output protocol."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('KsDef')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.strategy_class is not None:
      oprot.writeFieldBegin('strategy_class', TType.STRING, 2)
      oprot.writeString(self.strategy_class)
      oprot.writeFieldEnd()
    if self.strategy_options is not None:
      oprot.writeFieldBegin('strategy_options', TType.MAP, 3)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.strategy_options))
      for kiter119,viter120 in self.strategy_options.items():
        oprot.writeString(kiter119)
        oprot.writeString(viter120)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.replication_factor is not None:
      oprot.writeFieldBegin('replication_factor', TType.I32, 4)
      oprot.writeI32(self.replication_factor)
      oprot.writeFieldEnd()
    if self.cf_defs is not None:
      oprot.writeFieldBegin('cf_defs', TType.LIST, 5)
      oprot.writeListBegin(TType.STRUCT, len(self.cf_defs))
      for iter121 in self.cf_defs:
        iter121.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.durable_writes is not None:
      oprot.writeFieldBegin('durable_writes', TType.BOOL, 6)
      oprot.writeBool(self.durable_writes)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field (name, strategy_class, cf_defs) is unset."""
    if self.name is None:
      raise TProtocol.TProtocolException(message='Required field name is unset!')
    if self.strategy_class is None:
      raise TProtocol.TProtocolException(message='Required field strategy_class is unset!')
    if self.cf_defs is None:
      raise TProtocol.TProtocolException(message='Required field cf_defs is unset!')
    return

  def __repr__(self):
    """Constructor-like debug representation of all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CqlRow:
  """
  Row returned from a CQL query: a row key plus its list of columns.

  Attributes:
   - key
   - columns
  """

  # Per-field wire metadata: (field id, TType, name, nested type spec, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'key', None, None, ), # 1
    (2, TType.LIST, 'columns', (TType.STRUCT,(Column, Column.thrift_spec)), None, ), # 2
  )

  def __init__(self, key=None, columns=None,):
    self.key = key
    self.columns = columns

  def read(self, iprot):
    """Populate this struct from the given input protocol."""
    # Fast path: C-accelerated decoder when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    # Generic path: consume fields until STOP; unknown ids/types are skipped.
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.key = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          # List of nested Column structs.
          self.columns = []
          (_etype125, _size122) = iprot.readListBegin()
          for _i126 in xrange(_size122):
            _elem127 = Column()
            _elem127.read(iprot)
            self.columns.append(_elem127)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct onto the given output protocol."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CqlRow')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 1)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for iter128 in self.columns:
        iter128.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field (key, columns) is unset."""
    if self.key is None:
      raise TProtocol.TProtocolException(message='Required field key is unset!')
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')
    return

  def __repr__(self):
    """Constructor-like debug representation of all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CqlMetadata:
  """
  Thrift-generated struct: per-column comparator/validator metadata for a
  CQL result set, plus defaults for columns not listed explicitly.

  Attributes:
   - name_types
   - value_types
   - default_name_type
   - default_value_type
  """

  # Per-field wire metadata: (field id, TType, name, nested type spec, default).
  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'name_types', (TType.STRING,None,TType.STRING,None), None, ), # 1
    (2, TType.MAP, 'value_types', (TType.STRING,None,TType.STRING,None), None, ), # 2
    (3, TType.STRING, 'default_name_type', None, None, ), # 3
    (4, TType.STRING, 'default_value_type', None, None, ), # 4
  )

  def __init__(self, name_types=None, value_types=None, default_name_type=None, default_value_type=None,):
    self.name_types = name_types
    self.value_types = value_types
    self.default_name_type = default_name_type
    self.default_value_type = default_value_type

  def read(self, iprot):
    """Populate this struct from the given input protocol."""
    # Fast path: C-accelerated decoder when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    # Generic path: consume fields until STOP; unknown ids/types are skipped.
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          # string->string map of column name types.
          self.name_types = {}
          (_ktype130, _vtype131, _size129 ) = iprot.readMapBegin()
          for _i133 in xrange(_size129):
            _key134 = iprot.readString();
            _val135 = iprot.readString();
            self.name_types[_key134] = _val135
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.MAP:
          # string->string map of column value types.
          self.value_types = {}
          (_ktype137, _vtype138, _size136 ) = iprot.readMapBegin()
          for _i140 in xrange(_size136):
            _key141 = iprot.readString();
            _val142 = iprot.readString();
            self.value_types[_key141] = _val142
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.default_name_type = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.default_value_type = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct onto the given output protocol."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CqlMetadata')
    if self.name_types is not None:
      oprot.writeFieldBegin('name_types', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.name_types))
      for kiter143,viter144 in self.name_types.items():
        oprot.writeString(kiter143)
        oprot.writeString(viter144)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.value_types is not None:
      oprot.writeFieldBegin('value_types', TType.MAP, 2)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.value_types))
      for kiter145,viter146 in self.value_types.items():
        oprot.writeString(kiter145)
        oprot.writeString(viter146)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.default_name_type is not None:
      oprot.writeFieldBegin('default_name_type', TType.STRING, 3)
      oprot.writeString(self.default_name_type)
      oprot.writeFieldEnd()
    if self.default_value_type is not None:
      oprot.writeFieldBegin('default_value_type', TType.STRING, 4)
      oprot.writeString(self.default_value_type)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if any required field is unset (all four are required)."""
    if self.name_types is None:
      raise TProtocol.TProtocolException(message='Required field name_types is unset!')
    if self.value_types is None:
      raise TProtocol.TProtocolException(message='Required field value_types is unset!')
    if self.default_name_type is None:
      raise TProtocol.TProtocolException(message='Required field default_name_type is unset!')
    if self.default_value_type is None:
      raise TProtocol.TProtocolException(message='Required field default_value_type is unset!')
    return

  def __repr__(self):
    """Constructor-like debug representation of all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CqlResult:
  """
  Thrift-generated struct: result of a CQL query — a result type tag plus,
  depending on the type, rows, an integer, and/or schema metadata.

  Attributes:
   - type
   - rows
   - num
   - schema
  """

  # Per-field wire metadata: (field id, TType, name, nested type spec, default).
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'type', None, None, ), # 1
    (2, TType.LIST, 'rows', (TType.STRUCT,(CqlRow, CqlRow.thrift_spec)), None, ), # 2
    (3, TType.I32, 'num', None, None, ), # 3
    (4, TType.STRUCT, 'schema', (CqlMetadata, CqlMetadata.thrift_spec), None, ), # 4
  )

  def __init__(self, type=None, rows=None, num=None, schema=None,):
    self.type = type
    self.rows = rows
    self.num = num
    self.schema = schema

  def read(self, iprot):
    """Populate this struct from the given input protocol."""
    # Fast path: C-accelerated decoder when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    # Generic path: consume fields until STOP; unknown ids/types are skipped.
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.type = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          # List of nested CqlRow structs.
          self.rows = []
          (_etype150, _size147) = iprot.readListBegin()
          for _i151 in xrange(_size147):
            _elem152 = CqlRow()
            _elem152.read(iprot)
            self.rows.append(_elem152)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.num = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          # Nested CqlMetadata struct.
          self.schema = CqlMetadata()
          self.schema.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct onto the given output protocol."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('CqlResult')
    if self.type is not None:
      oprot.writeFieldBegin('type', TType.I32, 1)
      oprot.writeI32(self.type)
      oprot.writeFieldEnd()
    if self.rows is not None:
      oprot.writeFieldBegin('rows', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.rows))
      for iter153 in self.rows:
        iter153.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.num is not None:
      oprot.writeFieldBegin('num', TType.I32, 3)
      oprot.writeI32(self.num)
      oprot.writeFieldEnd()
    if self.schema is not None:
      oprot.writeFieldBegin('schema', TType.STRUCT, 4)
      self.schema.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if the required field 'type' is unset."""
    if self.type is None:
      raise TProtocol.TProtocolException(message='Required field type is unset!')
    return

  def __repr__(self):
    """Constructor-like debug representation of all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class CqlPreparedResult:
    """
    Handle returned when a CQL statement is prepared.  Generated by the
    Thrift compiler -- do not edit by hand.

    Attributes:
     - itemId
     - count
     - variable_types
     - variable_names
    """

    # Per-field wire metadata, indexed by Thrift field id.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'itemId', None, None, ), # 1
        (2, TType.I32, 'count', None, None, ), # 2
        (3, TType.LIST, 'variable_types', (TType.STRING,None), None, ), # 3
        (4, TType.LIST, 'variable_names', (TType.STRING,None), None, ), # 4
    )

    def __init__(self, itemId=None, count=None, variable_types=None, variable_names=None,):
        self.itemId = itemId
        self.count = count
        self.variable_types = variable_types
        self.variable_names = variable_names

    def read(self, iprot):
        # Fast path: C-accelerated decode when the accelerated protocol,
        # a readable C transport and the fastbinary module are all available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.itemId = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.count = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    self.variable_types = []
                    (_etype157, _size154) = iprot.readListBegin()
                    for _i158 in xrange(_size154):
                        _elem159 = iprot.readString();
                        self.variable_types.append(_elem159)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.LIST:
                    self.variable_names = []
                    (_etype163, _size160) = iprot.readListBegin()
                    for _i164 in xrange(_size160):
                        _elem165 = iprot.readString();
                        self.variable_names.append(_elem165)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path mirror of read().
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        # Only fields that are set (not None) are written to the wire.
        oprot.writeStructBegin('CqlPreparedResult')
        if self.itemId is not None:
            oprot.writeFieldBegin('itemId', TType.I32, 1)
            oprot.writeI32(self.itemId)
            oprot.writeFieldEnd()
        if self.count is not None:
            oprot.writeFieldBegin('count', TType.I32, 2)
            oprot.writeI32(self.count)
            oprot.writeFieldEnd()
        if self.variable_types is not None:
            oprot.writeFieldBegin('variable_types', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.variable_types))
            for iter166 in self.variable_types:
                oprot.writeString(iter166)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.variable_names is not None:
            oprot.writeFieldBegin('variable_names', TType.LIST, 4)
            oprot.writeListBegin(TType.STRING, len(self.variable_names))
            for iter167 in self.variable_names:
                oprot.writeString(iter167)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # itemId and count are marked required in the IDL.
        if self.itemId is None:
            raise TProtocol.TProtocolException(message='Required field itemId is unset!')
        if self.count is None:
            raise TProtocol.TProtocolException(message='Required field count is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CfSplit:
    """
    Represents input splits used by hadoop ColumnFamilyRecordReaders.
    Generated by the Thrift compiler -- do not edit by hand.

    Attributes:
     - start_token
     - end_token
     - row_count
    """

    # Per-field wire metadata, indexed by Thrift field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'start_token', None, None, ), # 1
        (2, TType.STRING, 'end_token', None, None, ), # 2
        (3, TType.I64, 'row_count', None, None, ), # 3
    )

    def __init__(self, start_token=None, end_token=None, row_count=None,):
        self.start_token = start_token
        self.end_token = end_token
        self.row_count = row_count

    def read(self, iprot):
        # Fast path: C-accelerated decode when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field, skipping unknown fields.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.start_token = iprot.readString();
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.end_token = iprot.readString();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.row_count = iprot.readI64();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path mirror of read().
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CfSplit')
        if self.start_token is not None:
            oprot.writeFieldBegin('start_token', TType.STRING, 1)
            oprot.writeString(self.start_token)
            oprot.writeFieldEnd()
        if self.end_token is not None:
            oprot.writeFieldBegin('end_token', TType.STRING, 2)
            oprot.writeString(self.end_token)
            oprot.writeFieldEnd()
        if self.row_count is not None:
            oprot.writeFieldBegin('row_count', TType.I64, 3)
            oprot.writeI64(self.row_count)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All three fields are marked required in the IDL.
        if self.start_token is None:
            raise TProtocol.TProtocolException(message='Required field start_token is unset!')
        if self.end_token is None:
            raise TProtocol.TProtocolException(message='Required field end_token is unset!')
        if self.row_count is None:
            raise TProtocol.TProtocolException(message='Required field row_count is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| mit |
JianfengYao/pyvim | pyvim/commands/commands.py | 1 | 10546 | from __future__ import unicode_literals, print_function
import six
import os
__all__ = (
'has_command_handler',
'call_command_handler',
)
#: Registry of Vi ':' commands -> handler callables.
COMMANDS_TO_HANDLERS = {}
#: Names of the commands that accept a filename argument.
COMMANDS_TAKING_FILENAMES = set()
#: Registry of ':set' options -> handler callables.
SET_COMMANDS = {}
#: ':set' options that accept a '=value' part.
SET_COMMANDS_TAKING_VALUE = set()


def has_command_handler(command):
    """Return whether a handler has been registered for `command`."""
    return command in COMMANDS_TO_HANDLERS


def call_command_handler(command, editor, variables):
    """
    Execute command.

    Looks up the registered handler for `command` and invokes it with the
    editor instance and the matched grammar variables.
    """
    handler = COMMANDS_TO_HANDLERS[command]
    handler(editor, variables)


def get_commands():
    """Return the names of all registered commands."""
    return COMMANDS_TO_HANDLERS.keys()


def get_commands_taking_filenames():
    """Return the set of command names that take a filename argument."""
    return COMMANDS_TAKING_FILENAMES
# Decorators
def _cmd(name):
    """
    Base decorator for registering commands in this namespace.
    """
    def decorator(func):
        # Register the handler, but return the original function unchanged so
        # several @_cmd decorators can be stacked to register multiple names.
        COMMANDS_TO_HANDLERS[name] = func
        return func
    return decorator
def file_cmd(name):
    """
    Decorator that registers a command that takes a filename as (optional)
    parameter.
    """
    COMMANDS_TAKING_FILENAMES.add(name)

    def decorator(func):
        @_cmd(name)
        def command_wrapper(editor, variables):
            # Extract the (optional) filename from the matched variables.
            filename = variables.get('filename')
            func(editor, filename)
        # Return the original function (not the wrapper) so that stacked
        # @file_cmd decorators register the same handler under several names.
        return func
    return decorator
def cmd(name):
    """
    Decorator that registers a command that doesn't take any parameters.
    """
    def decorator(func):
        @_cmd(name)
        def command_wrapper(editor, variables):
            func(editor)
        # Return the original function so decorators can be stacked.
        return func
    return decorator
def set_cmd(name, takes_value=False):
    """
    Decorator that registers a ':set'-command.

    When `takes_value` is True the option accepts a '=value' argument and its
    handler is called as handler(editor, value) instead of handler(editor).
    """
    def decorator(func):
        SET_COMMANDS[name] = func
        if takes_value:
            SET_COMMANDS_TAKING_VALUE.add(name)
        return func
    return decorator
# Actual command implementations
@_cmd('set')
def _(editor, variables):
    """
    Execute a ':set' command by dispatching to the registered option handler.
    """
    option = variables.get('set_option')
    value = variables.get('set_value')

    if option in SET_COMMANDS:
        # Call the correct handler; value-taking options get the '=value' part.
        if option in SET_COMMANDS_TAKING_VALUE:
            SET_COMMANDS[option](editor, value)
        else:
            SET_COMMANDS[option](editor)
    else:
        editor.show_message('Unknown option: %s' % option)
# Buffer- and window-management commands.  All handlers are intentionally
# named '_': registration happens through the decorators, not via the name.
@cmd('bn')
def _(editor):
    """
    Go to next buffer.
    """
    editor.window_arrangement.go_to_next_buffer()


@cmd('bp')
def _(editor):
    """
    Go to previous buffer.
    """
    editor.window_arrangement.go_to_previous_buffer()


@cmd('only')
def _(editor):
    """
    Keep only the current window.
    """
    editor.window_arrangement.keep_only_current_window()


@cmd('hide')
def _(editor):
    """
    Hide the current window.
    """
    editor.window_arrangement.close_window()


@file_cmd('sp')
@file_cmd('split')
def _(editor, filename):
    """
    Split window horizontally.
    """
    editor.window_arrangement.hsplit(filename or None)


@file_cmd('vsp')
@file_cmd('vsplit')
def _(editor, filename):
    """
    Split window vertically.
    """
    editor.window_arrangement.vsplit(filename or None)


@cmd('new')
def _(editor):
    """
    Create new buffer.
    """
    editor.window_arrangement.hsplit(new=True)


@cmd('vnew')
def _(editor):
    """
    Create new buffer, splitting vertically.
    """
    editor.window_arrangement.vsplit(new=True)


@file_cmd('badd')
def _(editor, filename):
    """
    Add a new buffer.
    """
    editor.window_arrangement.open_buffer(filename)
@cmd('buffers')
def _(editor):
    """
    List all open buffers, then wait for the user to press ENTER.
    """
    def handler():
        wa = editor.window_arrangement
        for info in wa.list_open_buffers():
            # '%' marks the buffer shown in the active window.
            char = '%' if info.is_active else ''
            eb = info.editor_buffer
            print(' %3i %-2s %-20s line %i' % (
                info.index, char, eb.filename, (eb.buffer.document.cursor_position_row + 1)))
        # Bug fix: the previous code was ``(input() if six.PY3 else
        # raw_input)(...)`` which, on Python 3, *called* input() without a
        # prompt and then tried to call the returned string -> TypeError.
        # Select the function first, then call it with the prompt.
        (input if six.PY3 else raw_input)('\nPress ENTER to continue...')
    editor.cli.run_in_terminal(handler)
@_cmd('b')
@_cmd('buffer')
def _(editor, variables):
    """
    Go to one of the open buffers.
    """
    buffer_name = variables.get('buffer_name')
    if buffer_name:
        editor.window_arrangement.go_to_buffer(buffer_name)


@cmd('bw')
def _(editor):
    """
    Wipe buffer, refusing when it still has unsaved changes.
    """
    eb = editor.window_arrangement.active_editor_buffer
    if eb.has_unsaved_changes:
        editor.show_message('No write since last change for buffer. '
                            '(add ! to override)')
    else:
        editor.window_arrangement.close_buffer()


@cmd('bw!')
def _(editor):
    """
    Force wipe buffer, discarding any unsaved changes.
    """
    editor.window_arrangement.close_buffer()
@file_cmd('e')
@file_cmd('edit')
def _(editor, filename):
    """
    Edit new buffer.
    """
    # Unlike ':badd', show the newly opened buffer in the current window.
    editor.window_arrangement.open_buffer(filename, show_in_current_window=True)
@cmd('q')
@cmd('quit')
def quit(editor, all_=False, force=False):
    """
    Quit.

    :param all_: Also quit when more than one buffer is open.
    :param force: Quit even when there are unsaved changes.
    """
    ebs = editor.window_arrangement.editor_buffers

    # When there are buffers that have unsaved changes, show balloon.
    if not force and any(eb.has_unsaved_changes for eb in ebs):
        editor.show_message('No write since last change (add ! to override)')

    # When there is more than one buffer open.
    elif not all_ and len(ebs) > 1:
        editor.show_message('%i more files to edit' % (len(ebs) - 1))

    else:
        # Returning from the CLI run loop terminates the application.
        editor.cli.set_return_value('')
@cmd('qa')
@cmd('qall')
def _(editor):
    """
    Quit all.
    """
    quit(editor, all_=True)


# All force-quit variants share one handler: each of them discards unsaved
# changes and closes every buffer.
@cmd('q!')
@cmd('qa!')
@cmd('quit!')
@cmd('qall!')
def _(editor):
    """
    Force quit.
    """
    quit(editor, all_=True, force=True)
@file_cmd('w')
@file_cmd('write')
def write(editor, filename, overwrite=False):
    """
    Write the active buffer to `filename`, or to its own file when
    `filename` is None.

    :param overwrite: When False, refuse to clobber an existing file.
    """
    if filename and not overwrite and os.path.exists(filename):
        # Typo fix: this user-facing message used to read
        # 'File exists (add ! to overriwe)'.
        editor.show_message('File exists (add ! to override)')
    else:
        eb = editor.window_arrangement.active_editor_buffer
        if filename is None and eb.filename is None:
            editor.show_message('No file name')
        else:
            eb.write(filename)
@file_cmd('w!')
@file_cmd('write!')
def _(editor, filename):
    """
    Write (and overwrite) file.
    """
    write(editor, filename, overwrite=True)


@file_cmd('wq')
def _(editor, filename):
    """
    Write file and quit.
    """
    write(editor, filename)
    editor.cli.set_return_value('')


@cmd('wqa')
def _(editor):
    """
    Write current buffer and quit all.
    """
    eb = editor.window_arrangement.active_editor_buffer
    if eb.filename is None:
        # Cannot save a buffer that was never associated with a file.
        editor.show_message('No file name for buffer')
    else:
        eb.write()
        quit(editor, all_=True, force=False)


@file_cmd('wq!')
def _(editor, filename):
    """
    Write file (and overwrite) and quit.
    """
    write(editor, filename, overwrite=True)
    editor.cli.set_return_value('')
@cmd('help')
def _(editor):
    """
    Show help.
    """
    editor.show_help()


@file_cmd('tabnew')
def _(editor, filename):
    """
    Create new tab page.
    """
    editor.window_arrangement.create_tab(filename or None)


@cmd('tabclose')
def _(editor):
    """
    Close tab page.
    """
    editor.window_arrangement.close_tab()


@cmd('tabnext')
def _(editor):
    """
    Go to next tab.
    """
    editor.window_arrangement.go_to_next_tab()


@cmd('tabprevious')
def _(editor):
    """
    Go to previous tab.
    """
    editor.window_arrangement.go_to_previous_tab()


@_cmd('colorscheme')
def _(editor, variables):
    """
    Activate the given colorscheme.
    """
    colorscheme = variables.get('colorscheme')
    if colorscheme:
        editor.use_colorscheme(colorscheme)
# Boolean ':set' options: each pair of handlers flips one editor attribute.
@set_cmd('nu')
@set_cmd('number')
def _(editor):
    """ Show line numbers. """
    editor.show_line_numbers = True


@set_cmd('nonu')
@set_cmd('nonumber')
def _(editor):
    """ Hide line numbers. """
    editor.show_line_numbers = False


@set_cmd('hlsearch')
def _(editor):
    """ Highlight search matches. """
    editor.highlight_search = True


@set_cmd('nohlsearch')
def _(editor):
    """ Don't highlight search matches. """
    editor.highlight_search = False


@set_cmd('paste')
def _(editor):
    """ Enter paste mode. """
    editor.paste_mode = True


@set_cmd('nopaste')
def _(editor):
    """ Leave paste mode. """
    editor.paste_mode = False


@set_cmd('ruler')
def _(editor):
    """ Show ruler. """
    editor.show_ruler = True


@set_cmd('noruler')
def _(editor):
    """ Hide ruler. """
    editor.show_ruler = False


@set_cmd('wildmenu')
@set_cmd('wmnu')
def _(editor):
    """ Show wildmenu. """
    editor.show_wildmenu = True


@set_cmd('nowildmenu')
@set_cmd('nowmnu')
def _(editor):
    """ Hide wildmenu. """
    editor.show_wildmenu = False


@set_cmd('expandtab')
@set_cmd('et')
def _(editor):
    """ Enable tab expansion. """
    editor.expand_tab = True


@set_cmd('noexpandtab')
@set_cmd('noet')
def _(editor):
    """ Disable tab expansion. """
    editor.expand_tab = False
@set_cmd('tabstop', takes_value=True)
@set_cmd('ts', takes_value=True)
def _(editor, value):
    """
    Set tabstop, or report the current value when no '=value' was given.
    """
    if value is None:
        # ':set ts' without a value just reports the current setting.
        editor.show_message('tabstop=%i' % editor.tabstop)
    else:
        try:
            value = int(value)
            if value > 0:
                editor.tabstop = value
            else:
                editor.show_message('Argument must be positive')
        except ValueError:
            editor.show_message('Number required after =')
# More boolean ':set' options: search behaviour and misc toggles.
@set_cmd('incsearch')
def _(editor):
    """ Enable incsearch. """
    editor.incsearch = True


@set_cmd('noincsearch')
def _(editor):
    """ Disable incsearch. """
    editor.incsearch = False


@set_cmd('ignorecase')
def _(editor):
    """ Enable case insensitive searching. """
    editor.ignore_case = True


@set_cmd('noignorecase')
def _(editor):
    """ Disable case insensitive searching. """
    editor.ignore_case = False


@set_cmd('list')
def _(editor):
    """ Display unprintable characters. """
    editor.display_unprintable_characters = True


@set_cmd('nolist')
def _(editor):
    """ Hide unprintable characters. """
    editor.display_unprintable_characters = False


@set_cmd('jedi')
def _(editor):
    """ Enable Jedi autocompletion for Python files. """
    editor.enable_jedi = True


@set_cmd('nojedi')
def _(editor):
    """ Disable Jedi autocompletion. """
    editor.enable_jedi = False
| bsd-3-clause |
cfei18/incubator-airflow | airflow/utils/__init__.py | 9 | 1382 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
    """
    Deprecated shim: forwards to airflow.utils.decorators.apply_defaults
    while emitting a PendingDeprecationWarning.
    """
    warnings.warn_explicit(
        """
        You are importing apply_defaults from airflow.utils which
        will be deprecated in a future version.
        Please use :

        from airflow.utils.decorators import apply_defaults
        """,
        category=PendingDeprecationWarning,
        # Attribute the warning to the decorated function's definition site,
        # so users see exactly which import they need to fix.
        filename=func.__code__.co_filename,
        lineno=func.__code__.co_firstlineno + 1
    )
    return _apply_defaults(func)
| apache-2.0 |
faun/django_test | django/contrib/auth/tests/tokens.py | 18 | 2936 | from datetime import date, timedelta
from django.conf import settings
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.test import TestCase
class TokenGeneratorTest(TestCase):
    """Tests for django.contrib.auth.tokens.PasswordResetTokenGenerator."""

    def test_make_token(self):
        """
        Ensure that we can make a token and that it is valid
        """
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        self.assertTrue(p0.check_token(user, tk1))

    def test_10265(self):
        """
        Ensure that the token generated for a user created in the same request
        will work correctly.
        """
        # See ticket #10265
        user = User.objects.create_user('comebackkid', 'test3@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        reload = User.objects.get(username='comebackkid')
        tk2 = p0.make_token(reload)
        self.assertEqual(tk1, tk2)

    def test_timeout(self):
        """
        Ensure we can use the token after n days, but no greater.
        """
        # Uses a mocked version of PasswordResetTokenGenerator so we can change
        # the value of 'today'
        class Mocked(PasswordResetTokenGenerator):
            def __init__(self, today):
                self._today_val = today

            def _today(self):
                return self._today_val

        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        # Still valid on the last day of the timeout window...
        p1 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS))
        self.assertTrue(p1.check_token(user, tk1))
        # ...but invalid one day later.
        p2 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS + 1))
        self.assertFalse(p2.check_token(user, tk1))

    def test_django12_hash(self):
        """
        Ensure we can use the hashes generated by Django 1.2
        """
        # Hard code in the Django 1.2 algorithm (not the result, as it is time
        # dependent)
        def _make_token(user):
            from django.utils.hashcompat import sha_constructor
            from django.utils.http import int_to_base36
            timestamp = (date.today() - date(2001,1,1)).days
            ts_b36 = int_to_base36(timestamp)
            hash = sha_constructor(settings.SECRET_KEY + unicode(user.id) +
                user.password + user.last_login.strftime('%Y-%m-%d %H:%M:%S') +
                unicode(timestamp)).hexdigest()[::2]
            return "%s-%s" % (ts_b36, hash)

        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = _make_token(user)
        self.assertTrue(p0.check_token(user, tk1))
| bsd-3-clause |
meabsence/python-for-android | python-build/python-libs/gdata/build/lib/gdata/test_config_template.py | 133 | 2550 | #!/usr/bin/env python
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fill in this module with configuration data to use in tests.
See comments in the source code for explanations of the settings.
"""
import os
# To actually run the tests which use this configuration information you must
# change RUN_LIVE_TESTS to True.
RUN_LIVE_TESTS = False
# If set to True, the client will save responses from the server and reuse
# them in future runs of the test.
CACHE_RESPONSES = True
# If set to True, the client will make HTTP requests to the server regardless
# of a cache file. If True, caches from previous sessions will be deleted.
# If False (the default) cached sessions will be reused if they exist.
CLEAR_CACHE = True
GOOGLE_ACCOUNT_EMAIL = '<your email>'
GOOGLE_ACCOUNT_PASSWORD = '<your password>'
# For example, the TEST_FILES_DIR might be
# '/home/username/svn/gdata-python-client/tests'
TEST_FILES_DIR = '<location of the tests directory>'
class NoAuthConfig(object):
    # Sentinel configuration for tests that must run without an auth token.
    auth_token = False
class TestConfig(object):
    """Base configuration shared by all service-specific test configs.

    Subclasses override `service` and, when they use a dedicated test
    account, the `email`/`password` classmethods.
    """

    service = None
    auth_token = None

    # Modernized: use the @classmethod decorator instead of the pre-Python-2.4
    # ``name = classmethod(name)`` rebinding idiom.  Behavior is identical.
    @classmethod
    def email(cls):
        """Provides email to log into the test account for this service.

        By default uses GOOGLE_ACCOUNT_EMAIL, so overwrite this function if you
        have a service-specific test account.
        """
        return GOOGLE_ACCOUNT_EMAIL

    @classmethod
    def password(cls):
        """Provides password to log into the test account for this service.

        By default uses GOOGLE_ACCOUNT_PASSWORD, so overwrite this function if
        you have a service-specific test account.
        """
        return GOOGLE_ACCOUNT_PASSWORD
class BloggerConfig(TestConfig):
    """Settings for live tests against the Blogger service."""
    service = 'blogger'
    title = 'A Test Post'
    content = 'This is a <b>test</b>.'
    blog_id = '<your test blog\'s id>'
class ContactsConfig(TestConfig):
    """Settings for live tests against the Contacts (cp) service."""

    service = 'cp'

    # Modernized: @classmethod decorator instead of the pre-Python-2.4
    # ``name = classmethod(name)`` rebinding idiom.  Behavior is identical.
    @classmethod
    def get_image_location(cls):
        # Test image shipped with the repository's test files.
        return os.path.join(TEST_FILES_DIR, 'files', 'testimage.jpg')
| apache-2.0 |
gbaty/pyside2 | tests/QtXml/qxmlsimplereader_test.py | 3 | 1331 | #!/usr/bin/python
import unittest
from PySide2.QtXml import QXmlSimpleReader
class QXmlSimpleReaderTest(unittest.TestCase):
    """Sanity checks for the PySide2 QXmlSimpleReader binding."""

    def testQXmlSimpleReaderInstatiation(self):
        '''QXmlSimpleReader must be a concrete class not an abstract one.'''
        reader = QXmlSimpleReader()

    def testQXmlSimpleReaderFeatures(self):
        '''Calls the QXmlSimpleReader.features method. The features checked
        (at least the first two) can be found in the QXmlSimpleReader documentation:
        http://qt.nokia.com/doc/4.6/qxmlsimplereader.html#setFeature
        '''
        reader = QXmlSimpleReader()
        # feature() returns a (value, ok) pair; ok is False for unknown names.
        hasFeature, ok = reader.feature('http://xml.org/sax/features/namespaces')
        self.assertEqual((hasFeature, ok), (True, True))

        hasFeature, ok = reader.feature('http://xml.org/sax/features/namespace-prefixes')
        self.assertEqual((hasFeature, ok), (False, True))

        hasFeature, ok = reader.feature('foobar')
        self.assertEqual((hasFeature, ok), (False, False))

    def testQXmlSimpleReaderProperty(self):
        '''Tries to get a non existent property.'''
        reader = QXmlSimpleReader()
        prop, ok = reader.property('foo')
        self.assertEqual((prop, ok), (None, False))
        reader.setProperty('foo', 'Something')


if __name__ == '__main__':
    unittest.main()
| lgpl-2.1 |
code-for-india/sahana_shelter_worldbank | private/templates/uvg/menus.py | 2 | 4029 | # -*- coding: utf-8 -*-
from gluon import *
from s3 import *
from s3layouts import *
try:
from .layouts import *
except ImportError:
pass
import s3menus as default
# Below is an example which you can base your own template's menus.py on
# - there are also other examples in the other templates folders
# =============================================================================
class S3MainMenu(default.S3MainMenu):
    """
    Custom Application Main Menu:

    The main menu consists of several sub-menus, each of which can
    be customised separately as a method of this class. The overall
    composition of the menu is defined in the menu() method, which can
    be customised as well:

    Function        Sub-Menu                Access to (standard)

    menu_modules()  the modules menu        the Eden modules
    menu_gis()      the GIS menu            GIS configurations
    menu_admin()    the Admin menu          System/User Administration
    menu_lang()     the Language menu       Selection of the GUI locale
    menu_auth()     the User menu           Login, Logout, User Profile
    menu_help()     the Help menu           Contact page, About page

    The standard uses the MM layout class for main menu items - but you
    can of course use a custom layout class which you define in layouts.py.

    Additional sub-menus can simply be defined as additional functions in
    this class, and then be included in the menu() method.

    Each sub-menu function returns a list of menu items, only the menu()
    function must return a layout class instance.
    """

    # -------------------------------------------------------------------------
    # The inherited menu() composition is kept; uncomment to customise it.
    #@classmethod
    #def menu(cls):
    #    """ Compose Menu """
    #    main_menu = MM()(
    #        # Modules-menu, align-left
    #        cls.menu_modules(),
    #        # Service menus, align-right
    #        # Note: always define right-hand items in reverse order!
    #        cls.menu_help(right=True),
    #        cls.menu_auth(right=True),
    #        cls.menu_lang(right=True),
    #        cls.menu_admin(right=True),
    #        cls.menu_gis(right=True)
    #    )
    #    return main_menu

    # -------------------------------------------------------------------------
    @classmethod
    def menu_modules(cls):
        """ Custom Modules Menu """

        # Top-level entries plus a 'more' drop-down for secondary modules.
        return [
            homepage(name="UVG"),
            homepage("gis"),
            homepage("project"),
            MM("more", link=False)(
                homepage("org"),
                homepage("doc")
            ),
        ]
# =============================================================================
#class S3OptionsMenu(default.S3OptionsMenu):
#"""
#Custom Controller Menus
#The options menu (left-hand options menu) is individual for each
#controller, so each controller has its own options menu function
#in this class.
#Each of these option menu functions can be customised separately,
#by simply overriding (re-defining) the default function. The
#options menu function must return an instance of the item layout.
#The standard menu uses the M item layout class, but you can of
#course also use any other layout class which you define in
#layouts.py (can also be mixed).
#Make sure additional helper functions in this class don't match
#any current or future controller prefix (e.g. by using an
#underscore prefix).
#"""
#def cr(self):
#""" CR / Shelter Registry """
#return M(c="cr")(
#M("Camp", f="shelter")(
#M("New", m="create"),
#M("List All"),
#M("Map", m="map"),
#M("Import", m="import"),
#)
#)
# END =========================================================================
| mit |
mikewiebe-ansible/ansible | lib/ansible/modules/cloud/univention/udm_user.py | 37 | 18102 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright: (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author:
- Tobias Rüetschi (@keachi)
short_description: Manage posix users on a univention corporate server
description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
description:
- First name. Required if C(state=present).
lastname:
description:
- Last name. Required if C(state=present).
password:
description:
- Password. Required if C(state=present).
birthday:
description:
- Birthday
city:
description:
- City of users business address.
country:
description:
- Country of users business address.
department_number:
description:
- Department number of users business address.
aliases: [ departmentNumber ]
description:
description:
- Description (not gecos)
display_name:
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
default: []
description:
- A list of e-mail addresses.
employee_number:
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
description:
- Employee type
aliases: [ employeeType ]
gecos:
description:
- GECOS
groups:
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
description:
- Organisation
aliases: [ organization ]
override_pw_history:
type: bool
default: 'no'
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
type: bool
default: 'no'
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
description:
- List of telephone numbers.
postcode:
description:
- Postal code of users business address.
primary_group:
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
description:
- Windows profile directory
pwd_change_next_login:
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
description:
- Room number of users business address.
aliases: [ roomNumber ]
samba_privileges:
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
description:
- Allow the authentication only on this Microsoft Windows host.
aliases: [ sambaUserWorkstations ]
sambahome:
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
description:
- Windows logon script.
secretary:
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
default: []
description:
- Enable user for the following service providers.
shell:
default: '/bin/bash'
description:
- Login shell
street:
description:
- Street of users business address.
title:
description:
- Title, e.g. C(Prof.).
unixhome:
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
'''
RETURN = '''# '''
import crypt
from datetime import date, timedelta
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
def main():
    """Ansible entry point: create, update or remove a UCS ``users/user`` object.

    Builds the module argument spec, resolves the target LDAP container/DN
    from ``position`` or ``ou``/``subpath``, applies the requested ``state``
    through the UMC API, syncs group memberships, and finally reports the
    result via ``module.exit_json``/``module.fail_json``.
    """
    # Default account expiry used by the `userexpiry` option: today + 1 year.
    expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d")
    module = AnsibleModule(
        argument_spec=dict(
            birthday=dict(default=None,
                          type='str'),
            city=dict(default=None,
                      type='str'),
            country=dict(default=None,
                         type='str'),
            department_number=dict(default=None,
                                   type='str',
                                   aliases=['departmentNumber']),
            description=dict(default=None,
                             type='str'),
            display_name=dict(default=None,
                              type='str',
                              aliases=['displayName']),
            email=dict(default=[''],
                       type='list'),
            employee_number=dict(default=None,
                                 type='str',
                                 aliases=['employeeNumber']),
            employee_type=dict(default=None,
                               type='str',
                               aliases=['employeeType']),
            firstname=dict(default=None,
                           type='str'),
            gecos=dict(default=None,
                       type='str'),
            groups=dict(default=[],
                        type='list'),
            home_share=dict(default=None,
                            type='str',
                            aliases=['homeShare']),
            home_share_path=dict(default=None,
                                 type='str',
                                 aliases=['homeSharePath']),
            home_telephone_number=dict(default=[],
                                       type='list',
                                       aliases=['homeTelephoneNumber']),
            homedrive=dict(default=None,
                           type='str'),
            lastname=dict(default=None,
                          type='str'),
            mail_alternative_address=dict(default=[],
                                          type='list',
                                          aliases=['mailAlternativeAddress']),
            mail_home_server=dict(default=None,
                                  type='str',
                                  aliases=['mailHomeServer']),
            mail_primary_address=dict(default=None,
                                      type='str',
                                      aliases=['mailPrimaryAddress']),
            mobile_telephone_number=dict(default=[],
                                         type='list',
                                         aliases=['mobileTelephoneNumber']),
            organisation=dict(default=None,
                              type='str',
                              aliases=['organization']),
            overridePWHistory=dict(default=False,
                                   type='bool',
                                   aliases=['override_pw_history']),
            overridePWLength=dict(default=False,
                                  type='bool',
                                  aliases=['override_pw_length']),
            pager_telephonenumber=dict(default=[],
                                       type='list',
                                       aliases=['pagerTelephonenumber']),
            password=dict(default=None,
                          type='str',
                          no_log=True),
            phone=dict(default=[],
                       type='list'),
            postcode=dict(default=None,
                          type='str'),
            primary_group=dict(default=None,
                               type='str',
                               aliases=['primaryGroup']),
            profilepath=dict(default=None,
                             type='str'),
            pwd_change_next_login=dict(default=None,
                                       type='str',
                                       choices=['0', '1'],
                                       aliases=['pwdChangeNextLogin']),
            room_number=dict(default=None,
                             type='str',
                             aliases=['roomNumber']),
            samba_privileges=dict(default=[],
                                  type='list',
                                  aliases=['sambaPrivileges']),
            samba_user_workstations=dict(default=[],
                                         type='list',
                                         aliases=['sambaUserWorkstations']),
            sambahome=dict(default=None,
                           type='str'),
            scriptpath=dict(default=None,
                            type='str'),
            secretary=dict(default=[],
                           type='list'),
            serviceprovider=dict(default=[''],
                                 type='list'),
            shell=dict(default='/bin/bash',
                       type='str'),
            street=dict(default=None,
                        type='str'),
            title=dict(default=None,
                       type='str'),
            unixhome=dict(default=None,
                          type='str'),
            userexpiry=dict(default=expiry,
                            type='str'),
            username=dict(required=True,
                          aliases=['name'],
                          type='str'),
            position=dict(default='',
                          type='str'),
            update_password=dict(default='always',
                                 choices=['always', 'on_create'],
                                 type='str'),
            ou=dict(default='',
                    type='str'),
            subpath=dict(default='cn=users',
                         type='str'),
            state=dict(default='present',
                       choices=['present', 'absent'],
                       type='str')
        ),
        supports_check_mode=True,
        required_if=([
            ('state', 'present', ['firstname', 'lastname', 'password'])
        ])
    )
    username = module.params['username']
    position = module.params['position']
    ou = module.params['ou']
    subpath = module.params['subpath']
    state = module.params['state']
    changed = False
    diff = None

    # Does a posixAccount with this uid already exist anywhere in the tree?
    users = list(ldap_search(
        '(&(objectClass=posixAccount)(uid={0}))'.format(username),
        attr=['uid']
    ))
    # An explicit `position` wins; otherwise build the container DN from the
    # optional `ou`/`subpath` parts plus the LDAP base DN.
    if position != '':
        container = position
    else:
        if ou != '':
            ou = 'ou={0},'.format(ou)
        if subpath != '':
            subpath = '{0},'.format(subpath)
        container = '{0}{1}{2}'.format(subpath, ou, base_dn())
    user_dn = 'uid={0},{1}'.format(username, container)

    exists = bool(len(users))

    if state == 'present':
        try:
            if not exists:
                obj = umc_module_for_add('users/user', container)
            else:
                obj = umc_module_for_edit('users/user', user_dn)

            # NOTE(review): these lookups use the alias spelling
            # ('displayName') rather than the canonical option name
            # ('display_name') -- verify the alias key is always present in
            # module.params for the targeted Ansible version.
            if module.params['displayName'] is None:
                module.params['displayName'] = '{0} {1}'.format(
                    module.params['firstname'],
                    module.params['lastname']
                )
            if module.params['unixhome'] is None:
                module.params['unixhome'] = '/home/{0}'.format(
                    module.params['username']
                )
            # Copy every plain module option onto the UMC object; the
            # special-cased keys (password/groups/overridePWHistory) are
            # handled separately below.
            for k in obj.keys():
                if (k != 'password' and
                        k != 'groups' and
                        k != 'overridePWHistory' and
                        k in module.params and
                        module.params[k] is not None):
                    obj[k] = module.params[k]
            # handle some special values
            obj['e-mail'] = module.params['email']
            password = module.params['password']
            if obj['password'] is None:
                obj['password'] = password
            if module.params['update_password'] == 'always':
                # Stored value looks like '{crypt}<hash>'; re-hash the
                # supplied password with the stored hash as salt and compare
                # to detect whether the password actually changed.
                old_password = obj['password'].split('}', 2)[1]
                if crypt.crypt(password, old_password) != old_password:
                    obj['overridePWHistory'] = module.params['overridePWHistory']
                    obj['overridePWLength'] = module.params['overridePWLength']
                    obj['password'] = password

            diff = obj.diff()
            if exists:
                for k in obj.keys():
                    if obj.hasChanged(k):
                        changed = True
            else:
                changed = True
            if not module.check_mode:
                if not exists:
                    obj.create()
                elif changed:
                    obj.modify()
        except Exception:
            module.fail_json(
                msg="Creating/editing user {0} in {1} failed".format(
                    username,
                    container
                )
            )
        try:
            groups = module.params['groups']
            if groups:
                # One LDAP search for all requested groups:
                # (&(objectClass=posixGroup)(|(cn=a)(cn=b)...))
                filter = '(&(objectClass=posixGroup)(|(cn={0})))'.format(
                    ')(cn='.join(groups)
                )
                group_dns = list(ldap_search(filter, attr=['dn']))
                for dn in group_dns:
                    grp = umc_module_for_edit('groups/group', dn[0])
                    if user_dn not in grp['users']:
                        grp['users'].append(user_dn)
                        if not module.check_mode:
                            grp.modify()
                        changed = True
        except Exception:
            module.fail_json(
                msg="Adding groups to user {0} failed".format(username)
            )

    if state == 'absent' and exists:
        try:
            obj = umc_module_for_edit('users/user', user_dn)
            if not module.check_mode:
                obj.remove()
            changed = True
        except Exception:
            module.fail_json(
                msg="Removing user {0} failed".format(username)
            )

    module.exit_json(
        changed=changed,
        username=username,
        diff=diff,
        container=container
    )
if __name__ == '__main__':
main()
| gpl-3.0 |
PKRoma/python-for-android | testapps/on_device_unit_tests/test_app/app_flask.py | 4 | 3672 | print('main.py was successfully called')
# Demo Flask app used by the python-for-android on-device unit tests.
# The prints below are intentional: they appear in logcat and show how far
# module initialisation got on the device if something breaks.
print('this is the new main.py')
import sys
print('python version is: ' + sys.version)
print('python path is', sys.path)
import os
print('imported os')
print('contents of this dir', os.listdir('./'))
from flask import (
    Flask,
    render_template,
    request,
    Markup
)
print('imported flask etc')
from constants import RUNNING_ON_ANDROID
from tools import (
    run_test_suites_into_buffer,
    get_failed_unittests_from,
    vibrate_with_pyjnius,
    get_android_python_activity,
    set_device_orientation,
)
app = Flask(__name__)
# Mapping of module name -> dotted test-case path; populated externally
# before the /unittests view is used.
TESTS_TO_PERFORM = dict()
NON_ANDROID_DEVICE_MSG = 'Not running from Android device'
def get_html_for_tested_modules(tested_modules, failed_tests):
    """Render the tested modules as comma-separated, colored <label> markup.

    A module whose test case appears in ``failed_tests`` is rendered red,
    otherwise green.
    """
    total = len(tested_modules)
    rendered = []
    for position, name in enumerate(sorted(tested_modules)):
        print(name)
        css_class = (
            'text-red'
            if TESTS_TO_PERFORM[name] in failed_tests
            else 'text-green'
        )
        template = '<label class="{color}">{module}</label>'
        if position != total - 1:
            template += ', '
        rendered.append(template.format(color=css_class, module=name))
    return Markup(''.join(rendered))
@app.route('/')
def index():
    """Serve the landing page, telling the template which platform we run on."""
    current_platform = 'Android' if RUNNING_ON_ANDROID else 'Desktop'
    return render_template('index.html', platform=current_platform)
@app.route('/unittests')
def unittests():
    """Run the configured test suites and render their results.

    Loads every test case listed in TESTS_TO_PERFORM, runs them while
    capturing the unittest output, and renders the raw output together with
    a red/green HTML summary of the tested recipes.
    """
    import unittest
    print('Imported unittest')
    print("loading tests...")
    suites = unittest.TestLoader().loadTestsFromNames(
        list(TESTS_TO_PERFORM.values()),
    )
    print("running unittest...")
    terminal_output = run_test_suites_into_buffer(suites)
    print("unittest result is:")
    # The template renders the captured output line by line.
    unittest_error_text = terminal_output.split('\n')
    print(terminal_output)
    # get a nice colored `html` output for our tested recipes
    failed_tests = get_failed_unittests_from(
        terminal_output, TESTS_TO_PERFORM.values(),
    )
    colored_tested_recipes = get_html_for_tested_modules(
        TESTS_TO_PERFORM.keys(), failed_tests,
    )
    return render_template(
        'unittests.html',
        tested_recipes=colored_tested_recipes,
        unittests_output=unittest_error_text,
        platform='Android' if RUNNING_ON_ANDROID else 'Desktop',
    )
@app.route('/page2')
def page2():
    """Serve the secondary demo page with the current platform name."""
    current_platform = 'Android' if RUNNING_ON_ANDROID else 'Desktop'
    return render_template('page2.html', platform=current_platform)
@app.route('/loadUrl')
def loadUrl():
    """Ask the Android activity to open the address in the `url` query arg."""
    if not RUNNING_ON_ANDROID:
        print(NON_ANDROID_DEVICE_MSG, '...cancelled loadUrl.')
        return NON_ANDROID_DEVICE_MSG
    args = request.args
    if 'url' not in args:
        print('ERROR: asked to open an url but without url argument')
        # Bug fix: previously fell through and crashed on args['url'];
        # return an error string like the sibling /orientation view does.
        return 'No url specified'
    print('asked to open url', args['url'])
    activity = get_android_python_activity()
    activity.loadUrl(args['url'])
@app.route('/vibrate')
def vibrate():
    """Vibrate the device for `time` seconds (float, query argument)."""
    if not RUNNING_ON_ANDROID:
        print(NON_ANDROID_DEVICE_MSG, '...cancelled vibrate.')
        return NON_ANDROID_DEVICE_MSG
    args = request.args
    if 'time' not in args:
        print('ERROR: asked to vibrate but without time argument')
        # Bug fix: previously fell through and crashed on args['time'];
        # return an error string like the sibling /orientation view does.
        return 'No time specified'
    print('asked to vibrate', args['time'])
    # pyjnius API expects milliseconds.
    return vibrate_with_pyjnius(int(float(args['time']) * 1000))
@app.route('/orientation')
def orientation():
    """Rotate the device to the direction given in the `dir` query argument."""
    if not RUNNING_ON_ANDROID:
        print(NON_ANDROID_DEVICE_MSG, '...cancelled orientation.')
        return NON_ANDROID_DEVICE_MSG
    query = request.args
    if 'dir' not in query:
        print('ERROR: asked to orient but no dir specified')
        return 'No direction specified '
    requested_direction = query['dir']
    return set_device_orientation(requested_direction)
| mit |
xaccc/videoapiserver | SpaceService.py | 1 | 11937 | #coding=utf-8
#-*- encoding: utf-8 -*-
from datetime import datetime, timedelta
from MySQL import MySQL
from random import randint
import Config
import Utils
import UserService
def space_list(data):
    """Return every space owned by the calling user, ordered by `index`."""
    owner_id = UserService.user_id(data['UserKey'])
    db = MySQL()
    rows = db.list('SELECT * FROM `space` WHERE `user_id` = %s ORDER BY `index` ASC', (owner_id))
    spaces = [{'Id': row['id'], 'Name': row['name']} for row in rows]
    return {
        'Count': len(spaces),
        'Spaces': spaces,
    }
def space_create(data):
    """Create a new space for the calling user and position it in the list.

    `data` keys: UserKey (session key), Name (optional display name) and
    After (optional id of the space the new one should follow; 'HEAD'
    means front of the list).
    """
    userId = UserService.user_id(data['UserKey'])
    db = MySQL()
    spaceId = Utils.UUID()
    result = db.save("INSERT INTO `space` (`id`, `user_id`, `name`) VALUES (%s,%s,%s)",
        (spaceId, userId, data.get('Name', '')))
    db.end()
    # Place the freshly created space at the requested position and rewrite
    # all `index` values for this user.
    space_reindex({
        'UserKey': data['UserKey'],
        'Id': spaceId,
        'After': data.get('After', ''),
    })
    return {
        'Id': spaceId,
        'Name': data.get('Name', '')
    }
def space_reindex(data):
    """Move space `Id` within the owner's ordering and persist new indexes.

    `After` is the id of the space it should follow; the special value
    'HEAD' moves it to the front, and an empty/unknown value appends it to
    the end.  Raises if `Id` is not one of the caller's spaces.
    """
    userId = UserService.user_id(data['UserKey'])
    db = MySQL()
    spaceId = data.get('Id', '')
    afterId = data.get('After', '')
    index = []
    spaceListInstance = db.list('SELECT * FROM `space` WHERE `user_id` = %s ORDER BY `index` ASC', (userId))
    for space in spaceListInstance:
        index.append(space['id'])
    if not spaceId in index:
        raise Exception('空间不存在')
    # Remove, then re-insert at the requested position.
    index.remove(spaceId)
    if afterId == 'HEAD':
        index.insert(0, spaceId)
    elif afterId in index:
        index.insert(index.index(afterId) + 1, spaceId)
    else:
        index.append(spaceId)
    # Position in the list becomes the stored `index` column.
    for i,value in enumerate(index):
        db.update("UPDATE `space` SET `index` = %s WHERE `id` = %s", (i, value))
    db.end()
    return {
        'Id': spaceId,
    }
def space_rename(data):
    """Rename a space owned by the calling user; raise if nothing changed."""
    owner_id = UserService.user_id(data['UserKey'])
    space_id = data.get('Id', '')
    new_name = data.get('Name', '')
    db = MySQL()
    affected = db.update("UPDATE `space` SET `name` = %s WHERE `id` = %s AND `user_id` = %s", (new_name, space_id, owner_id))
    db.end()
    if affected <= 0:
        raise Exception('更新失败或空间不存在')
    return {
        'Id': space_id,
        'Name': new_name,
    }
def space_res_relation(data):
    """Attach a resource (`ResType`/`ResId`) to space `Id`.

    Requires edit rights on the space (owner, or authorized with
    allow_edit).  Optional OrderField1..3 feed the custom sort orders used
    by space_res_list.
    """
    userId = UserService.user_id(data['UserKey'])
    db = MySQL()
    # TEST AUTHORIZE
    if __test_auth_edit(userId, data.get('Id', '')) > 0:
        newId = Utils.UUID()
        result = db.update("INSERT INTO `space_resource` (`id`, `space_id`, `owner_id`, `res_type`, `res_id`, `order_field1`, `order_field2`, `order_field3`) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)",
            (newId, data.get('Id', ''), userId, data.get('ResType', ''), data.get('ResId', ''), data.get('OrderField1', None), data.get('OrderField2', None), data.get('OrderField3', None)))
        db.end()
        if result > 0:
            return {
                'Id': data.get('Id', ''),
                'ResType': data.get('ResType', ''),
                'ResId': data.get('ResId', ''),
            }
        else:
            raise Exception('更新失败或空间不存在')
    else:
        raise Exception('没有权限或空间不存在')
def space_res_unrelation(data):
    """Detach resource `ResType`/`ResId` from space `Id` (edit rights needed)."""
    editor_id = UserService.user_id(data['UserKey'])
    db = MySQL()
    space_id = data.get('Id', '')
    res_type = data.get('ResType', '')
    res_id = data.get('ResId', '')
    # TEST AUTHORIZE
    if __test_auth_edit(editor_id, space_id) <= 0:
        raise Exception('没有权限或空间不存在')
    deleted = db.delete("DELETE FROM `space_resource` WHERE `space_id`=%s AND `res_type`=%s AND `res_id`=%s",
        (space_id, res_type, res_id))
    db.end()
    if deleted <= 0:
        raise Exception('删除失败或资源不存在')
    return {
        'Id': space_id,
        'ResType': res_type,
        'ResId': res_id,
    }
def space_res_order(data):
    """Update the custom sort fields of a resource inside a space.

    `data` keys: UserKey, Id (space id), ResType, ResId and the optional
    OrderField1..3 values.  The caller must have edit rights on the space.
    """
    userId = UserService.user_id(data['UserKey'])
    db = MySQL()
    # TEST AUTHORIZE
    if __test_auth_edit(userId, data.get('Id', '')) > 0:
        # Bug fix: the parameter tuple used to contain a stray `userId`
        # (7 values for 6 placeholders), which broke the UPDATE; the unused
        # `newId = Utils.UUID()` local was dropped as well.
        result = db.update("UPDATE `space_resource` SET `order_field1`=%s, `order_field2`=%s, `order_field3`=%s WHERE `space_id`=%s AND `res_type`=%s AND `res_id`=%s",
            (data.get('OrderField1', None), data.get('OrderField2', None), data.get('OrderField3', None), data.get('Id', ''), data.get('ResType', ''), data.get('ResId', '')))
        db.end()
        if result > 0:
            return {
                'Id': data.get('Id', ''),
                'ResType': data.get('ResType', ''),
                'ResId': data.get('ResId', ''),
            }
        else:
            raise Exception('更新失败或空间不存在')
    else:
        raise Exception('没有权限或空间不存在')
def space_res_list(data):
    """List resources of one type inside a space, paged and sorted.

    Requires view rights on the space.  `Sort` (1..3) selects one of the
    order_field columns, `Order` 0 means DESC / 1 means ASC, and
    `Offset`/`Max` page the result (Max capped at 100).
    """
    userId = UserService.user_id(data['UserKey'])
    if __test_auth_view(userId, data.get('Id', '')) > 0:
        db = MySQL()
        offset = long(data.get('Offset', 0))  # Python 2 `long`
        # Clamp user-supplied sort column / page size to safe values.
        sort = max(1, min(3, int(data.get('Sort', 1))))
        order = int(data.get('Order', 0))
        listMax = min(100, data.get('Max', 10))
        resCount = db.get("SELECT COUNT(*) AS c FROM `space_resource` WHERE `space_id` = %s AND `res_type`=%s", (data.get('Id', ''), data.get('ResType', '')))['c']
        resList = db.list("SELECT * FROM `space_resource` WHERE `space_id` = %s AND `res_type`=%s",
            (data.get('Id', ''), data.get('ResType', '')), sort='order_field%s'%sort, order='DESC' if order == 0 else 'ASC', offset=offset, pagesize=listMax )
        results = []
        for res in resList:
            results.append({
                'ResId': res['res_id'],
                'OrderField1': res['order_field1'],
                'OrderField2': res['order_field2'],
                'OrderField3': res['order_field3'],
            })
        return {
            'Id': data.get('Id', ''),
            'ResType': data.get('ResType', ''),
            'Count': resCount,
            'Offset': offset,
            'Max': listMax,
            'Sort': sort,
            'Order': order,
            'Results': results
        }
    else:
        raise Exception('没有权限或空间不存在')
def space_authorize(data):
    """Grant `UserId` access to space `Id`; only the space owner may do this.

    `AllowEdit` (0/1) toggles write access.  REPLACE INTO makes the call
    idempotent per (space, user) pair.
    """
    userId = UserService.user_id(data['UserKey'])
    spaceInstance = space_get(data.get('Id', ''))
    if userId == spaceInstance['user_id']:
        # Normalise AllowEdit to exactly 0 or 1.
        allowEdit = min(1, max(0, int(data.get('AllowEdit', 0))))
        db = MySQL()
        authorizeUser = UserService.user_get(data.get('UserId', ''))
        result = db.update("REPLACE INTO `space_authorize` (`space_id`, `user_id`, `allow_edit`) VALUES (%s,%s,%s)",
            (data.get('Id', ''), data.get('UserId', ''), allowEdit))
        db.end()
        return {
            'Id': spaceInstance['id'],
            'Name': spaceInstance['name'],
            'UserId': authorizeUser['id'],
            'UserName': authorizeUser['name'],
            'AllowEdit': allowEdit,
        }
    else:
        raise Exception('没有权限或空间不存在')
def space_unauthorize(data):
    """Revoke a user's authorization on a space owned by the caller."""
    caller_id = UserService.user_id(data['UserKey'])
    target_space = space_get(data.get('Id', ''))
    if caller_id != target_space['user_id']:
        raise Exception('没有权限或空间不存在')
    db = MySQL()
    removed = db.delete("DELETE FROM `space_authorize` WHERE `space_id`=%s AND `user_id`=%s",
        (data.get('Id', ''), data.get('UserId', '')))
    db.end()
    if removed <= 0:
        raise Exception('删除失败或授权不存在')
    return {
        'Id': data.get('Id', ''),
        'UserId': data.get('UserId', ''),
    }
def space_authorize_list(data):
    """List every user authorized on space `Id`; only the owner may ask."""
    userId = UserService.user_id(data['UserKey'])
    spaceInstance = space_get(data.get('Id', ''))
    if userId == spaceInstance['user_id']:
        db = MySQL()
        results=[]
        for item in db.list("SELECT DISTINCT * FROM `space_authorize` WHERE `space_id`=%s", data.get('Id', '')):
            # Resolve the authorized user's display name for the response.
            authorizeUser = UserService.user_get(item['user_id'])
            results.append({
                'UserId': item['user_id'],
                'UserName': authorizeUser['name'],
                'AllowEdit': item['allow_edit']
            })
        return {
            'Id': data.get('Id', ''),
            'Name': spaceInstance['name'],
            'Results': results
        }
    else:
        raise Exception('没有权限或空间不存在')
def space_authorized_spaces(data):
    """List the spaces that other owners have shared with the calling user."""
    userId = UserService.user_id(data['UserKey'])
    userInstance = UserService.user_get(userId)
    db = MySQL()
    results=[]
    for item in db.list("SELECT DISTINCT * FROM `space_authorize` WHERE `user_id`=%s", userId):
        spaceInstance = space_get(item['space_id'])
        if spaceInstance:
            # The owner account may have been deleted; notRaise tolerates a
            # missing user record so the space is still listed.
            spaceOwner = UserService.user_get(spaceInstance['user_id'], notRaise=True)
            results.append({
                'Id': spaceInstance['id'],
                'Name': spaceInstance['name'],
                'Owner': spaceOwner['name'] if spaceOwner else None,
                'OwnerId': spaceInstance['user_id'],
                'AllowEdit': item['allow_edit']
            })
    return {
        'Count': len(results),
        'Results': results
    }
def space_authorized_resources(data):
    """List resources of one type across spaces shared with the caller.

    Either a single `SpaceId` is given, or all spaces shared by `OwnerId`
    with the calling user are used.  Paging/sorting parameters mirror
    space_res_list.  Raises when no accessible space matches.
    """
    spaceIds = []
    if data.get('SpaceId', None):
        spaceIds.append(data.get('SpaceId'))
    else:
        ownerId = data.get('OwnerId', None)
        for space in space_authorized_spaces(data)['Results']:
            if space['OwnerId'] == ownerId:
                spaceIds.append(space['Id'])
    if len(spaceIds) > 0:
        offset = long(data.get('Offset', 0))  # Python 2 `long`
        sort = max(1, min(3, int(data.get('Sort', 1))))
        order = int(data.get('Order', 0))
        listMax = min(100, data.get('Max', 10))
        # Build one `%s` placeholder per space id for the IN (...) clause.
        prefixCountSQL = 'SELECT COUNT(*) AS c FROM `space_resource` WHERE `space_id` IN (%s)' % ', '.join(list(map(lambda x: '%s', spaceIds)))
        prefixSelectSQL = 'SELECT * FROM `space_resource` WHERE `space_id` IN (%s)' % ', '.join(list(map(lambda x: '%s', spaceIds)))
        db = MySQL()
        resCount = db.get(prefixCountSQL + " AND `res_type`=%s", tuple(spaceIds) + (data.get('ResType', None),)) ['c']
        resList = db.list(prefixSelectSQL + " AND `res_type`=%s",
            tuple(spaceIds) + (data.get('ResType', None),), sort='order_field%s'%sort, order='DESC' if order == 0 else 'ASC', offset=offset, pagesize=listMax)
        results = []
        for res in resList:
            # One extra lookup per row to expose the space name.
            spaceInstance = space_get(res['space_id'])
            results.append({
                'Id': res['space_id'],
                'Name': spaceInstance['name'],
                'ResId': res['res_id'],
                'OrderField1': res['order_field1'],
                'OrderField2': res['order_field2'],
                'OrderField3': res['order_field3'],
            })
        return {
            'ResType': data.get('ResType', ''),
            'Count': resCount,
            'Offset': offset,
            'Max': listMax,
            'Sort': sort,
            'Order': order,
            'Results': results,
        }
    else:
        raise Exception('没有可访问的空间')
def space_get(spaceId):
    """Fetch the `space` row with the given id."""
    db = MySQL()
    return db.get("SELECT * FROM `space` WHERE `id` = %s", spaceId)
def __test_auth_edit(userId, spaceId):
    """Return 1 if userId may edit spaceId, 0 if not, -1 if it doesn't exist."""
    db = MySQL()
    authorized = 0
    # TEST AUTHORIZE
    spaceInstance = db.get('SELECT * FROM `space` WHERE `id` = %s', (spaceId))
    if spaceInstance:
        if userId == spaceInstance['user_id']:
            authorized = 1
        else:
            # Bug fix: this query used the undefined name `data` for the
            # space id, raising NameError for any non-owner caller.
            authorized = db.get('SELECT COUNT(*) AS c FROM `space_authorize` WHERE `space_id`=%s AND `user_id` = %s AND `allow_edit` = 1', (spaceId, userId))['c']
    else:
        authorized = -1
    return authorized
def __test_auth_view(userId, spaceId):
    """Return 1 if userId may view spaceId, 0 if not, -1 if it doesn't exist."""
    db = MySQL()
    authorized = 0
    # TEST AUTHORIZE
    spaceInstance = db.get('SELECT * FROM `space` WHERE `id` = %s', (spaceId))
    if spaceInstance:
        if userId == spaceInstance['user_id']:
            authorized = 1
        else:
            # Bug fix: this query used the undefined name `data` for the
            # space id, raising NameError for any non-owner caller.
            authorized = db.get('SELECT COUNT(*) AS c FROM `space_authorize` WHERE `space_id`=%s AND `user_id` = %s', (spaceId, userId))['c']
    else:
        authorized = -1
    return authorized
import unittest
class Test(unittest.TestCase):
    """Smoke test for space_create.

    Needs a reachable MySQL backend and a valid user key passed as the
    first command line argument.
    """
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testCreate(self):
        # Bug fix: `sys` was only imported at module level *after* the
        # `unittest.main()` call below this class, so referencing sys.argv
        # here raised NameError when the tests ran.  Import it locally.
        import sys
        newSpace = space_create({
            'UserKey': sys.argv[1],
            'Name': '测试空间ABC'
        })
        pass
# NOTE(review): when executed as a script, unittest.main() runs the tests
# and then exits the interpreter, so the manual smoke-test code below this
# guard is never reached in that case.
if __name__ == '__main__':
    unittest.main()
import sys, json
# Manual smoke-test driver (Python 2): pass a user key as first argument.
if len(sys.argv) > 1:
    newSpace = space_create({
        'UserKey': sys.argv[1],
        'Name': '测试空间ABC'
    })
    print json.dumps(space_list({ 'UserKey': sys.argv[1] }),sort_keys=False,indent=4)
    space_reindex({
        'UserKey': sys.argv[1],
        'Id': newSpace['Id'],
        'After': 'HEAD',
    })
    print json.dumps(space_list({ 'UserKey': sys.argv[1] }),sort_keys=False,indent=4)
    space_rename({
        'UserKey': sys.argv[1],
        'Id': newSpace['Id'],
        'Name': 'test-123' + datetime.now().strftime('%H:%M:%S'),
    })
    print json.dumps(space_list({ 'UserKey': sys.argv[1] }),sort_keys=False,indent=4)
    xx = space_res_list({
        'UserKey': '59977db2d5de49c385b6942fa025f252',
        'Id': 'c95fb2b4148e47538f6c8958edd307e1',
        'ResType': 'video'
    })
    print xx
print json.dumps(xx,sort_keys=False,indent=4) | gpl-2.0 |
bregman-arie/ansible | lib/ansible/parsing/metadata.py | 117 | 10058 | # (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import sys
import yaml
from ansible.module_utils._text import to_text
# There are currently defaults for all metadata fields so we can add it
# automatically if a file doesn't specify it
DEFAULT_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}
class ParseError(Exception):
    """Raised when a module file cannot be parsed for metadata."""
def _seek_end_of_dict(module_data, start_line, start_col, next_node_line, next_node_col):
    """Look for the end of a dict in a set of lines.

    We know the starting position of the dict and we know the start of the
    next code node but in between there may be multiple newlines and
    comments.  There may also be multiple python statements on the same
    line (separated by semicolons).

    :arg module_data: Byte string with the whole module source.
    :arg start_line: 0-based line index where the dict starts.
    :arg start_col: Column where the dict starts (unused here).
    :arg next_node_line: 1-based ast lineno of the next code node, or None
        when the dict is the last statement in the file.
    :arg next_node_col: Column of the next code node (0 when it starts a
        fresh line).
    :returns: (end_line, end_col) of the closing ``}`` (0-based).
    :raises ParseError: when no closing brace is found, or multiple
        statements share a line.
    """
    if next_node_line is None:
        # The dict is the last statement in the file
        snippet = module_data.splitlines()[start_line:]
        next_node_col = 0
        # Include the last line in the file
        last_line_offset = 0
    else:
        # It's somewhere in the middle so we need to separate it from the rest.
        # NOTE: start_line is 0-based while next_node_line is a 1-based ast
        # lineno, so this slice runs up to and INCLUDING the next node's own
        # line; last_line_offset drops that line again below.
        snippet = module_data.splitlines()[start_line:next_node_line]
        # Do not include the last line because that's where the next node
        # starts
        last_line_offset = 1
    if next_node_col == 0:
        # This handles all variants where there are only comments and blank
        # lines between the dict and the next code node

        # Step backwards through all the lines in the snippet
        for line_idx, line in tuple(reversed(tuple(enumerate(snippet))))[last_line_offset:]:
            end_col = None
            # Step backwards through all the characters in the line
            for col_idx, char in reversed(tuple(enumerate(c for c in line))):
                if not isinstance(char, bytes):
                    # Python3 wart. slicing a byte string yields integers
                    char = bytes((char,))
                if char == b'}' and end_col is None:
                    # Potentially found the end of the dict
                    end_col = col_idx
                elif char == b'#' and end_col is not None:
                    # The previous '}' was part of a comment. Keep trying
                    end_col = None
            if end_col is not None:
                # Found the end!
                end_line = start_line + line_idx
                break
        else:
            # for/else: every candidate line was scanned without a match.
            raise ParseError('Unable to find the end of dictionary')
    else:
        # Harder cases involving multiple statements on one line
        # Good Ansible Module style doesn't do this so we're just going to
        # treat this as an error for now:
        raise ParseError('Multiple statements per line confuses the module metadata parser.')
    return end_line, end_col
def _seek_end_of_string(module_data, start_line, start_col, next_node_line, next_node_col):
"""
This is much trickier than finding the end of a dict. A dict has only one
ending character, "}". Strings have four potential ending characters. We
have to parse the beginning of the string to determine what the ending
character will be.
Examples:
ANSIBLE_METADATA = '''[..]''' # Optional comment with confusing chars '''
# Optional comment with confusing chars '''
DOCUMENTATION = [..]
ANSIBLE_METADATA = '''
[..]
'''
DOCUMENTATIONS = [..]
ANSIBLE_METADATA = '''[..]''' ; DOCUMENTATION = [..]
SHORT_NAME = ANSIBLE_METADATA = '''[..]''' ; DOCUMENTATION = [..]
String marker variants:
* '[..]'
* "[..]"
* '''[..]'''
* \"\"\"[..]\"\"\"
Each of these come in u, r, and b variants:
* '[..]'
* u'[..]'
* b'[..]'
* r'[..]'
* ur'[..]'
* ru'[..]'
* br'[..]'
* b'[..]'
* rb'[..]'
"""
raise NotImplementedError('Finding end of string not yet implemented')
def extract_metadata(module_ast=None, module_data=None, offsets=False):
    """Extract the metadata from a module.

    :kwarg module_ast: ast representation of the module.  At least one of this
        or ``module_data`` must be given.  If the code calling
        :func:`extract_metadata` has already parsed the module_data into an ast,
        giving the ast here will save reparsing it.
    :kwarg module_data: Byte string containing a module's code.  At least one
        of this or ``module_ast`` must be given.
    :kwarg offsets: If set to True, offsets into the source code will be
        returned.  This requires that ``module_data`` be set.
    :returns: a tuple of metadata (a dict), line the metadata starts on,
        column the metadata starts on, line the metadata ends on, column the
        metadata ends on, and the names the metadata is assigned to.  One of
        the names the metadata is assigned to will be ANSIBLE_METADATA.  If no
        metadata is found, the tuple will be (None, -1, -1, -1, -1, None).
        If ``offsets`` is False then the tuple will consist of
        (metadata, -1, -1, -1, -1, None).
    :raises ansible.parsing.metadata.ParseError: if ``module_data`` does not parse
    :raises SyntaxError: if ``module_data`` is needed but does not parse correctly
    """
    if offsets and module_data is None:
        raise TypeError('If offsets is True then module_data must also be given')

    if module_ast is None and module_data is None:
        raise TypeError('One of module_ast or module_data must be given')

    metadata = None
    start_line = -1
    start_col = -1
    end_line = -1
    end_col = -1
    targets = None
    if module_ast is None:
        module_ast = ast.parse(module_data)

    # Walk the top-level statements from the bottom of the file upwards so
    # that, with several assignments, the last one in the file wins.
    for root_idx, child in reversed(list(enumerate(module_ast.body))):
        if isinstance(child, ast.Assign):
            for target in child.targets:
                if isinstance(target, ast.Name) and target.id == 'ANSIBLE_METADATA':
                    metadata = ast.literal_eval(child.value)
                    if not offsets:
                        # Caller doesn't want source positions; skip the
                        # (module_data-dependent) end-seeking below.
                        continue
                    try:
                        # Determine where the next node starts
                        next_node = module_ast.body[root_idx + 1]
                        next_lineno = next_node.lineno
                        next_col_offset = next_node.col_offset
                    except IndexError:
                        # Metadata is defined in the last node of the file
                        next_lineno = None
                        next_col_offset = None
                    if isinstance(child.value, ast.Dict):
                        # Determine where the current metadata ends
                        end_line, end_col = _seek_end_of_dict(module_data,
                                                              child.lineno - 1,
                                                              child.col_offset,
                                                              next_lineno,
                                                              next_col_offset)
                    elif isinstance(child.value, ast.Str):
                        metadata = yaml.safe_load(child.value.s)
                        end_line, end_col = _seek_end_of_string(module_data,
                                                                child.lineno - 1,
                                                                child.col_offset,
                                                                next_lineno,
                                                                next_col_offset)
                    elif isinstance(child.value, ast.Bytes):
                        metadata = yaml.safe_load(to_text(child.value.s, errors='surrogate_or_strict'))
                        end_line, end_col = _seek_end_of_string(module_data,
                                                                child.lineno - 1,
                                                                child.col_offset,
                                                                next_lineno,
                                                                next_col_offset)
                    else:
                        raise ParseError('Ansible plugin metadata must be a dict')

                    # Do these after the if-else so we don't pollute them in
                    # case this was a false positive
                    start_line = child.lineno - 1
                    start_col = child.col_offset
                    targets = [t.id for t in child.targets]
                    break

        if metadata is not None:
            # Once we've found the metadata we're done
            break

    return metadata, start_line, start_col, end_line, end_col, targets
| gpl-3.0 |
sudovijay/youtube-dl | test/test_utils.py | 40 | 27918 | #!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Various small unit tests
import io
import json
import xml.etree.ElementTree
from youtube_dl.utils import (
age_restricted,
args_to_str,
clean_html,
DateRange,
detect_exe_version,
encodeFilename,
escape_rfc3986,
escape_url,
ExtractorError,
find_xpath_attr,
fix_xml_ampersands,
InAdvancePagedList,
intlist_to_bytes,
is_html,
js_to_json,
limit_length,
OnDemandPagedList,
orderedSet,
parse_duration,
parse_filesize,
parse_iso8601,
read_batch_urls,
sanitize_filename,
sanitize_path,
prepend_extension,
replace_extension,
shell_quote,
smuggle_url,
str_to_int,
strip_jsonp,
struct_unpack,
timeconvert,
unescapeHTML,
unified_strdate,
unsmuggle_url,
uppercase_escape,
lowercase_escape,
url_basename,
urlencode_postdata,
version_tuple,
xpath_with_ns,
xpath_text,
render_table,
match_str,
parse_dfxp_time_expr,
dfxp2srt,
)
class TestUtil(unittest.TestCase):
    def test_timeconvert(self):
        """timeconvert() must return None for empty or unparseable input."""
        self.assertTrue(timeconvert('') is None)
        self.assertTrue(timeconvert('bougrg') is None)
    def test_sanitize_filename(self):
        """Default (non-restricted) sanitization: keep safe characters,
        replace path separators and other forbidden ones."""
        self.assertEqual(sanitize_filename('abc'), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')
        self.assertEqual(sanitize_filename('123'), '123')
        # Path separators and shell-special characters get replaced.
        self.assertEqual('abc_de', sanitize_filename('abc/de'))
        self.assertFalse('/' in sanitize_filename('abc/de///'))
        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de'))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|'))
        self.assertEqual('yes no', sanitize_filename('yes? no'))
        self.assertEqual('this - that', sanitize_filename('this: that'))
        # Non-ASCII characters survive in non-restricted mode.
        self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
        aumlaut = 'ä'
        self.assertEqual(sanitize_filename(aumlaut), aumlaut)
        tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
        self.assertEqual(sanitize_filename(tests), tests)
        self.assertEqual(
            sanitize_filename('New World record at 0:12:34'),
            'New World record at 0_12_34')
        # Leading dashes/dots are neutralised unless the value is an id.
        self.assertEqual(sanitize_filename('--gasdgf'), '_-gasdgf')
        self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf'), 'gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
        forbidden = '"\0\\/'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc))
    def test_sanitize_filename_restricted(self):
        """Restricted mode must produce ASCII-safe, shell-safe names."""
        self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')
        self.assertEqual(sanitize_filename('123', restricted=True), '123')
        self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
        self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))
        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
        # Even spaces are replaced in restricted mode.
        self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
        self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))
        # Non-ASCII characters are transliterated or dropped.
        tests = 'a\xe4b\u4e2d\u56fd\u7684c'
        self.assertEqual(sanitize_filename(tests, restricted=True), 'a_b_c')
        self.assertTrue(sanitize_filename('\xf6', restricted=True) != '')  # No empty filename
        forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))
        # Handle a common case more neatly
        self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
        self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
        # .. but make sure the file name is never empty
        self.assertTrue(sanitize_filename('-', restricted=True) != '')
        self.assertTrue(sanitize_filename(':', restricted=True) != '')
    def test_sanitize_ids(self):
        """Video ids (is_id=True) must pass through completely unchanged."""
        self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
        self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
        self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')
    def test_sanitize_path(self):
        """sanitize_path maps Windows-forbidden path characters to '#'."""
        if sys.platform != 'win32':
            # The function is effectively a no-op on other platforms.
            return
        self.assertEqual(sanitize_path('abc'), 'abc')
        self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc|def'), 'abc#def')
        self.assertEqual(sanitize_path('<>:"|?*'), '#######')
        self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
        self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')
        # Extended-length (\\?\) prefixes must be preserved, not escaped.
        self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
        self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
        # Output-template placeholders must pass through untouched.
        self.assertEqual(
            sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
            'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')
        self.assertEqual(
            sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
            'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
        # Trailing dots are invalid on Windows and are replaced per component.
        self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
        self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
        self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')
        # Relative navigation components are kept.
        self.assertEqual(sanitize_path('../abc'), '..\\abc')
        self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
        self.assertEqual(sanitize_path('./abc'), 'abc')
        self.assertEqual(sanitize_path('./../abc'), '..\\abc')
def test_prepend_extension(self):
self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')
def test_replace_extension(self):
self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')
def test_ordered_set(self):
self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
self.assertEqual(orderedSet([]), [])
self.assertEqual(orderedSet([1]), [1])
# keep the list ordered
self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])
def test_unescape_html(self):
self.assertEqual(unescapeHTML('%20;'), '%20;')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(unescapeHTML('/'), '/')
self.assertEqual(
unescapeHTML('é'), 'é')
def test_daterange(self):
_20century = DateRange("19000101", "20000101")
self.assertFalse("17890714" in _20century)
_ac = DateRange("00010101")
self.assertTrue("19690721" in _ac)
_firstmilenium = DateRange(end="10000101")
self.assertTrue("07110427" in _firstmilenium)
def test_unified_dates(self):
self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
self.assertEqual(unified_strdate('8/7/2009'), '20090708')
self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
self.assertEqual(unified_strdate('1968 12 10'), '19681210')
self.assertEqual(unified_strdate('1968-12-10'), '19681210')
self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
self.assertEqual(
unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
'20141126')
self.assertEqual(
unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
'20150202')
self.assertEqual(unified_strdate('25-09-2014'), '20140925')
    def test_find_xpath_attr(self):
        """find_xpath_attr returns the first node carrying an attribute (optionally with a given value)."""
        testxml = '''<root>
            <node/>
            <node x="a"/>
            <node x="a" y="c" />
            <node x="b" y="d" />
            <node x="" />
        </root>'''
        doc = xml.etree.ElementTree.fromstring(testxml)

        # Missing nodes or attributes yield None.
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
        # Without a value, the first node that has the attribute at all wins.
        self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
        # An empty attribute value still counts as present.
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])
    def test_xpath_with_ns(self):
        """xpath_with_ns expands 'prefix:tag' path steps using the supplied namespace map."""
        testxml = '''<root xmlns:media="http://example.com/">
            <media:song>
                <media:author>The Author</media:author>
                <url>http://server.com/download.mp3</url>
            </media:song>
        </root>'''
        doc = xml.etree.ElementTree.fromstring(testxml)
        find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
        self.assertTrue(find('media:song') is not None)
        self.assertEqual(find('media:song/media:author').text, 'The Author')
        # Un-prefixed steps must keep working alongside namespaced ones.
        self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')
    def test_xpath_text(self):
        """xpath_text returns node text, None when the node is absent, and raises if fatal=True."""
        testxml = '''<root>
            <div>
                <p>Foo</p>
            </div>
        </root>'''
        doc = xml.etree.ElementTree.fromstring(testxml)
        self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
        self.assertTrue(xpath_text(doc, 'div/bar') is None)
        # fatal=True turns a miss into an ExtractorError instead of None.
        self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)
def test_smuggle_url(self):
data = {"ö": "ö", "abc": [3]}
url = 'https://foo.bar/baz?x=y#a'
smug_url = smuggle_url(url, data)
unsmug_url, unsmug_data = unsmuggle_url(smug_url)
self.assertEqual(url, unsmug_url)
self.assertEqual(data, unsmug_data)
res_url, res_data = unsmuggle_url(url)
self.assertEqual(res_url, url)
self.assertEqual(res_data, None)
    def test_shell_quote(self):
        """Arguments containing single quotes must be shell-escaped with the '"'"' idiom."""
        args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
        self.assertEqual(shell_quote(args), """ffmpeg -i 'ñ€ß'"'"'.mp4'""")
def test_str_to_int(self):
self.assertEqual(str_to_int('123,456'), 123456)
self.assertEqual(str_to_int('123.456'), 123456)
def test_url_basename(self):
self.assertEqual(url_basename('http://foo.de/'), '')
self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
self.assertEqual(
url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
'trailer.mp4')
def test_parse_duration(self):
self.assertEqual(parse_duration(None), None)
self.assertEqual(parse_duration(False), None)
self.assertEqual(parse_duration('invalid'), None)
self.assertEqual(parse_duration('1'), 1)
self.assertEqual(parse_duration('1337:12'), 80232)
self.assertEqual(parse_duration('9:12:43'), 33163)
self.assertEqual(parse_duration('12:00'), 720)
self.assertEqual(parse_duration('00:01:01'), 61)
self.assertEqual(parse_duration('x:y'), None)
self.assertEqual(parse_duration('3h11m53s'), 11513)
self.assertEqual(parse_duration('3h 11m 53s'), 11513)
self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
self.assertEqual(parse_duration('62m45s'), 3765)
self.assertEqual(parse_duration('6m59s'), 419)
self.assertEqual(parse_duration('49s'), 49)
self.assertEqual(parse_duration('0h0m0s'), 0)
self.assertEqual(parse_duration('0m0s'), 0)
self.assertEqual(parse_duration('0s'), 0)
self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
self.assertEqual(parse_duration('T30M38S'), 1838)
self.assertEqual(parse_duration('5 s'), 5)
self.assertEqual(parse_duration('3 min'), 180)
self.assertEqual(parse_duration('2.5 hours'), 9000)
self.assertEqual(parse_duration('02:03:04'), 7384)
self.assertEqual(parse_duration('01:02:03:04'), 93784)
self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
self.assertEqual(parse_duration('87 Min.'), 5220)
def test_fix_xml_ampersands(self):
self.assertEqual(
fix_xml_ampersands('"&x=y&z=a'), '"&x=y&z=a')
self.assertEqual(
fix_xml_ampersands('"&x=y&wrong;&z=a'),
'"&x=y&wrong;&z=a')
self.assertEqual(
fix_xml_ampersands('&'><"'),
'&'><"')
self.assertEqual(
fix_xml_ampersands('Ӓ᪼'), 'Ӓ᪼')
self.assertEqual(fix_xml_ampersands('&#&#'), '&#&#')
    def test_paged_list(self):
        """OnDemandPagedList and InAdvancePagedList must slice exactly like a plain list."""
        def testPL(size, pagesize, sliceargs, expected):
            # Page generator: yields pagesize consecutive ints per page
            # (the final page may be short).
            def get_page(pagenum):
                firstid = pagenum * pagesize
                upto = min(size, pagenum * pagesize + pagesize)
                for i in range(firstid, upto):
                    yield i

            pl = OnDemandPagedList(get_page, pagesize)
            got = pl.getslice(*sliceargs)
            self.assertEqual(got, expected)

            iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
            got = iapl.getslice(*sliceargs)
            self.assertEqual(got, expected)

        # (start[, end]) slice arguments, including out-of-range bounds.
        testPL(5, 2, (), [0, 1, 2, 3, 4])
        testPL(5, 2, (1,), [1, 2, 3, 4])
        testPL(5, 2, (2,), [2, 3, 4])
        testPL(5, 2, (4,), [4])
        testPL(5, 2, (0, 3), [0, 1, 2])
        testPL(5, 2, (1, 4), [1, 2, 3])
        testPL(5, 2, (2, 99), [2, 3, 4])
        testPL(5, 2, (20, 99), [])
    def test_struct_unpack(self):
        """struct_unpack (the compat wrapper around struct.unpack) handles bytes input."""
        self.assertEqual(struct_unpack('!B', b'\x00'), (0,))
    def test_read_batch_urls(self):
        """read_batch_urls strips a BOM, whitespace, CRs, and '#'/';' comment lines."""
        f = io.StringIO('''\xef\xbb\xbf foo
            bar\r
            baz
            # More after this line\r
            ; or after this
            bam''')
        self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])
    def test_urlencode_postdata(self):
        """urlencode_postdata must return bytes, ready to use as an HTTP POST body."""
        data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
        self.assertTrue(isinstance(data, bytes))
def test_parse_iso8601(self):
self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
    def test_strip_jsonp(self):
        """strip_jsonp peels the callback wrapper (and trailing comments) off JSONP bodies."""
        stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
        d = json.loads(stripped)
        self.assertEqual(d, [{"id": "532cb", "x": 3}])

        # Trailing '//' comments after the wrapper must be discarded too.
        stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
        d = json.loads(stripped)
        self.assertEqual(d, {'STATUS': 'OK'})
    def test_uppercase_escape(self):
        """uppercase_escape decodes literal \\UXXXXXXXX escapes, leaving plain text alone."""
        self.assertEqual(uppercase_escape('aä'), 'aä')
        self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')
    def test_lowercase_escape(self):
        """lowercase_escape decodes literal \\uXXXX escapes, leaving plain text alone."""
        self.assertEqual(lowercase_escape('aä'), 'aä')
        self.assertEqual(lowercase_escape('\\u0026'), '&')
def test_limit_length(self):
self.assertEqual(limit_length(None, 12), None)
self.assertEqual(limit_length('foo', 12), 'foo')
self.assertTrue(
limit_length('foo bar baz asd', 12).startswith('foo bar'))
self.assertTrue('...' in limit_length('foo bar baz asd', 12))
def test_escape_rfc3986(self):
reserved = "!*'();:@&=+$,/?#[]"
unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
self.assertEqual(escape_rfc3986(reserved), reserved)
self.assertEqual(escape_rfc3986(unreserved), unreserved)
self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
    def test_escape_url(self):
        """escape_url percent-encodes non-ASCII path/query/fragment parts of full URLs."""
        # Note: the 'Havré' below uses a combining acute accent (e + U+0301),
        # hence the %CC%81 in the expected output.
        self.assertEqual(
            escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
            'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
        )
        self.assertEqual(
            escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
            'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
        )
        # The host (IDN) part is left alone; only the path is escaped.
        self.assertEqual(
            escape_url('http://тест.рф/фрагмент'),
            'http://тест.рф/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
        )
        self.assertEqual(
            escape_url('http://тест.рф/абв?абв=абв#абв'),
            'http://тест.рф/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
        )
        # Pure-ASCII URLs must pass through untouched.
        self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
    def test_js_to_json_realworld(self):
        """js_to_json converts single-quoted JS object literals into valid JSON."""
        inp = '''{
            'clip':{'provider':'pseudo'}
        }'''
        self.assertEqual(js_to_json(inp), '''{
            "clip":{"provider":"pseudo"}
        }''')
        # The result must actually parse as JSON.
        json.loads(js_to_json(inp))

        inp = '''{
            'playlist':[{'controls':{'all':null}}]
        }'''
        self.assertEqual(js_to_json(inp), '''{
            "playlist":[{"controls":{"all":null}}]
        }''')

        # Already-valid JSON with \\uXXXX escapes must survive unchanged.
        inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
        json_code = js_to_json(inp)
        self.assertEqual(json.loads(json_code), json.loads(inp))
    def test_js_to_json_edgecases(self):
        """js_to_json edge cases: escaped quotes, bare keys, trailing commas, JS code values."""
        # Mixed single-quote escapes and backslashes inside a value.
        on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
        self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})

        on = js_to_json('{"abc": true}')
        self.assertEqual(json.loads(on), {'abc': True})

        # Ignore JavaScript code as well
        on = js_to_json('''{
            "x": 1,
            y: "a",
            z: some.code
        }''')
        d = json.loads(on)
        self.assertEqual(d['x'], 1)
        self.assertEqual(d['y'], 'a')

        # Trailing commas are stripped to keep the output valid JSON.
        on = js_to_json('["abc", "def",]')
        self.assertEqual(json.loads(on), ['abc', 'def'])

        on = js_to_json('{"abc": "def",}')
        self.assertEqual(json.loads(on), {'abc': 'def'})
def test_clean_html(self):
self.assertEqual(clean_html('a:\nb'), 'a: b')
self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
    def test_intlist_to_bytes(self):
        """intlist_to_bytes packs a list of 0-255 ints into a bytes object."""
        self.assertEqual(
            intlist_to_bytes([0, 1, 127, 128, 255]),
            b'\x00\x01\x7f\x80\xff')
    def test_args_to_str(self):
        """args_to_str shell-quotes only the arguments that need it (spaces, empty strings)."""
        self.assertEqual(
            args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
            'foo ba/r -baz \'2 be\' \'\''
        )
def test_parse_filesize(self):
self.assertEqual(parse_filesize(None), None)
self.assertEqual(parse_filesize(''), None)
self.assertEqual(parse_filesize('91 B'), 91)
self.assertEqual(parse_filesize('foobar'), None)
self.assertEqual(parse_filesize('2 MiB'), 2097152)
self.assertEqual(parse_filesize('5 GB'), 5000000000)
self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
self.assertEqual(parse_filesize('1,24 KB'), 1240)
def test_version_tuple(self):
self.assertEqual(version_tuple('1'), (1,))
self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
    def test_detect_exe_version(self):
        """detect_exe_version extracts the version token from ffmpeg-style banners."""
        self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
configuration: --prefix=/usr --extra-'''), '1.2.1')
        # Git snapshot versions are returned verbatim.
        self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
        # Leading unrelated output lines must be skipped.
        self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
Trying to open render node...
Success at /dev/dri/renderD128.
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
def test_age_restricted(self):
self.assertFalse(age_restricted(None, 10)) # unrestricted content
self.assertFalse(age_restricted(1, None)) # unrestricted policy
self.assertFalse(age_restricted(8, 10))
self.assertTrue(age_restricted(18, 14))
self.assertFalse(age_restricted(18, 18))
    def test_is_html(self):
        """is_html sniffs HTML across encodings (UTF-8/16/32, with and without BOMs)."""
        self.assertFalse(is_html(b'\x49\x44\x43<html'))
        self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
        self.assertTrue(is_html(  # UTF-8 with BOM
            b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
        self.assertTrue(is_html(  # UTF-16-LE
            b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
        ))
        self.assertTrue(is_html(  # UTF-16-BE
            b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
        ))
        self.assertTrue(is_html(  # UTF-32-BE
            b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
        self.assertTrue(is_html(  # UTF-32-LE
            b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
    def test_render_table(self):
        """render_table pads columns so headers and rows line up."""
        # NOTE(review): the expected strings below contain only single spaces,
        # but render_table pads columns to equal width -- the multi-space runs
        # appear to have been collapsed by the export of this file.  Verify
        # against upstream before trusting the exact expected value.
        self.assertEqual(
            render_table(
                ['a', 'bcd'],
                [[123, 4], [9999, 51]]),
            'a bcd\n'
            '123 4\n'
            '9999 51')
    def test_match_str(self):
        """match_str evaluates '&'-joined field filters (presence, !, comparisons, ?, K-suffix)."""
        # Unparsable filter expressions raise.
        self.assertRaises(ValueError, match_str, 'xy>foobar', {})
        # Bare field name tests presence (even a 0 value counts as present).
        self.assertFalse(match_str('xy', {'x': 1200}))
        self.assertTrue(match_str('!xy', {'x': 1200}))
        self.assertTrue(match_str('x', {'x': 1200}))
        self.assertFalse(match_str('!x', {'x': 1200}))
        self.assertTrue(match_str('x', {'x': 0}))
        # Numeric comparisons; a missing field fails unless the '?' suffix
        # makes the comparison optional.
        self.assertFalse(match_str('x>0', {'x': 0}))
        self.assertFalse(match_str('x>0', {}))
        self.assertTrue(match_str('x>?0', {}))
        # 'K' suffix multiplies by 1024.
        self.assertTrue(match_str('x>1K', {'x': 1200}))
        self.assertFalse(match_str('x>2K', {'x': 1200}))
        self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
        self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
        # String equality / inequality.
        self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
        self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
        self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
        self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
        # Combined clauses must all hold.
        self.assertFalse(match_str(
            'like_count > 100 & dislike_count <? 50 & description',
            {'like_count': 90, 'description': 'foo'}))
        self.assertTrue(match_str(
            'like_count > 100 & dislike_count <? 50 & description',
            {'like_count': 190, 'description': 'foo'}))
        self.assertFalse(match_str(
            'like_count > 100 & dislike_count <? 50 & description',
            {'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
        self.assertFalse(match_str(
            'like_count > 100 & dislike_count <? 50 & description',
            {'like_count': 190, 'dislike_count': 10}))
def test_parse_dfxp_time_expr(self):
self.assertEqual(parse_dfxp_time_expr(None), 0.0)
self.assertEqual(parse_dfxp_time_expr(''), 0.0)
self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
    def test_dfxp2srt(self):
        """dfxp2srt converts TTML/DFXP captions to SRT (with and without a default namespace)."""
        # NOTE(review): SRT cues are normally separated by blank lines, but
        # every blank line in this file appears to have been stripped by the
        # export, including inside the srt_data literals below.  Restore them
        # from upstream before trusting the expected output.
        dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
            <tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
            <body>
                <div xml:lang="en">
                    <p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
                    <p begin="1" end="2">第二行<br/>♪♪</p>
                    <p begin="2" dur="1"><span>Third<br/>Line</span></p>
                </div>
            </body>
            </tt>'''
        srt_data = '''1
00:00:00,000 --> 00:00:01,000
The following line contains Chinese characters and special symbols
2
00:00:01,000 --> 00:00:02,000
第二行
♪♪
3
00:00:02,000 --> 00:00:03,000
Third
Line
'''
        self.assertEqual(dfxp2srt(dfxp_data), srt_data)

        # DFXP documents without a default namespace must also convert.
        dfxp_data_no_default_namespace = '''<?xml version="1.0" encoding="UTF-8"?>
            <tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
            <body>
                <div xml:lang="en">
                    <p begin="0" end="1">The first line</p>
                </div>
            </body>
            </tt>'''
        srt_data = '''1
00:00:00,000 --> 00:00:01,000
The first line
'''
        self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
# Run this module's TestCase classes when the file is executed directly.
if __name__ == '__main__':
    unittest.main()
| unlicense |
CopeX/odoo | addons/stock_dropshipping/stock_dropshipping.py | 46 | 1896 | # coding: utf-8
from openerp import models, api, _
from openerp.exceptions import Warning
class sale_order_line(models.Model):
    _inherit = 'sale.order.line'

    @api.multi
    def _check_routing(self, product, warehouse):
        """Skip the stock availability check for supplier->customer routes.

        On a dropship route the product never enters our own stock, so there
        is nothing to verify.  Returns the super() result, forced to True
        when such a route is found.
        """
        res = super(sale_order_line, self)._check_routing(product, warehouse)
        if not res:
            def _is_dropship(rule):
                # A pull rule going straight from a supplier location to a
                # customer location is a dropship leg.
                picking_type = rule.picking_type_id
                return (picking_type.default_location_src_id.usage == 'supplier' and
                        picking_type.default_location_dest_id.usage == 'customer')

            if any(_is_dropship(rule)
                   for line in self
                   for rule in line.route_id.pull_ids):
                res = True
        return res
class purchase_order(models.Model):
    _inherit = 'purchase.order'

    @api.one
    def _check_invoice_policy(self):
        # A dropship PO ships straight to a customer location; combining
        # purchase invoicing "Based on incoming shipments" with a sale order
        # invoiced "On Delivery Order" is not implemented, so refuse it.
        if self.invoice_method == 'picking' and self.location_id.usage == 'customer':
            for proc in self.order_line.mapped('procurement_ids'):
                if proc.sale_line_id.order_id.order_policy == 'picking':
                    raise Warning(_('In the case of a dropship route, it is not possible to have an invoicing control set on "Based on incoming shipments" and a sale order with an invoice creation on "On Delivery Order"'))

    @api.multi
    def wkf_confirm_order(self):
        """ Raise a warning to forbid to have both purchase and sale invoices
            policies at delivery in dropshipping. As it is not implemented.
            This check can be disabled setting 'no_invoice_policy_check' in context
        """
        if not self.env.context.get('no_invoice_policy_check'):
            self._check_invoice_policy()
        # NOTE(review): the super() result is discarded here; workflow
        # methods usually propagate it -- confirm whether a `return` is
        # intended before changing callers.
        super(purchase_order, self).wkf_confirm_order()
| agpl-3.0 |
ratoaq2/Flexget | flexget/plugins/filter/subtitle_queue.py | 8 | 19097 | from __future__ import unicode_literals, division, absolute_import
import glob
import logging
import os
import urllib
import urlparse
import os.path
from sqlalchemy import Column, Integer, String, ForeignKey, or_, and_, DateTime, Boolean
from sqlalchemy.orm import backref, relationship
from sqlalchemy.schema import Table
from datetime import datetime, date, time
from flexget import db_schema, plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import with_session
from flexget.utils.template import RenderError
from flexget.utils.tools import parse_timedelta
# babelfish supplies the Language objects used throughout this plugin; it is
# a hard requirement, surfaced to the user as a plugin dependency error.
try:
    from babelfish import Language
except ImportError:
    raise plugin.DependencyError(issued_by='subtitle_queue', missing='babelfish',
                                 message='subtitle_queue requires the babelfish plugin')

log = logging.getLogger('subtitle_queue')
# Versioned declarative base for this plugin's tables (schema version 0).
Base = db_schema.versioned_base('subtitle_queue', 0)

#: Video extensions stolen from https://github.com/Diaoul/subliminal/blob/master/subliminal/video.py
VIDEO_EXTENSIONS = ('.3g2', '.3gp', '.3gp2', '.3gpp', '.60d', '.ajp', '.asf', '.asx', '.avchd', '.avi', '.bik',
                    '.bix', '.box', '.cam', '.dat', '.divx', '.dmf', '.dv', '.dvr-ms', '.evo', '.flc', '.fli',
                    '.flic', '.flv', '.flx', '.gvi', '.gvp', '.h264', '.m1v', '.m2p', '.m2ts', '.m2v', '.m4e',
                    '.m4v', '.mjp', '.mjpeg', '.mjpg', '.mkv', '.moov', '.mov', '.movhd', '.movie', '.movx', '.mp4',
                    '.mpe', '.mpeg', '.mpg', '.mpv', '.mpv2', '.mxf', '.nsv', '.nut', '.ogg', '.ogm', '.omf', '.ps',
                    '.qt', '.ram', '.rm', '.rmvb', '.swf', '.ts', '.vfw', '.vid', '.video', '.viv', '.vivo', '.vob',
                    '.vro', '.wm', '.wmv', '.wmx', '.wrap', '.wvx', '.wx', '.x264', '.xvid')

SUBTITLE_EXTENSIONS = ('.srt', '.sub', '.smi', '.txt', '.ssa', '.ass', '.mpl')  # Borrowed from Subliminal

# Many-to-many link table between queued items and their requested languages.
association_table = Table('association', Base.metadata,
                          Column('sub_queue_id', Integer, ForeignKey('subtitle_queue.id')),
                          Column('lang_id', Integer, ForeignKey('subtitle_language.id'))
                          )
def normalize_path(path):
    """Return *path* as an absolute, case-normalised path, or None for falsy input."""
    if not path:
        return None
    return os.path.normcase(os.path.abspath(path))
class SubtitleLanguages(Base):
    """ORM row for a single requested subtitle language (unique IETF code)."""
    __tablename__ = 'subtitle_language'

    id = Column(Integer, primary_key=True)
    # Canonical IETF language string, e.g. 'en' or 'pt-BR'.
    language = Column(String, unique=True, index=True)

    def __init__(self, language):
        # Normalise through babelfish so equivalent spellings map to one row.
        self.language = unicode(Language.fromietf(language))

    def __str__(self):
        return '<SubtitleLanguage(%s)>' % self.language
class QueuedSubtitle(Base):
    """ORM row for one file (or folder) queued for subtitle fetching."""
    __tablename__ = 'subtitle_queue'

    id = Column(Integer, primary_key=True)
    title = Column(String)  # Not completely necessary
    path = Column(String, unique=True, nullable=False)  # Absolute path of file to fetch subtitles for
    alternate_path = Column(String, unique=True)  # Absolute path of file to fetch subtitles for
    added = Column(DateTime)  # Used to determine age
    # Human-readable interval (e.g. "7 days") after which the item expires.
    stop_after = Column(String)
    # True once all requested languages have been fetched.
    downloaded = Column(Boolean)
    languages = relationship(SubtitleLanguages, secondary=association_table, backref="primary", lazy='joined')

    def __init__(self, path, alternate_path, title, stop_after="7 days"):
        self.path = normalize_path(path)
        if alternate_path:
            self.alternate_path = normalize_path(alternate_path)
        self.added = datetime.now()
        self.stop_after = stop_after
        self.title = title
        self.downloaded = False

    def __str__(self):
        # NOTE(review): the repr says 'SubtitleQueue' although the class is
        # QueuedSubtitle -- possibly a historical name; confirm before fixing.
        lang = None if not self.languages else self.languages[0]
        return '<SubtitleQueue(%s, %s, %s)>' % (self.path, self.added, lang)
class SubtitleQueue(object):
    """Queue local files (or expected torrent download paths) for subtitle
    fetching and emit entries for the files still missing subtitles.

    Config actions:
      * ``add``    -- queue a local file/folder or a torrent's target path
      * ``remove`` -- drop a previously queued local file
      * ``emit``   -- produce one entry per queued video file

    Fix over the previous revision: ``config['action'] is 'emit'`` compared
    strings by *identity*; strings coming from YAML config are not guaranteed
    to be interned, so the check could silently fail.  Both occurrences now
    use ``==``.
    """

    schema = {
        "oneOf": [
            {
                'type': 'object',
                'properties': {
                    'action': {'type': 'string', 'enum': ['add', 'remove']},
                    'stop_after': {'type': 'string', 'format': 'interval'},
                    'languages': {"oneOf": [
                        {'type': 'array', 'items': {'type': 'string'}, 'minItems': 1},
                        {'type': 'string'},
                    ]},
                    'path': {'type': 'string'},
                    'alternate_path': {'type': 'string'},
                },
                'required': ['action'],
                'additionalProperties': False
            },
            {
                'type': 'string', 'enum': ['emit']
            },
            {
                'type': 'object',
                'properties': {
                    'action': {'type': 'string', 'enum': ['emit']},
                    'remove_not_found': {'type': 'boolean', 'default': False},
                },
                'required': ['action'],
                'additionalProperties': False
            }
        ]
    }

    # Paths whose subtitle download failed during the current task run.
    failed_paths = {}

    def prepare_config(self, config):
        """Normalise the shorthand string config ('emit') into dict form."""
        if isinstance(config, basestring):
            config = {'action': config, 'remove_not_found': False}
        return config

    def on_task_start(self, task, config):
        # Reset per-run failure bookkeeping.
        self.failed_paths = {}

    def complete(self, entry, task=None, path=None, **kwargs):
        """Entry on_complete hook: record download success/failure on the
        queue item according to the 'subtitles_missing' field set by the
        subtitle-fetching plugin."""
        with Session() as session:
            item = session.query(QueuedSubtitle).filter(or_(QueuedSubtitle.path == path,
                                                            QueuedSubtitle.alternate_path == path)).first()
            if 'subtitles_missing' in entry and not entry['subtitles_missing']:
                entry.accept()
                # Only mark downloaded if no other file under this path failed.
                if not self.failed_paths.get(path):
                    item.downloaded = True
            elif 'subtitles_missing' in entry:
                self.failed_paths[path] = True
                item.downloaded = False
                entry.fail()

    def emit(self, task, config):
        """Build one entry per queued video file that still needs subtitles.

        Expired queue items are removed by queue_get(); missing files are
        kept for 24 hours (unless remove_not_found is set) and then dropped.
        """
        if not config:
            return
        entries = []
        with Session() as session:
            for sub_item in queue_get(session=session):
                # Resolve whichever of the two queued paths actually exists.
                if os.path.exists(sub_item.path):
                    path = sub_item.path
                elif sub_item.alternate_path and os.path.exists(sub_item.alternate_path):
                    path = sub_item.alternate_path
                elif not config['remove_not_found'] and \
                        sub_item.added + parse_timedelta('24 hours') > datetime.combine(date.today(), time()):
                    # Grace period: the file may simply not be downloaded yet.
                    log.warning('File %s was not found. Deleting after %s.' %
                                (sub_item.path, unicode(sub_item.added + parse_timedelta('24 hours'))))
                    continue
                else:
                    log.error('File not found. Removing "%s" from queue.' % sub_item.title)
                    session.delete(sub_item)
                    continue
                # A queued folder expands into its (direct) contents.
                if os.path.isdir(path):
                    paths = os.listdir(path)
                    if not paths:
                        log.warning('Queued folder %s is empty.' % path)
                        continue
                    path_dir = path
                else:
                    paths = [path]
                    path_dir = os.path.dirname(path)
                # The set of requested languages for this item.
                primary = set()
                for language in sub_item.languages:
                    primary.add(Language.fromietf(language.language))
                for file in paths:
                    entry = Entry()
                    if not file.lower().endswith(VIDEO_EXTENSIONS):
                        continue
                    file = normalize_path(os.path.join(path_dir, file))
                    entry['url'] = urlparse.urljoin('file:', urllib.pathname2url(file.encode('utf-8')))
                    entry['location'] = file
                    entry['title'] = os.path.splitext(os.path.basename(file))[0]  # filename without ext
                    entry['subtitle_languages'] = primary
                    try:
                        import subliminal
                        try:
                            video = subliminal.scan_video(normalize_path(file))
                            # Skip files that already have every requested language.
                            if primary and not primary - video.subtitle_languages:
                                log.debug('All subtitles already fetched for %s.' % entry['title'])
                                sub_item.downloaded = True
                                continue
                        except ValueError as e:
                            log.error('Invalid video file: %s. Removing %s from queue.' % (e, entry['title']))
                            session.delete(sub_item)
                            continue
                    except ImportError:
                        log.debug('Falling back to simple check since Subliminal is not installed.')
                        # use glob since subliminal is not there
                        path_no_ext = os.path.splitext(normalize_path(file))[0]
                        # can only check subtitles that have explicit language codes in the file name
                        if primary:
                            files = glob.glob(path_no_ext + "*")
                            files = [item.lower() for item in files]
                            for lang in primary:
                                # NOTE(review): the '%s.%s' % (path_no_ext, lang)
                                # operand is always truthy, so this any() only
                                # checks for *some* subtitle-extension file --
                                # the language part looks unintentional; kept
                                # as-is pending confirmation.
                                if not any('%s.%s' % (path_no_ext, lang) and
                                           f.lower().endswith(SUBTITLE_EXTENSIONS) for f in files):
                                    break
                            else:
                                log.debug('All subtitles already fetched for %s.' % entry['title'])
                                sub_item.downloaded = True
                                continue
                    entry.on_complete(self.complete, path=path, task=task)
                    entries.append(entry)
                    log.debug('Emitting entry for %s.' % entry['title'])
        return entries

    def on_task_filter(self, task, config):
        config = self.prepare_config(config)
        # BUG FIX: was "config['action'] is 'emit'" (string identity).
        if config['action'] == 'emit':
            for entry in task.entries:
                entry.accept()

    def on_task_input(self, task, config):
        config = self.prepare_config(config)
        if config['action'] != 'emit':
            return
        return self.emit(task, config)

    def on_task_output(self, task, config):
        """Apply the 'add'/'remove' action to each accepted entry."""
        config = self.prepare_config(config)
        # BUG FIX: was "config['action'] is 'emit'" (string identity).
        if not config or config['action'] == 'emit':
            return
        action = config.get('action')
        for entry in task.accepted:
            try:
                if action == 'add':
                    # is it a local file?
                    if 'location' in entry:
                        try:
                            path = entry.render(config.get('path', entry['location']))
                            alternate_path = entry.render(config.get('alternate_path', ''))
                            queue_add(path, entry.get('title', ''), config, alternate_path=alternate_path,
                                      location=entry['location'])
                        except RenderError as ex:
                            # entry.fail('Invalid entry field %s for %s.' % (config['path'], entry['title']))
                            log.error('Could not render: %s. Please check your config.' % ex)
                            break
                    # or is it a torrent?
                    elif 'torrent' in entry and 'content_files' in entry:
                        if 'path' not in config:
                            log.error('No path set for non-local file. Don\'t know where to look.')
                            break
                        # try to render
                        try:
                            path = entry.render(config['path'])
                            alternate_path = entry.render(config.get('alternate_path', ''))
                        except RenderError as ex:
                            # entry.fail('Invalid entry field %s for %s.' % (config['path'], entry['title']))
                            log.error('Could not render: %s. Please check your config.' % ex)
                            break
                        files = entry['content_files']
                        if len(files) == 1:
                            # Single-file torrent: queue the file itself.
                            title = files[0]
                            # TODO: use content_filename in case of single-file torrents?
                            # How to handle path and alternate_path in this scenario?
                            ext = os.path.splitext(title)[1]
                            if 'content_filename' in entry:
                                # need to queue the content_filename as alternate
                                if not alternate_path:
                                    alternate_path = os.path.join(path, entry['content_filename'] + ext)
                                else:
                                    # if alternate_path already exists, then we simply change it
                                    alternate_path = os.path.join(alternate_path,
                                                                  entry['content_filename'] + ext)
                            else:
                                path = os.path.join(path, title)
                                if alternate_path:
                                    alternate_path = os.path.join(alternate_path, title)
                        else:
                            # title of the torrent is usually the name of the folder
                            title = entry['torrent'].content['info']['name']
                            path = os.path.join(path, title)
                            if alternate_path:
                                alternate_path = os.path.join(alternate_path, title)
                        queue_add(path, title, config, alternate_path=alternate_path)
                    else:
                        # should this really happen though?
                        entry.reject('Invalid entry. Not a torrent or local file.')
                elif action == 'remove':
                    if entry.get('location', ''):
                        queue_del(entry['location'])
                    else:
                        entry.reject('Not a local file. Cannot remove non-local files.')
            except QueueError as e:
                # ignore already in queue
                if e.errno != 1:
                    entry.fail('ERROR: %s' % e.message)
@with_session
def queue_add(path, title, config, alternate_path=None, location=None, session=None):
    """Queue *path* (and optionally *alternate_path*) for subtitle fetching.

    If any of the given paths (including the entry's original *location*)
    is already queued, the existing item is updated instead of duplicated.
    Returns True.
    """
    path = normalize_path(path)
    alternate_path = normalize_path(alternate_path)
    location = normalize_path(location)

    # Match on either primary or alternate path, plus the original location.
    conditions = [QueuedSubtitle.path == path, QueuedSubtitle.alternate_path == path]
    if location:
        conditions.extend([QueuedSubtitle.path == location, QueuedSubtitle.alternate_path == location])
    item = session.query(QueuedSubtitle).filter(or_(*conditions)).first()
    primary = make_lang_list(config.get('languages', []), session=session)
    if item:
        log.info('%s: Already queued. Updating values.' % item.title)
        queue_edit(path, alternate_path, title, config, location=location, session=session)
    else:
        if config.get('stop_after', ''):
            item = QueuedSubtitle(path, alternate_path, title, stop_after=config.get('stop_after'))
        else:
            # Fall back to QueuedSubtitle's default expiry interval.
            item = QueuedSubtitle(path, alternate_path, title)
        session.add(item)
        item.languages = primary
        log.info('Added %s to queue with langs: [%s].' %
                 (item.path, ', '.join([unicode(s.language) for s in item.languages])))
    return True
@with_session
def queue_del(path, session=None):
    """Remove the queued item matching *path* (primary or alternate).

    Returns the removed item's primary path; raises QueueError when *path*
    is not queued.
    """
    path = normalize_path(path)
    match = session.query(QueuedSubtitle).filter(
        or_(QueuedSubtitle.path == path,
            QueuedSubtitle.alternate_path == path)).first()
    if not match:
        # Not necessarily an error?
        raise QueueError("Cannot remove %s, not in the queue." % path)
    log.debug('Removed %s.' % match.path)
    session.delete(match)
    return match.path
@with_session
def queue_edit(src, dest, title, config, location=None, session=None):
    """Update a queued subtitle item found via *src* (or *location*).

    :param src: new primary path, also used to look the item up
    :param dest: new alternate path
    :param title: title used if the item has to be (re)added
    :param config: plugin config dict; may carry 'stop_after' and 'languages'
    :param location: optional extra path also matched during lookup
    :param session: SQLAlchemy session (injected by @with_session)
    """
    src = normalize_path(src)
    dest = normalize_path(dest)
    location = normalize_path(location)
    conditions = [QueuedSubtitle.path == src, QueuedSubtitle.alternate_path == src]
    if location:
        conditions.extend([QueuedSubtitle.path == location, QueuedSubtitle.alternate_path == location])
    item = session.query(QueuedSubtitle).filter(or_(*conditions)).first()
    # item should exist, but this check might be needed in the future
    if not item:
        # tried to edit a non-queued item. Add it.
        # NOTE(review): *dest* is passed as the primary path and *src* as the
        # alternate here, the reverse of this function's own update below --
        # confirm the swap is intentional.
        queue_add(dest, title, config, alternate_path=src, location=location, session=session)
    else:
        # Once everything is downloaded the item is considered finished and
        # is left untouched.
        if item.downloaded:
            log.info('All subtitles have already been downloaded. Not updating values.')
            return
        if item.path != src:
            item.path = src
        if item.alternate_path != dest:
            item.alternate_path = dest
        # if there is a stop_after value, then it is refreshed in the db
        if config.get('stop_after', ''):
            item.stop_after = config['stop_after']
            item.added = datetime.now()
        if config.get('languages'):
            primary = make_lang_list(config.get('languages', []), session=session)
            item.languages = primary
        log.info('Updated values for %s.' % item.title)
@with_session
def queue_get(session=None):
    """Return all not-yet-downloaded queue items, purging expired ones.

    An item is expired when its `added` timestamp plus its `stop_after`
    interval lies before midnight of today; expired items are deleted from
    the session and excluded from the returned list.
    """
    subs = session.query(QueuedSubtitle).filter(QueuedSubtitle.downloaded == False).all()
    # Midnight of the current day, computed once for the whole scan.
    cutoff = datetime.combine(date.today(), time())
    # Iterate over a copy: the original code removed from `subs` while
    # iterating it, which silently skips the element after each removal.
    for sub_item in list(subs):
        if sub_item.added + parse_timedelta(sub_item.stop_after) < cutoff:
            log.debug('%s has expired. Removing.' % sub_item.title)
            subs.remove(sub_item)
            session.delete(sub_item)
    return subs
# must always pass the session
@with_session
def get_lang(lang, session=None):
    """Return the stored SubtitleLanguages row for *lang*, or None."""
    wanted = unicode(lang)
    query = session.query(SubtitleLanguages)
    return query.filter(SubtitleLanguages.language == wanted).first()
@with_session
def make_lang_list(languages, session=None):
    """Normalize *languages* (a name or list of names) into a unique list
    of SubtitleLanguages rows, reusing stored rows where they exist."""
    if not isinstance(languages, list):
        languages = [languages]
    # TODO: find better way of enforcing uniqueness without catching exceptions or doing dumb shit like this
    # IETF-normalize every requested name and deduplicate via a set.
    wanted = {unicode(Language.fromietf(code)) for code in languages}
    rows = set()
    for name in wanted:
        existing = get_lang(name, session=session)
        rows.add(existing if existing else SubtitleLanguages(unicode(name)))
    return list(rows)
class QueueError(Exception):
    """Exception raised if there is an error with a queue operation.

    :ivar message: human-readable description of the failure
    :ivar errno: numeric code; 1 means "already in queue" per the caller
    """
    # TODO: taken from movie_queue
    def __init__(self, message, errno=0):
        # Initialize the base Exception so str(e) and e.args carry the
        # message (the original never called the base initializer).
        super(QueueError, self).__init__(message)
        self.message = message
        self.errno = errno
@event('plugin.register')
def register_plugin():
    # Register the SubtitleQueue plugin class under the name
    # 'subtitle_queue' with plugin API version 2.
    plugin.register(SubtitleQueue, 'subtitle_queue', api_ver=2)
| mit |
KitKatXperience/platform_external_chromium_org | build/android/gyp/jar.py | 35 | 1915 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import optparse
import os
import sys
from util import build_utils
from util import md5_check
def DoJar(options):
  """Jar the .class files under options.classes_dir into options.jar_path.

  Class files matching any pattern in options.excluded_classes are skipped.
  The jar command is only re-run when its inputs or command line change
  (tracked via an .md5.stamp record file).
  """
  class_files = build_utils.FindInDirectory(options.classes_dir, '*.class')
  for exclude in build_utils.ParseGypList(options.excluded_classes):
    # List comprehension instead of filter()+lambda: clearer, and keeps
    # class_files a real list on Python 3 as well.
    class_files = [f for f in class_files
                   if not fnmatch.fnmatch(f, exclude)]

  jar_path = os.path.abspath(options.jar_path)

  # The paths of the files in the jar will be the same as they are passed in to
  # the command. Because of this, the command should be run in
  # options.classes_dir so the .class file paths in the jar are correct.
  jar_cwd = options.classes_dir
  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
  jar_cmd = ['jar', 'cf0', jar_path] + class_files_rel

  record_path = '%s.md5.stamp' % options.jar_path
  md5_check.CallAndRecordIfStale(
      lambda: build_utils.CheckCallDie(jar_cmd, cwd=jar_cwd),
      record_path=record_path,
      input_paths=class_files,
      input_strings=jar_cmd)

  build_utils.Touch(options.jar_path)
def main(argv):
  """Parse jar-building flags from *argv* and invoke DoJar.

  Returns None (mapped to exit status 0 by sys.exit).
  """
  parser = optparse.OptionParser()
  parser.add_option('--classes-dir', help='Directory containing .class files.')
  parser.add_option('--jar-path', help='Jar output path.')
  parser.add_option('--excluded-classes',
                    help='List of .class file patterns to exclude from the jar.')
  parser.add_option('--stamp', help='Path to touch on success.')

  # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
  parser.add_option('--ignore', help='Ignored.')

  # Fix: parse the argv that was actually passed in (minus the program
  # name) instead of silently falling back to sys.argv.
  options, _ = parser.parse_args(argv[1:])

  DoJar(options)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
| bsd-3-clause |
Inspq/ansible | lib/ansible/modules/notification/jabber.py | 17 | 4809 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
version_added: "1.2"
module: jabber
short_description: Send a message to jabber user or chat room
description:
- Send a message to jabber
options:
user:
description:
- User as which to connect
required: true
password:
description:
- password for user to connect
required: true
to:
description:
- user ID or name of the room, when using room use a slash to indicate your nick.
required: true
msg:
description:
- The message body.
required: true
default: null
host:
description:
- host to connect, overrides user info
required: false
port:
description:
- port to connect to, overrides default
required: false
default: 5222
encoding:
description:
- message encoding
required: false
# informational: requirements for nodes
requirements:
- python xmpp (xmpppy)
author: "Brian Coca (@bcoca)"
'''
EXAMPLES = '''
# send a message to a user
- jabber:
user: mybot@example.net
password: secret
to: friend@example.net
msg: Ansible task finished
# send a message to a room
- jabber:
user: mybot@example.net
password: secret
to: mychaps@conference.example.net/ansiblebot
msg: Ansible task finished
# send a message, specifying the host and port
- jabber:
user: mybot@example.net
host: talk.example.net
port: 5223
password: secret
to: mychaps@example.net
msg: Ansible task finished
'''
import os
import re
import time
HAS_XMPP = True
try:
import xmpp
except ImportError:
HAS_XMPP = False
def main():
    """Ansible module entry point: send an XMPP message to a user or room.

    Reads user/password/to/msg (and optional host/port/encoding) from the
    module arguments, connects via xmpppy, sends the message, and exits
    through module.exit_json / module.fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            user=dict(required=True),
            password=dict(required=True, no_log=True),
            to=dict(required=True),
            msg=dict(required=True),
            host=dict(required=False),
            port=dict(required=False,default=5222),
            encoding=dict(required=False),
        ),
        supports_check_mode=True
    )

    if not HAS_XMPP:
        module.fail_json(msg="The required python xmpp library (xmpppy) is not installed")

    # The JID supplies both the account node and the default server domain.
    jid = xmpp.JID(module.params['user'])
    user = jid.getNode()
    server = jid.getDomain()
    port = module.params['port']
    password = module.params['password']
    # "room@server/nick" targets a chat room; a plain JID targets a user.
    try:
        to, nick = module.params['to'].split('/', 1)
    except ValueError:
        to, nick = module.params['to'], None

    # An explicit host overrides the domain taken from the user JID.
    if module.params['host']:
        host = module.params['host']
    else:
        host = server
    if module.params['encoding']:
        xmpp.simplexml.ENCODING = module.params['encoding']

    msg = xmpp.protocol.Message(body=module.params['msg'])

    try:
        conn=xmpp.Client(server, debug=[])
        if not conn.connect(server=(host,port)):
            module.fail_json(rc=1, msg='Failed to connect to server: %s' % (server))
        if not conn.auth(user,password,'Ansible'):
            module.fail_json(rc=1, msg='Failed to authorize %s on: %s' % (user,server))
        # some old servers require this, also the sleep following send
        conn.sendInitPresence(requestRoster=0)

        if nick: # sending to room instead of user, need to join
            msg.setType('groupchat')
            msg.setTag('x', namespace='http://jabber.org/protocol/muc#user')
            # Join the room by sending presence to room@server/nick.
            # NOTE(review): in this branch the message's 'to' is never set
            # (only the chat branch calls msg.setTo) -- confirm the room
            # actually receives it.
            conn.send(xmpp.Presence(to=module.params['to']))
            time.sleep(1)
        else:
            msg.setType('chat')
            msg.setTo(to)

        # Honor check mode: connect/auth happened above, but nothing is sent.
        if not module.check_mode:
            conn.send(msg)
            time.sleep(1)
        conn.disconnect()
    except Exception:
        e = get_exception()
        module.fail_json(msg="unable to send msg: %s" % e)

    module.exit_json(changed=False, to=to, user=user, msg=msg.getBody())
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
| gpl-3.0 |
proyectosdeley/proyectos_de_ley | proyectos_de_ley/pdl_scraper/spiders/proyecto_spider_2006.py | 1 | 5398 | # -*- coding: utf-8 -*-
import re
import short_url
import scrapy
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from pdl_scraper.items import PdlScraperItem
LEGISLATURA = 2006
class ProyectoSpider2006(CrawlSpider):
    """Crawl the Congreso site for 'Proyectos de Ley' of the 2006 period.

    Follows links ending in ``OpenDocument`` from the paginated index and
    scrapes each bill's tracking ("seguimiento") page into a
    ``PdlScraperItem``.
    """
    name = "proyecto_2006"
    allowed_domains = ["www2.congreso.gob.pe"]
    rules = (
        Rule(LinkExtractor(allow=('OpenDocument$',)), callback='parse_item'),
    )

    # <input name="..."> attribute on the tracking page -> item field name.
    # Replaces the original 15-branch if-chain with a single lookup.
    INPUT_FIELD_MAP = {
        'CodIni': 'codigo',
        'CodIni_web_1': 'numero_proyecto',
        'DesPerio': 'periodo',
        'DesLegis': 'legislatura2',
        'fechapre': 'fecha_presentacion',
        'DesPropo': 'proponente',
        'DesGrupParla': 'grupo_parlamentario',
        'TitIni': 'titulo',
        'Titulo': 'titulo2',
        'SumIni': 'sumilla',
        'NomCongre': 'congresistas',
        'CodIniSecu': 'iniciativas_agrupadas',
        'NumLey': 'numero_de_ley',
        'TitLey': 'titulo_de_ley',
        'NombreDeLaComision': 'nombre_comision',
    }

    # Fields that must always exist on the item, even when the page does
    # not provide a value for them.
    DEFAULT_FIELDS = (
        'codigo', 'numero_proyecto', 'congresistas', 'titulo', 'short_url',
        'fecha_presentacion', 'expediente', 'seguimiento_page', 'proponente',
        'grupo_parlamentario', 'iniciativas_agrupadas', 'nombre_comision',
        'titulo_de_ley', 'numero_de_ley',
    )

    def __init__(self, *args, **kwargs):
        super(ProyectoSpider2006, self).__init__(*args, **kwargs)
        self.legislatura = LEGISLATURA

    def start_requests(self):
        """Yield a request for every page of the 2006 bill index."""
        base_url = (
            'http://www2.congreso.gob.pe/Sicr/TraDocEstProc/CLProLey2006.nsf/Numinversopa?OpenView&Start='
        )
        # The index is paginated in chunks of 499 entries.
        for page in range(1, 5000, 499):
            yield scrapy.Request(url=f'{base_url}{page}')

    def parse_item(self, response):
        """Scrape one bill's tracking page into a PdlScraperItem."""
        self.log("this is the url: %s" % response.url)
        item = PdlScraperItem()
        for field in self.DEFAULT_FIELDS:
            item[field] = ''
        item['legislatura'] = self.legislatura
        # Every data point is exposed as an <input>; map the known names
        # onto item fields (inputs with unknown names are ignored).
        for sel in response.xpath("//input"):
            attr_name = sel.xpath('@name').extract()[0]
            field = self.INPUT_FIELD_MAP.get(attr_name)
            if field is not None:
                item[field] = sel.xpath('@value').extract()[0]
        item['expediente'] = "http://www2.congreso.gob.pe/sicr/tradocestproc/Expvirt_2011.nsf/" \
                             "visbusqptramdoc1621/{}?opendocument".format(item['codigo'])
        item['seguimiento_page'] = response.url
        # The tracking history sits next to the cell labeled 'Seguimiento'.
        for sel in response.xpath('//td[@width="112"]'):
            if sel.xpath('font/text()').extract()[0] == 'Seguimiento':
                item['seguimiento'] = sel.xpath('following-sibling::*//text()').extract()
        item['short_url'] = self.create_shorturl(item['codigo'])
        return item

    def parse_pdfurl(self, response):
        """Locate the bill's PDF link on its expediente page.

        Sets item['pdf_url'] to the matched href, or '' when no known
        pattern matches.
        """
        item = response.meta['item']
        codigo = item['codigo']
        for sel in response.xpath("//a"):
            href = sel.xpath("@href").extract()[0]
            # Raw strings: the originals used invalid '\$' / '\/' string
            # escapes (W605); the compiled regexes are unchanged.
            patterns = [
                r"\$FILE/" + str(codigo) + r"\.pdf$",
                r"\$FILE/.+" + str(codigo) + r"[0-9]+\.*-?\.pdf$",
                r"\$FILE/.+" + str(codigo) + r"[0-9]+\.PDF$",
                r"/PL" + str(codigo) + r"[0-9]+-?\.+pdf",
            ]
            for pattern in patterns:
                pattern = re.compile(pattern, re.IGNORECASE)
                if re.search(pattern, href):
                    self.log("Found pdfurl for code: %s" % str(codigo))
                    item['pdf_url'] = href
                    return item
        self.log("We failed to parse pdfurl for this project %s:" % str(codigo))
        item['pdf_url'] = ''
        return item

    def create_shorturl(self, codigo):
        """
        Use "legislatura" and codigo to build a short url.
        :param codigo: Code for Proyecto de ley "03774"
        :return: 4aw8ym
        """
        mystring = "%s%s" % (self.legislatura, codigo)
        return short_url.encode_url(int(mystring))
| mit |
syci/OCB | openerp/addons/base/module/wizard/base_language_install.py | 40 | 1792 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import tools
from openerp.osv import osv, fields
from openerp.tools.translate import _
class base_language_install(osv.osv_memory):
    """ Install Language"""
    _name = "base.language.install"
    _description = "Install Language"
    # lang: language to install; overwrite: replace customized translations;
    # state: wizard progress flag (init -> done).
    _columns = {
        'lang': fields.selection(tools.scan_languages(),'Language', required=True),
        'overwrite': fields.boolean('Overwrite Existing Terms', help="If you check this box, your customized translations will be overwritten and replaced by the official ones."),
        'state':fields.selection([('init','init'),('done','done')], 'Status', readonly=True),
    }
    _defaults = {
        'state': 'init',
        'overwrite': False
    }

    def lang_install(self, cr, uid, ids, context=None):
        """Load the selected language pack for every installed module.

        Marks the wizard 'done' and returns an action dict that reopens it.
        """
        if context is None:
            context = {}
        language_obj = self.browse(cr, uid, ids)[0]
        lang = language_obj.lang
        if lang:
            modobj = self.pool.get('ir.module.module')
            mids = modobj.search(cr, uid, [('state', '=', 'installed')])
            if language_obj.overwrite:
                # Fix: extend the caller's context instead of replacing it
                # wholesale -- the original `context = {'overwrite': True}`
                # dropped every other key, and the clobbered dict was then
                # reused by self.write and the returned action below.
                context = dict(context, overwrite=True)
            modobj.update_translations(cr, uid, mids, lang, context or {})
        self.write(cr, uid, ids, {'state': 'done'}, context=context)
        return {
            'name': _('Language Pack'),
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': False,
            'res_model': 'base.language.install',
            'domain': [],
            'context': dict(context, active_ids=ids),
            'type': 'ir.actions.act_window',
            'target': 'new',
            'res_id': ids and ids[0] or False,
        }
| agpl-3.0 |
cctaylor/googleads-python-lib | examples/dfp/v201502/order_service/create_orders.py | 3 | 1835 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new orders.
To determine which orders exist, run get_all_orders.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
COMPANY_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
SALESPERSON_ID = 'INSERT_SALESPERSON_ID_HERE'
TRAFFICKER_ID = 'INSERT_TRAFFICKER_ID_HERE'
def main(client, company_id, salesperson_id, trafficker_id):
  """Create five orders for the given advertiser via the DFP OrderService.

  Args:
    client: an initialized DFP client (anything exposing GetService).
    company_id: advertiser (company) id the orders belong to.
    salesperson_id: id of the salesperson assigned to the orders.
    trafficker_id: id of the trafficker assigned to the orders.
  """
  # Initialize appropriate service.
  order_service = client.GetService('OrderService', version='v201502')

  # Create order objects. `range` (not py2-only `xrange`) keeps this
  # working on both Python 2 and 3; the count is tiny so cost is nil.
  orders = []
  for _ in range(5):
    order = {
        'name': 'Order #%s' % uuid.uuid4(),
        'advertiserId': company_id,
        'salespersonId': salesperson_id,
        'traffickerId': trafficker_id
    }
    orders.append(order)

  # Add orders.
  orders = order_service.createOrders(orders)

  # Display results.
  for order in orders:
    print ('Order with id \'%s\' and name \'%s\' was created.'
           % (order['id'], order['name']))

if __name__ == '__main__':
  # Initialize client object.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client, COMPANY_ID, SALESPERSON_ID, TRAFFICKER_ID)
| apache-2.0 |
q1ang/vnpy | vn.lts/pyscript/lts_struct.py | 47 | 80727 | # encoding: UTF-8
structDict = {}
#//////////////////////////////////////////////////////////////////////
#@company shanghai liber information Technology Co.,Ltd
#@file SecurityFtdcUserApiStruct.h
#@brief 定义业务数据结构
#//////////////////////////////////////////////////////////////////////
#响应信息
CSecurityFtdcRspInfoField = {}
#错误代码
CSecurityFtdcRspInfoField["ErrorID"] = "int"
#错误信息
CSecurityFtdcRspInfoField["ErrorMsg"] = "string"
structDict['CSecurityFtdcRspInfoField'] = CSecurityFtdcRspInfoField
#交易所
CSecurityFtdcExchangeField = {}
#交易所代码
CSecurityFtdcExchangeField["ExchangeID"] = "string"
#交易所名称
CSecurityFtdcExchangeField["ExchangeName"] = "string"
#交易所属性
CSecurityFtdcExchangeField["ExchangeProperty"] = "string"
structDict['CSecurityFtdcExchangeField'] = CSecurityFtdcExchangeField
#产品
CSecurityFtdcProductField = {}
#产品代码
CSecurityFtdcProductField["ProductID"] = "string"
#产品名称
CSecurityFtdcProductField["ProductName"] = "string"
#交易所代码
CSecurityFtdcProductField["ExchangeID"] = "string"
#产品类型
CSecurityFtdcProductField["ProductClass"] = "string"
#合约数量乘数
CSecurityFtdcProductField["VolumeMultiple"] = "int"
#最小变动价位
CSecurityFtdcProductField["PriceTick"] = "float"
#市价单最大下单量
CSecurityFtdcProductField["MaxMarketOrderVolume"] = "int"
#市价单最小下单量
CSecurityFtdcProductField["MinMarketOrderVolume"] = "int"
#限价单最大下单量
CSecurityFtdcProductField["MaxLimitOrderVolume"] = "int"
#限价单最小下单量
CSecurityFtdcProductField["MinLimitOrderVolume"] = "int"
#持仓类型
CSecurityFtdcProductField["PositionType"] = "string"
#持仓日期类型
CSecurityFtdcProductField["PositionDateType"] = "string"
#ETF最小交易单位
CSecurityFtdcProductField["EFTMinTradeVolume"] = "int"
structDict['CSecurityFtdcProductField'] = CSecurityFtdcProductField
#合约
CSecurityFtdcInstrumentField = {}
#合约代码
CSecurityFtdcInstrumentField["InstrumentID"] = "string"
#交易所代码
CSecurityFtdcInstrumentField["ExchangeID"] = "string"
#合约名称
CSecurityFtdcInstrumentField["InstrumentName"] = "string"
#合约在交易所的代码
CSecurityFtdcInstrumentField["ExchangeInstID"] = "string"
#产品代码
CSecurityFtdcInstrumentField["ProductID"] = "string"
#产品类型
CSecurityFtdcInstrumentField["ProductClass"] = "string"
#交割年份
CSecurityFtdcInstrumentField["DeliveryYear"] = "int"
#交割月
CSecurityFtdcInstrumentField["DeliveryMonth"] = "int"
#市价单最大下单量
CSecurityFtdcInstrumentField["MaxMarketOrderVolume"] = "int"
#市价单最小下单量
CSecurityFtdcInstrumentField["MinMarketOrderVolume"] = "int"
#限价单最大下单量
CSecurityFtdcInstrumentField["MaxLimitOrderVolume"] = "int"
#限价单最小下单量
CSecurityFtdcInstrumentField["MinLimitOrderVolume"] = "int"
#合约数量乘数
CSecurityFtdcInstrumentField["VolumeMultiple"] = "int"
#最小变动价位
CSecurityFtdcInstrumentField["PriceTick"] = "float"
#创建日
CSecurityFtdcInstrumentField["CreateDate"] = "string"
#上市日
CSecurityFtdcInstrumentField["OpenDate"] = "string"
#到期日
CSecurityFtdcInstrumentField["ExpireDate"] = "string"
#开始交割日
CSecurityFtdcInstrumentField["StartDelivDate"] = "string"
#结束交割日
CSecurityFtdcInstrumentField["EndDelivDate"] = "string"
#合约生命周期状态
CSecurityFtdcInstrumentField["InstLifePhase"] = "string"
#当前是否交易
CSecurityFtdcInstrumentField["IsTrading"] = "int"
#持仓类型
CSecurityFtdcInstrumentField["PositionType"] = "string"
#报单能否撤单
CSecurityFtdcInstrumentField["OrderCanBeWithdraw"] = "int"
#最小买下单单位
CSecurityFtdcInstrumentField["MinBuyVolume"] = "int"
#最小卖下单单位
CSecurityFtdcInstrumentField["MinSellVolume"] = "int"
#股票权限模版代码
CSecurityFtdcInstrumentField["RightModelID"] = "string"
#持仓交易类型
CSecurityFtdcInstrumentField["PosTradeType"] = "string"
#市场代码
CSecurityFtdcInstrumentField["MarketID"] = "string"
#期权执行价格
CSecurityFtdcInstrumentField["ExecPrice"] = "float"
#期权单手保证金
CSecurityFtdcInstrumentField["UnitMargin"] = "float"
#合约类型
CSecurityFtdcInstrumentField["InstrumentType"] = "string"
#期权保证金参数1
CSecurityFtdcInstrumentField["OptionsMarginParam1"] = "float"
#期权保证金参数2
CSecurityFtdcInstrumentField["OptionsMarginParam2"] = "float"
structDict['CSecurityFtdcInstrumentField'] = CSecurityFtdcInstrumentField
#经纪公司
CSecurityFtdcBrokerField = {}
#经纪公司代码
CSecurityFtdcBrokerField["BrokerID"] = "string"
#经纪公司简称
CSecurityFtdcBrokerField["BrokerAbbr"] = "string"
#经纪公司名称
CSecurityFtdcBrokerField["BrokerName"] = "string"
#是否活跃
CSecurityFtdcBrokerField["IsActive"] = "int"
structDict['CSecurityFtdcBrokerField'] = CSecurityFtdcBrokerField
#会员编码和经纪公司编码对照表
CSecurityFtdcPartBrokerField = {}
#经纪公司代码
CSecurityFtdcPartBrokerField["BrokerID"] = "string"
#交易所代码
CSecurityFtdcPartBrokerField["ExchangeID"] = "string"
#会员代码
CSecurityFtdcPartBrokerField["ParticipantID"] = "string"
#是否活跃
CSecurityFtdcPartBrokerField["IsActive"] = "int"
structDict['CSecurityFtdcPartBrokerField'] = CSecurityFtdcPartBrokerField
#投资者
CSecurityFtdcInvestorField = {}
#投资者代码
CSecurityFtdcInvestorField["InvestorID"] = "string"
#经纪公司代码
CSecurityFtdcInvestorField["BrokerID"] = "string"
#投资者分组代码
CSecurityFtdcInvestorField["InvestorGroupID"] = "string"
#投资者名称
CSecurityFtdcInvestorField["InvestorName"] = "string"
#证件类型
CSecurityFtdcInvestorField["IdentifiedCardType"] = "string"
#证件号码
CSecurityFtdcInvestorField["IdentifiedCardNo"] = "string"
#是否活跃
CSecurityFtdcInvestorField["IsActive"] = "int"
#上海营业部编号
CSecurityFtdcInvestorField["SHBranchID"] = "string"
#深圳营业部编号
CSecurityFtdcInvestorField["SZBranchID"] = "string"
#所属结算系统类型
CSecurityFtdcInvestorField["SettleSystemType"] = "string"
#投资者期权交易等级
CSecurityFtdcInvestorField["InvestorLevel"] = "string"
structDict['CSecurityFtdcInvestorField'] = CSecurityFtdcInvestorField
#交易编码
CSecurityFtdcTradingCodeField = {}
#投资者代码
CSecurityFtdcTradingCodeField["InvestorID"] = "string"
#经纪公司代码
CSecurityFtdcTradingCodeField["BrokerID"] = "string"
#交易所代码
CSecurityFtdcTradingCodeField["ExchangeID"] = "string"
#客户代码
CSecurityFtdcTradingCodeField["ClientID"] = "string"
#是否活跃
CSecurityFtdcTradingCodeField["IsActive"] = "int"
#AccountID
CSecurityFtdcTradingCodeField["AccountID"] = "string"
#交易单元号
CSecurityFtdcTradingCodeField["PBU"] = "string"
#ClientType
CSecurityFtdcTradingCodeField["ClientType"] = "string"
structDict['CSecurityFtdcTradingCodeField'] = CSecurityFtdcTradingCodeField
#管理用户
CSecurityFtdcSuperUserField = {}
#用户代码
CSecurityFtdcSuperUserField["UserID"] = "string"
#用户名称
CSecurityFtdcSuperUserField["UserName"] = "string"
#密码
CSecurityFtdcSuperUserField["Password"] = "string"
#是否活跃
CSecurityFtdcSuperUserField["IsActive"] = "int"
structDict['CSecurityFtdcSuperUserField'] = CSecurityFtdcSuperUserField
#管理用户功能权限
CSecurityFtdcSuperUserFunctionField = {}
#用户代码
CSecurityFtdcSuperUserFunctionField["UserID"] = "string"
#功能代码
CSecurityFtdcSuperUserFunctionField["FunctionCode"] = "string"
structDict['CSecurityFtdcSuperUserFunctionField'] = CSecurityFtdcSuperUserFunctionField
#经纪公司用户
CSecurityFtdcBrokerUserField = {}
#经纪公司代码
CSecurityFtdcBrokerUserField["BrokerID"] = "string"
#用户代码
CSecurityFtdcBrokerUserField["UserID"] = "string"
#用户名称
CSecurityFtdcBrokerUserField["UserName"] = "string"
#用户类型
CSecurityFtdcBrokerUserField["UserType"] = "string"
#是否活跃
CSecurityFtdcBrokerUserField["IsActive"] = "int"
#是否使用令牌
CSecurityFtdcBrokerUserField["IsUsingOTP"] = "int"
structDict['CSecurityFtdcBrokerUserField'] = CSecurityFtdcBrokerUserField
#经纪公司用户功能权限
CSecurityFtdcBrokerUserFunctionField = {}
#经纪公司代码
CSecurityFtdcBrokerUserFunctionField["BrokerID"] = "string"
#用户代码
CSecurityFtdcBrokerUserFunctionField["UserID"] = "string"
#经纪公司功能代码
CSecurityFtdcBrokerUserFunctionField["BrokerFunctionCode"] = "string"
structDict['CSecurityFtdcBrokerUserFunctionField'] = CSecurityFtdcBrokerUserFunctionField
#资金账户
CSecurityFtdcTradingAccountField = {}
#经纪公司代码
CSecurityFtdcTradingAccountField["BrokerID"] = "string"
#投资者帐号
CSecurityFtdcTradingAccountField["AccountID"] = "string"
#上次质押金额
CSecurityFtdcTradingAccountField["PreMortgage"] = "float"
#上次信用额度
CSecurityFtdcTradingAccountField["PreCredit"] = "float"
#上次存款额
CSecurityFtdcTradingAccountField["PreDeposit"] = "float"
#上次结算准备金
CSecurityFtdcTradingAccountField["PreBalance"] = "float"
#上次占用的保证金
CSecurityFtdcTradingAccountField["PreMargin"] = "float"
#利息基数
CSecurityFtdcTradingAccountField["InterestBase"] = "float"
#利息收入
CSecurityFtdcTradingAccountField["Interest"] = "float"
#入金金额
CSecurityFtdcTradingAccountField["Deposit"] = "float"
#出金金额
CSecurityFtdcTradingAccountField["Withdraw"] = "float"
#冻结的保证金
CSecurityFtdcTradingAccountField["FrozenMargin"] = "float"
#冻结的资金
CSecurityFtdcTradingAccountField["FrozenCash"] = "float"
#冻结的手续费
CSecurityFtdcTradingAccountField["FrozenCommission"] = "float"
#当前保证金总额
CSecurityFtdcTradingAccountField["CurrMargin"] = "float"
#资金差额
CSecurityFtdcTradingAccountField["CashIn"] = "float"
#手续费
CSecurityFtdcTradingAccountField["Commission"] = "float"
#结算准备金
CSecurityFtdcTradingAccountField["Balance"] = "float"
#现金
CSecurityFtdcTradingAccountField["Available"] = "float"
#可取资金
CSecurityFtdcTradingAccountField["WithdrawQuota"] = "float"
#基本准备金
CSecurityFtdcTradingAccountField["Reserve"] = "float"
#交易日
CSecurityFtdcTradingAccountField["TradingDay"] = "string"
#保证金可用余额
CSecurityFtdcTradingAccountField["Credit"] = "float"
#质押金额
CSecurityFtdcTradingAccountField["Mortgage"] = "float"
#交易所保证金
CSecurityFtdcTradingAccountField["ExchangeMargin"] = "float"
#投资者交割保证金
CSecurityFtdcTradingAccountField["DeliveryMargin"] = "float"
#交易所交割保证金
CSecurityFtdcTradingAccountField["ExchangeDeliveryMargin"] = "float"
#冻结的过户费
CSecurityFtdcTradingAccountField["FrozenTransferFee"] = "float"
#冻结的印花税
CSecurityFtdcTradingAccountField["FrozenStampTax"] = "float"
#过户费
CSecurityFtdcTradingAccountField["TransferFee"] = "float"
#印花税
CSecurityFtdcTradingAccountField["StampTax"] = "float"
#折算金额
CSecurityFtdcTradingAccountField["ConversionAmount"] = "float"
#授信额度
CSecurityFtdcTradingAccountField["CreditAmount"] = "float"
#证券总价值
CSecurityFtdcTradingAccountField["StockValue"] = "float"
#国债回购占用资金
CSecurityFtdcTradingAccountField["BondRepurchaseAmount"] = "float"
#国债逆回购占用资金
CSecurityFtdcTradingAccountField["ReverseRepurchaseAmount"] = "float"
#币种
CSecurityFtdcTradingAccountField["CurrencyCode"] = "string"
#账户类型
CSecurityFtdcTradingAccountField["AccountType"] = "string"
#融资买入金额
CSecurityFtdcTradingAccountField["MarginTradeAmount"] = "float"
#融券卖出金额
CSecurityFtdcTradingAccountField["ShortSellAmount"] = "float"
#融资持仓盈亏
CSecurityFtdcTradingAccountField["MarginTradeProfit"] = "float"
#融券持仓盈亏
CSecurityFtdcTradingAccountField["ShortSellProfit"] = "float"
#融券总市值
CSecurityFtdcTradingAccountField["SSStockValue"] = "float"
#维持担保比例
CSecurityFtdcTradingAccountField["CreditRatio"] = "float"
#行权冻结资金
CSecurityFtdcTradingAccountField["FrozenExecCash"] = "float"
structDict['CSecurityFtdcTradingAccountField'] = CSecurityFtdcTradingAccountField
#禁止登录用户
CSecurityFtdcLoginForbiddenUserField = {}
#经纪公司代码
CSecurityFtdcLoginForbiddenUserField["BrokerID"] = "string"
#用户代码
CSecurityFtdcLoginForbiddenUserField["UserID"] = "string"
structDict['CSecurityFtdcLoginForbiddenUserField'] = CSecurityFtdcLoginForbiddenUserField
#深度行情
CSecurityFtdcDepthMarketDataField = {}
#交易日
CSecurityFtdcDepthMarketDataField["TradingDay"] = "string"
#合约代码
CSecurityFtdcDepthMarketDataField["InstrumentID"] = "string"
#交易所代码
CSecurityFtdcDepthMarketDataField["ExchangeID"] = "string"
#合约在交易所的代码
CSecurityFtdcDepthMarketDataField["ExchangeInstID"] = "string"
#最新价
CSecurityFtdcDepthMarketDataField["LastPrice"] = "float"
#上次结算价
CSecurityFtdcDepthMarketDataField["PreSettlementPrice"] = "float"
#昨收盘
CSecurityFtdcDepthMarketDataField["PreClosePrice"] = "float"
#昨持仓量
CSecurityFtdcDepthMarketDataField["PreOpenInterest"] = "float"
#今开盘
CSecurityFtdcDepthMarketDataField["OpenPrice"] = "float"
#最高价
CSecurityFtdcDepthMarketDataField["HighestPrice"] = "float"
#最低价
CSecurityFtdcDepthMarketDataField["LowestPrice"] = "float"
#数量
CSecurityFtdcDepthMarketDataField["Volume"] = "int"
#成交金额
CSecurityFtdcDepthMarketDataField["Turnover"] = "float"
#持仓量
CSecurityFtdcDepthMarketDataField["OpenInterest"] = "float"
#今收盘
CSecurityFtdcDepthMarketDataField["ClosePrice"] = "float"
#本次结算价
CSecurityFtdcDepthMarketDataField["SettlementPrice"] = "float"
#涨停板价
CSecurityFtdcDepthMarketDataField["UpperLimitPrice"] = "float"
#跌停板价
CSecurityFtdcDepthMarketDataField["LowerLimitPrice"] = "float"
#昨虚实度
CSecurityFtdcDepthMarketDataField["PreDelta"] = "float"
#今虚实度
CSecurityFtdcDepthMarketDataField["CurrDelta"] = "float"
#最后修改时间
CSecurityFtdcDepthMarketDataField["UpdateTime"] = "string"
#最后修改毫秒
CSecurityFtdcDepthMarketDataField["UpdateMillisec"] = "int"
#申买价一
CSecurityFtdcDepthMarketDataField["BidPrice1"] = "float"
#申买量一
CSecurityFtdcDepthMarketDataField["BidVolume1"] = "int"
#申卖价一
CSecurityFtdcDepthMarketDataField["AskPrice1"] = "float"
#申卖量一
CSecurityFtdcDepthMarketDataField["AskVolume1"] = "int"
#申买价二
CSecurityFtdcDepthMarketDataField["BidPrice2"] = "float"
#申买量二
CSecurityFtdcDepthMarketDataField["BidVolume2"] = "int"
#申卖价二
CSecurityFtdcDepthMarketDataField["AskPrice2"] = "float"
#申卖量二
CSecurityFtdcDepthMarketDataField["AskVolume2"] = "int"
#申买价三
CSecurityFtdcDepthMarketDataField["BidPrice3"] = "float"
#申买量三
CSecurityFtdcDepthMarketDataField["BidVolume3"] = "int"
#申卖价三
CSecurityFtdcDepthMarketDataField["AskPrice3"] = "float"
#申卖量三
CSecurityFtdcDepthMarketDataField["AskVolume3"] = "int"
#申买价四
CSecurityFtdcDepthMarketDataField["BidPrice4"] = "float"
#申买量四
CSecurityFtdcDepthMarketDataField["BidVolume4"] = "int"
#申卖价四
CSecurityFtdcDepthMarketDataField["AskPrice4"] = "float"
#申卖量四
CSecurityFtdcDepthMarketDataField["AskVolume4"] = "int"
#申买价五
CSecurityFtdcDepthMarketDataField["BidPrice5"] = "float"
#申买量五
CSecurityFtdcDepthMarketDataField["BidVolume5"] = "int"
#申卖价五
CSecurityFtdcDepthMarketDataField["AskPrice5"] = "float"
#申卖量五
CSecurityFtdcDepthMarketDataField["AskVolume5"] = "int"
#当日均价
CSecurityFtdcDepthMarketDataField["AveragePrice"] = "float"
#业务日期
CSecurityFtdcDepthMarketDataField["ActionDay"] = "string"
structDict['CSecurityFtdcDepthMarketDataField'] = CSecurityFtdcDepthMarketDataField
#投资者合约交易权限
CSecurityFtdcInstrumentTradingRightField = {}
#合约代码
CSecurityFtdcInstrumentTradingRightField["InstrumentID"] = "string"
#投资者范围
CSecurityFtdcInstrumentTradingRightField["InvestorRange"] = "string"
#经纪公司代码
CSecurityFtdcInstrumentTradingRightField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcInstrumentTradingRightField["InvestorID"] = "string"
#买卖
CSecurityFtdcInstrumentTradingRightField["Direction"] = "string"
#交易权限
CSecurityFtdcInstrumentTradingRightField["TradingRight"] = "string"
#交易所代码
CSecurityFtdcInstrumentTradingRightField["ExchangeID"] = "string"
#股票权限分类
CSecurityFtdcInstrumentTradingRightField["InstrumentRange"] = "string"
structDict['CSecurityFtdcInstrumentTradingRightField'] = CSecurityFtdcInstrumentTradingRightField
# Investor position detail
CSecurityFtdcInvestorPositionDetailField = {
    "InstrumentID": "string",            # instrument code
    "BrokerID": "string",                # broker ID
    "InvestorID": "string",              # investor ID
    "HedgeFlag": "string",               # hedge flag
    "Direction": "string",               # buy/sell direction
    "OpenDate": "string",                # open date
    "TradeID": "string",                 # trade ID
    "Volume": "int",                     # volume
    "OpenPrice": "float",                # open price
    "TradingDay": "string",              # trading day
    "TradeType": "string",               # trade type
    "ExchangeID": "string",              # exchange ID
    "Margin": "float",                   # investor margin
    "ExchMargin": "float",               # exchange margin
    "LastSettlementPrice": "float",      # previous settlement price
    "SettlementPrice": "float",          # settlement price
    "CloseVolume": "int",                # closed volume
    "CloseAmount": "float",              # closed amount
    "TransferFee": "float",              # transfer fee
    "StampTax": "float",                 # stamp tax
    "Commission": "float",               # commission
    "AccountID": "string",               # account ID
    "PledgeInPosition": "int",           # pledged-in position
    "PledgeInFrozenPosition": "int",     # pledged-in frozen position
    "RepurchasePosition": "int",         # standard bonds used by repo
    "Amount": "float",                   # margin-trading amount
    "UnderlyingInstrumentID": "string",  # underlying instrument code
}
structDict['CSecurityFtdcInvestorPositionDetailField'] = CSecurityFtdcInvestorPositionDetailField
# Bond interest
CSecurityFtdcBondInterestField = {
    "TradingDay": "string",    # trading day
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument code
    "Interest": "float",       # interest
}
structDict['CSecurityFtdcBondInterestField'] = CSecurityFtdcBondInterestField

# Market-value rationing (placement) info
CSecurityFtdcMarketRationInfoField = {
    "BrokerID": "string",     # broker ID
    "InvestorID": "string",   # investor ID
    "ExchangeID": "string",   # exchange ID
    "RationVolume": "int",    # rationing volume available
}
structDict['CSecurityFtdcMarketRationInfoField'] = CSecurityFtdcMarketRationInfoField
# Instrument commission rate
CSecurityFtdcInstrumentCommissionRateField = {
    "ExchangeID": "string",               # exchange ID
    "InstrumentID": "string",             # instrument code
    "InvestorRange": "string",            # investor range
    "BrokerID": "string",                 # broker ID
    "InvestorID": "string",               # investor ID
    "Direction": "string",                # buy/sell direction
    "StampTaxRateByMoney": "float",       # stamp tax rate (by money)
    "StampTaxRateByVolume": "float",      # stamp tax rate (by volume)
    "TransferFeeRateByMoney": "float",    # transfer fee rate (by money)
    "TransferFeeRateByVolume": "float",   # transfer fee rate (by volume)
    "TradeFeeByMoney": "float",           # trade fee (by money)
    "TradeFeeByVolume": "float",          # trade fee (by volume)
    "MarginByMoney": "float",             # additional trade fee rate
    "MinTradeFee": "float",               # minimum trade fee
}
structDict['CSecurityFtdcInstrumentCommissionRateField'] = CSecurityFtdcInstrumentCommissionRateField
# Excess stock info
CSecurityFtdcExcessStockInfoField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "ExchangeID": "string",        # exchange ID
    "InstrumentID": "string",      # instrument code
    "ExcessVolume": "int",         # excess volume
    "ExcessFrozenVolume": "int",   # excess frozen volume
}
structDict['CSecurityFtdcExcessStockInfoField'] = CSecurityFtdcExcessStockInfoField

# ETF instrument
CSecurityFtdcETFInstrumentField = {
    "ExchangeID": "string",              # exchange ID
    "ETFInstrumentID": "string",         # ETF security code
    "ETFPurRedInstrumentID": "string",   # ETF creation/redemption code
    "CreationRedemptionUnit": "int",     # ETF shares per min creation/redemption unit
    "Maxcashratio": "float",             # max cash-substitution ratio
    "Creationredemption": "string",      # fund creation/redemption status today
    "EstimateCashComponent": "float",    # estimated cash component
}
structDict['CSecurityFtdcETFInstrumentField'] = CSecurityFtdcETFInstrumentField
# ETF stock basket
CSecurityFtdcETFBasketField = {
    "ExchangeID": "string",             # exchange ID
    "ETFInstrumentID": "string",        # ETF security code
    "StockInstrumentID": "string",      # stock security code
    "StockInstrumentName": "string",    # stock security name
    "Volume": "int",                    # stock volume
    "CurrenceReplaceStatus": "string",  # substitution flag
    "Premium": "float",                 # premium ratio
    "Amount": "float",                  # total amount
}
structDict['CSecurityFtdcETFBasketField'] = CSecurityFtdcETFBasketField

# OF (open-end fund) instrument
CSecurityFtdcOFInstrumentField = {
    "ExchangeID": "string",          # exchange ID
    "InstrumentID": "string",        # OF fund code
    "Creationredemption": "string",  # fund creation/redemption status today
    "NetPrice": "float",             # fund net asset value
}
structDict['CSecurityFtdcOFInstrumentField'] = CSecurityFtdcOFInstrumentField
# SF (structured fund) instrument
CSecurityFtdcSFInstrumentField = {
    "ExchangeID": "string",        # exchange ID
    "InstrumentID": "string",      # fund code
    "SFInstrumentID": "string",    # SF fund code
    "SplitMergeStatus": "string",  # fund split/merge status today
    "MinSplitVolume": "int",       # minimum split volume
    "MinMergeVolume": "int",       # minimum merge volume
    "VolumeRatio": "int",          # split/merge ratio
    "NetPrice": "float",           # fund net asset value
}
structDict['CSecurityFtdcSFInstrumentField'] = CSecurityFtdcSFInstrumentField
# Exchange trader offer machine
CSecurityFtdcTraderOfferField = {
    "ExchangeID": "string",           # exchange ID
    "BranchPBU": "string",            # exchange trader code
    "ParticipantID": "string",        # participant ID
    "Password": "string",             # password
    "InstallID": "int",               # install ID
    "OrderLocalID": "string",         # local order ID
    "TraderConnectStatus": "string",  # trader connection status
    "ConnectRequestDate": "string",   # date connect request was sent
    "ConnectRequestTime": "string",   # time connect request was sent
    "LastReportDate": "string",       # last report date
    "LastReportTime": "string",       # last report time
    "ConnectDate": "string",          # connection completed date
    "ConnectTime": "string",          # connection completed time
    "StartDate": "string",            # start date
    "StartTime": "string",            # start time
    "TradingDay": "string",           # trading day
    "BrokerID": "string",             # broker ID
}
structDict['CSecurityFtdcTraderOfferField'] = CSecurityFtdcTraderOfferField
# Exchange market-data offer machine
CSecurityFtdcMDTraderOfferField = {
    "ExchangeID": "string",           # exchange ID
    "BranchPBU": "string",            # exchange trader code
    "ParticipantID": "string",        # participant ID
    "Password": "string",             # password
    "InstallID": "int",               # install ID
    "OrderLocalID": "string",         # local order ID
    "TraderConnectStatus": "string",  # trader connection status
    "ConnectRequestDate": "string",   # date connect request was sent
    "ConnectRequestTime": "string",   # time connect request was sent
    "LastReportDate": "string",       # last report date
    "LastReportTime": "string",       # last report time
    "ConnectDate": "string",          # connection completed date
    "ConnectTime": "string",          # connection completed time
    "StartDate": "string",            # start date
    "StartTime": "string",            # start time
    "TradingDay": "string",           # trading day
    "BrokerID": "string",             # broker ID
}
structDict['CSecurityFtdcMDTraderOfferField'] = CSecurityFtdcMDTraderOfferField
# Front-end status
CSecurityFtdcFrontStatusField = {
    "FrontID": "int",             # front-end ID
    "LastReportDate": "string",   # last report date
    "LastReportTime": "string",   # last report time
    "IsActive": "int",            # whether active
}
structDict['CSecurityFtdcFrontStatusField'] = CSecurityFtdcFrontStatusField

# User session
CSecurityFtdcUserSessionField = {
    "FrontID": "int",                    # front-end ID
    "SessionID": "int",                  # session ID
    "BrokerID": "string",                # broker ID
    "UserID": "string",                  # user ID
    "LoginDate": "string",               # login date
    "LoginTime": "string",               # login time
    "IPAddress": "string",               # IP address
    "UserProductInfo": "string",         # user-side product info
    "InterfaceProductInfo": "string",    # interface-side product info
    "ProtocolInfo": "string",            # protocol info
    "MacAddress": "string",              # MAC address
}
structDict['CSecurityFtdcUserSessionField'] = CSecurityFtdcUserSessionField
# Order
CSecurityFtdcOrderField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "InstrumentID": "string",          # instrument code
    "OrderRef": "string",              # order reference
    "UserID": "string",                # user ID
    "ExchangeID": "string",            # exchange ID
    "OrderPriceType": "string",        # order price type
    "Direction": "string",             # buy/sell direction
    "CombOffsetFlag": "string",        # combined offset flag
    "CombHedgeFlag": "string",         # combined hedge flag
    "LimitPrice": "string",            # limit price (mapped to "string" in this API)
    "VolumeTotalOriginal": "int",      # original volume
    "TimeCondition": "string",         # time-in-force condition
    "GTDDate": "string",               # GTD date
    "VolumeCondition": "string",       # volume condition
    "MinVolume": "int",                # minimum fill volume
    "ContingentCondition": "string",   # trigger condition
    "StopPrice": "float",              # stop price
    "ForceCloseReason": "string",      # forced-close reason
    "IsAutoSuspend": "int",            # auto-suspend flag
    "BusinessUnit": "string",          # business unit
    "RequestID": "int",                # request ID
    "OrderLocalID": "string",          # local order ID
    "ParticipantID": "string",         # participant ID
    "ClientID": "string",              # client ID
    "ExchangeInstID": "string",        # instrument code at the exchange
    "BranchPBU": "string",             # exchange trader code
    "InstallID": "int",                # install ID
    "OrderSubmitStatus": "string",     # order submit status
    "AccountID": "string",             # account ID
    "NotifySequence": "int",           # order notify sequence number
    "TradingDay": "string",            # trading day
    "OrderSysID": "string",            # exchange order ID
    "OrderSource": "string",           # order source
    "OrderStatus": "string",           # order status
    "OrderType": "string",             # order type
    "VolumeTraded": "int",             # volume traded today
    "VolumeTotal": "int",              # remaining volume
    "InsertDate": "string",            # order insert date
    "InsertTime": "string",            # order insert time
    "ActiveTime": "string",            # activation time
    "SuspendTime": "string",           # suspension time
    "UpdateTime": "string",            # last update time
    "CancelTime": "string",            # cancel time
    "ActiveTraderID": "string",        # last-modifying exchange trader code
    "ClearingPartID": "string",        # clearing member ID
    "SequenceNo": "int",               # sequence number
    "FrontID": "int",                  # front-end ID
    "SessionID": "int",                # session ID
    "UserProductInfo": "string",       # user-side product info
    "StatusMsg": "string",             # status message
    "UserForceClose": "int",           # user forced-close flag
    "ActiveUserID": "string",          # operating user ID
    "BrokerOrderSeq": "int",           # broker order sequence number
    "RelativeOrderSysID": "string",    # related order ID
    "BranchID": "string",              # branch ID
    "TradeAmount": "float",            # traded amount
    "IsETF": "int",                    # whether ETF
    "InstrumentType": "string",        # instrument type
}
structDict['CSecurityFtdcOrderField'] = CSecurityFtdcOrderField
# Order action
CSecurityFtdcOrderActionField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "OrderActionRef": "int",         # order action reference
    "OrderRef": "string",            # order reference
    "RequestID": "int",              # request ID
    "FrontID": "int",                # front-end ID
    "SessionID": "int",              # session ID
    "ExchangeID": "string",          # exchange ID
    "ActionFlag": "string",          # action flag
    "LimitPrice": "float",           # limit price
    "VolumeChange": "int",           # volume change
    "ActionDate": "string",          # action date
    "ActionTime": "string",          # action time
    "BranchPBU": "string",           # exchange trader code
    "InstallID": "int",              # install ID
    "OrderLocalID": "string",        # local order ID
    "ActionLocalID": "string",       # local action ID
    "ParticipantID": "string",       # participant ID
    "ClientID": "string",            # client ID
    "BusinessUnit": "string",        # business unit
    "OrderActionStatus": "string",   # order action status
    "UserID": "string",              # user ID
    "BranchID": "string",            # branch ID
    "StatusMsg": "string",           # status message
    "InstrumentID": "string",        # instrument code
    "InstrumentType": "string",      # instrument type
}
structDict['CSecurityFtdcOrderActionField'] = CSecurityFtdcOrderActionField
# Erroneous order
CSecurityFtdcErrOrderField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "InstrumentID": "string",         # instrument code
    "OrderRef": "string",             # order reference
    "UserID": "string",               # user ID
    "ExchangeID": "string",           # exchange ID
    "OrderPriceType": "string",       # order price type
    "Direction": "string",            # buy/sell direction
    "CombOffsetFlag": "string",       # combined offset flag
    "CombHedgeFlag": "string",        # combined hedge flag
    "LimitPrice": "string",           # limit price (mapped to "string" in this API)
    "VolumeTotalOriginal": "int",     # original volume
    "TimeCondition": "string",        # time-in-force condition
    "GTDDate": "string",              # GTD date
    "VolumeCondition": "string",      # volume condition
    "MinVolume": "int",               # minimum fill volume
    "ContingentCondition": "string",  # trigger condition
    "StopPrice": "float",             # stop price
    "ForceCloseReason": "string",     # forced-close reason
    "IsAutoSuspend": "int",           # auto-suspend flag
    "BusinessUnit": "string",         # business unit
    "RequestID": "int",               # request ID
    "UserForceClose": "int",          # user forced-close flag
    "ErrorID": "int",                 # error code
    "ErrorMsg": "string",             # error message
}
structDict['CSecurityFtdcErrOrderField'] = CSecurityFtdcErrOrderField
# Erroneous order action
CSecurityFtdcErrOrderActionField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "OrderActionRef": "int",         # order action reference
    "OrderRef": "string",            # order reference
    "RequestID": "int",              # request ID
    "FrontID": "int",                # front-end ID
    "SessionID": "int",              # session ID
    "ExchangeID": "string",          # exchange ID
    "ActionFlag": "string",          # action flag
    "LimitPrice": "float",           # limit price
    "VolumeChange": "int",           # volume change
    "ActionDate": "string",          # action date
    "ActionTime": "string",          # action time
    "BranchPBU": "string",           # exchange trader code
    "InstallID": "int",              # install ID
    "OrderLocalID": "string",        # local order ID
    "ActionLocalID": "string",       # local action ID
    "ParticipantID": "string",       # participant ID
    "ClientID": "string",            # client ID
    "BusinessUnit": "string",        # business unit
    "OrderActionStatus": "string",   # order action status
    "UserID": "string",              # user ID
    "BranchID": "string",            # branch ID
    "StatusMsg": "string",           # status message
    "InstrumentID": "string",        # instrument code
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
    "InstrumentType": "string",      # instrument type
}
structDict['CSecurityFtdcErrOrderActionField'] = CSecurityFtdcErrOrderActionField
# Trade
CSecurityFtdcTradeField = {
    "BrokerID": "string",        # broker ID
    "InvestorID": "string",      # investor ID
    "InstrumentID": "string",    # instrument code
    "OrderRef": "string",        # order reference
    "UserID": "string",          # user ID
    "ExchangeID": "string",      # exchange ID
    "TradeID": "string",         # trade ID
    "Direction": "string",       # buy/sell direction
    "OrderSysID": "string",      # exchange order ID
    "ParticipantID": "string",   # participant ID
    "ClientID": "string",        # client ID
    "TradingRole": "string",     # trading role
    "ExchangeInstID": "string",  # instrument code at the exchange
    "OffsetFlag": "string",      # offset flag
    "HedgeFlag": "string",       # hedge flag
    "Price": "string",           # price (mapped to "string" in this API)
    "Volume": "int",             # volume
    "TradeDate": "string",       # trade date
    "TradeTime": "string",       # trade time
    "TradeType": "string",       # trade type
    "PriceSource": "string",     # trade price source
    "BranchPBU": "string",       # exchange trader code
    "OrderLocalID": "string",    # local order ID
    "ClearingPartID": "string",  # clearing member ID
    "BusinessUnit": "string",    # business unit
    "SequenceNo": "int",         # sequence number
    "TradeSource": "string",     # trade source
    "TradingDay": "string",      # trading day
    "BrokerOrderSeq": "int",     # broker order sequence number
    "TradeAmount": "float",      # trade amount
    "TradeIndex": "int",         # trade index
}
structDict['CSecurityFtdcTradeField'] = CSecurityFtdcTradeField
# Investor position
CSecurityFtdcInvestorPositionField = {
    "InstrumentID": "string",                 # instrument code
    "BrokerID": "string",                     # broker ID
    "InvestorID": "string",                   # investor ID
    "PosiDirection": "string",                # position direction (long/short)
    "HedgeFlag": "string",                    # hedge flag
    "PositionDate": "string",                 # position date
    "YdPosition": "int",                      # yesterday's position
    "Position": "int",                        # today's position
    "LongFrozen": "int",                      # long frozen
    "ShortFrozen": "int",                     # short frozen
    "LongFrozenAmount": "float",              # open frozen amount (long)
    "ShortFrozenAmount": "float",             # open frozen amount (short)
    "OpenVolume": "int",                      # open volume
    "CloseVolume": "int",                     # close volume
    "OpenAmount": "float",                    # open amount
    "CloseAmount": "float",                   # close amount
    "PositionCost": "float",                  # position cost
    "FrozenCash": "float",                    # frozen cash
    "CashIn": "float",                        # cash difference
    "Commission": "float",                    # commission
    "PreSettlementPrice": "float",            # previous settlement price
    "SettlementPrice": "float",               # current settlement price
    "TradingDay": "string",                   # trading day
    "OpenCost": "float",                      # open cost
    "ExchangeMargin": "float",                # exchange margin
    "TodayPosition": "int",                   # today's position
    "TransferFee": "float",                   # transfer fee
    "StampTax": "float",                      # stamp tax
    "TodayPurRedVolume": "int",               # today's creation/redemption volume
    "ConversionRate": "float",                # conversion rate
    "ConversionAmount": "float",              # conversion amount
    "StockValue": "float",                    # stock value
    "ExchangeID": "string",                   # exchange ID
    "AccountID": "string",                    # account ID
    "PledgeInPosition": "int",                # pledged-in position
    "RepurchasePosition": "int",              # standard bonds used by repo
    "PurRedShortFrozen": "int",               # ETF creation/redemption short frozen
    "MarginTradeVolume": "int",               # margin-buy volume
    "MarginTradeAmount": "float",             # margin-buy amount
    "MarginTradeFrozenVolume": "int",         # margin-buy frozen volume
    "MarginTradeFrozenAmount": "float",       # margin-buy frozen amount
    "MarginTradeConversionProfit": "float",   # margin-buy profit/loss
    "ShortSellVolume": "int",                 # short-sell volume
    "ShortSellAmount": "float",               # short-sell amount
    "ShortSellFrozenVolume": "int",           # short-sell frozen volume
    "ShortSellFrozenAmount": "float",         # short-sell frozen amount
    "ShortSellConversionProfit": "float",     # short-sell profit/loss
    "SSStockValue": "float",                  # short-sell total market value
    "TodayMTPosition": "int",                 # today's margin-trade position
    "TodaySSPosition": "int",                 # today's short-sell position
    "YdOpenCost": "float",                    # historical position open cost
    "LockPosition": "int",                    # locked position
    "CoverPosition": "int",                   # covered position
    "LockFrozenPosition": "int",              # lock frozen position
    "UnlockFrozenPosition": "int",            # unlock frozen position
    "CoverFrozenPosition": "int",             # cover frozen position
    "ExecFrozenPosition": "int",              # exercise frozen position
}
structDict['CSecurityFtdcInvestorPositionField'] = CSecurityFtdcInvestorPositionField
# Deposit/withdrawal sync
CSecurityFtdcSyncDepositField = {
    "DepositSeqNo": "string",  # deposit/withdrawal sequence number
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "Deposit": "float",        # deposit amount
    "IsForce": "int",          # whether forced
    "AccountID": "string",     # account ID
}
structDict['CSecurityFtdcSyncDepositField'] = CSecurityFtdcSyncDepositField

# Query exchange
CSecurityFtdcQryExchangeField = {
    "ExchangeID": "string",  # exchange ID
}
structDict['CSecurityFtdcQryExchangeField'] = CSecurityFtdcQryExchangeField
# Query product
CSecurityFtdcQryProductField = {
    "ProductID": "string",  # product ID
}
structDict['CSecurityFtdcQryProductField'] = CSecurityFtdcQryProductField

# Query instrument
CSecurityFtdcQryInstrumentField = {
    "InstrumentID": "string",    # instrument code
    "ExchangeID": "string",      # exchange ID
    "ExchangeInstID": "string",  # instrument code at the exchange
    "ProductID": "string",       # product ID
}
structDict['CSecurityFtdcQryInstrumentField'] = CSecurityFtdcQryInstrumentField

# Query broker
CSecurityFtdcQryBrokerField = {
    "BrokerID": "string",  # broker ID
}
structDict['CSecurityFtdcQryBrokerField'] = CSecurityFtdcQryBrokerField

# Query broker participant code
CSecurityFtdcQryPartBrokerField = {
    "ExchangeID": "string",     # exchange ID
    "BrokerID": "string",       # broker ID
    "ParticipantID": "string",  # participant ID
}
structDict['CSecurityFtdcQryPartBrokerField'] = CSecurityFtdcQryPartBrokerField
# Query investor
CSecurityFtdcQryInvestorField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CSecurityFtdcQryInvestorField'] = CSecurityFtdcQryInvestorField

# Query trading code
CSecurityFtdcQryTradingCodeField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
    "ClientID": "string",    # client ID
}
structDict['CSecurityFtdcQryTradingCodeField'] = CSecurityFtdcQryTradingCodeField

# Query super user
CSecurityFtdcQrySuperUserField = {
    "UserID": "string",  # user ID
}
structDict['CSecurityFtdcQrySuperUserField'] = CSecurityFtdcQrySuperUserField

# Query super user function rights
CSecurityFtdcQrySuperUserFunctionField = {
    "UserID": "string",  # user ID
}
structDict['CSecurityFtdcQrySuperUserFunctionField'] = CSecurityFtdcQrySuperUserFunctionField

# Query broker user
CSecurityFtdcQryBrokerUserField = {
    "BrokerID": "string",  # broker ID
    "UserID": "string",    # user ID
}
structDict['CSecurityFtdcQryBrokerUserField'] = CSecurityFtdcQryBrokerUserField

# Query broker user rights
CSecurityFtdcQryBrokerUserFunctionField = {
    "BrokerID": "string",  # broker ID
    "UserID": "string",    # user ID
}
structDict['CSecurityFtdcQryBrokerUserFunctionField'] = CSecurityFtdcQryBrokerUserFunctionField
# Query trading account
CSecurityFtdcQryTradingAccountField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CSecurityFtdcQryTradingAccountField'] = CSecurityFtdcQryTradingAccountField

# Query login-forbidden user
CSecurityFtdcQryLoginForbiddenUserField = {
    "BrokerID": "string",  # broker ID
    "UserID": "string",    # user ID
}
structDict['CSecurityFtdcQryLoginForbiddenUserField'] = CSecurityFtdcQryLoginForbiddenUserField

# Query market data
CSecurityFtdcQryDepthMarketDataField = {
    "InstrumentID": "string",  # instrument code
}
structDict['CSecurityFtdcQryDepthMarketDataField'] = CSecurityFtdcQryDepthMarketDataField

# Query instrument trading right
CSecurityFtdcQryInstrumentTradingRightField = {
    "ExchangeID": "string",    # exchange ID
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument code
}
structDict['CSecurityFtdcQryInstrumentTradingRightField'] = CSecurityFtdcQryInstrumentTradingRightField

# Query investor position detail
CSecurityFtdcQryInvestorPositionDetailField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument code
}
structDict['CSecurityFtdcQryInvestorPositionDetailField'] = CSecurityFtdcQryInvestorPositionDetailField
# Query bond interest
CSecurityFtdcQryBondInterestField = {
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument code
}
structDict['CSecurityFtdcQryBondInterestField'] = CSecurityFtdcQryBondInterestField

# Query market-value rationing info
CSecurityFtdcQryMarketRationInfoField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor account
    "ExchangeID": "string",  # exchange ID
}
structDict['CSecurityFtdcQryMarketRationInfoField'] = CSecurityFtdcQryMarketRationInfoField

# Query instrument commission rate
CSecurityFtdcQryInstrumentCommissionRateField = {
    "ExchangeID": "string",    # exchange ID
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument code
    "Direction": "string",     # buy/sell direction
    "OffsetFlag": "string",    # offset flag
}
structDict['CSecurityFtdcQryInstrumentCommissionRateField'] = CSecurityFtdcQryInstrumentCommissionRateField

# Query excess stock info
CSecurityFtdcQryExcessStockInfoField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument code
}
structDict['CSecurityFtdcQryExcessStockInfoField'] = CSecurityFtdcQryExcessStockInfoField
# Query investor-account relation
CSecurityFtdcQryInvestorAccountField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CSecurityFtdcQryInvestorAccountField'] = CSecurityFtdcQryInvestorAccountField

# Query ETF instrument
CSecurityFtdcQryETFInstrumentField = {
    "ExchangeID": "string",       # exchange ID
    "ETFInstrumentID": "string",  # ETF security code
}
structDict['CSecurityFtdcQryETFInstrumentField'] = CSecurityFtdcQryETFInstrumentField

# Query ETF stock basket
CSecurityFtdcQryETFBasketField = {
    "ExchangeID": "string",       # exchange ID
    "ETFInstrumentID": "string",  # ETF security code
}
structDict['CSecurityFtdcQryETFBasketField'] = CSecurityFtdcQryETFBasketField

# Query OF instrument
CSecurityFtdcQryOFInstrumentField = {
    "ExchangeID": "string",      # exchange ID
    "OFInstrumentID": "string",  # OF security code
}
structDict['CSecurityFtdcQryOFInstrumentField'] = CSecurityFtdcQryOFInstrumentField

# Query SF instrument
CSecurityFtdcQrySFInstrumentField = {
    "ExchangeID": "string",      # exchange ID
    "SFInstrumentID": "string",  # SF security code
}
structDict['CSecurityFtdcQrySFInstrumentField'] = CSecurityFtdcQrySFInstrumentField
#查询交易员报盘机
CSecurityFtdcQryTraderOfferField = {}
#交易所代码
CSecurityFtdcQryTraderOfferField["ExchangeID"] = "string"
#会员代码
CSecurityFtdcQryTraderOfferField["ParticipantID"] = "string"
#交易所交易员代码
CSecurityFtdcQryTraderOfferField["BranchPBU"] = "string"
structDict['CSecurityFtdcQryTraderOfferField'] = CSecurityFtdcQryTraderOfferField
#查询行情报盘机
CSecurityFtdcQryMDTraderOfferField = {}
#交易所代码
CSecurityFtdcQryMDTraderOfferField["ExchangeID"] = "string"
#会员代码
CSecurityFtdcQryMDTraderOfferField["ParticipantID"] = "string"
#交易所交易员代码
CSecurityFtdcQryMDTraderOfferField["BranchPBU"] = "string"
structDict['CSecurityFtdcQryMDTraderOfferField'] = CSecurityFtdcQryMDTraderOfferField
#查询前置状态
CSecurityFtdcQryFrontStatusField = {}
#前置编号
CSecurityFtdcQryFrontStatusField["FrontID"] = "int"
structDict['CSecurityFtdcQryFrontStatusField'] = CSecurityFtdcQryFrontStatusField
#查询用户会话
CSecurityFtdcQryUserSessionField = {}
#前置编号
CSecurityFtdcQryUserSessionField["FrontID"] = "int"
#会话编号
CSecurityFtdcQryUserSessionField["SessionID"] = "int"
#经纪公司代码
CSecurityFtdcQryUserSessionField["BrokerID"] = "string"
#用户代码
CSecurityFtdcQryUserSessionField["UserID"] = "string"
structDict['CSecurityFtdcQryUserSessionField'] = CSecurityFtdcQryUserSessionField
#查询报单
CSecurityFtdcQryOrderField = {}
#经纪公司代码
CSecurityFtdcQryOrderField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryOrderField["InvestorID"] = "string"
#合约代码
CSecurityFtdcQryOrderField["InstrumentID"] = "string"
#交易所代码
CSecurityFtdcQryOrderField["ExchangeID"] = "string"
#报单编号
CSecurityFtdcQryOrderField["OrderSysID"] = "string"
#开始时间
CSecurityFtdcQryOrderField["InsertTimeStart"] = "string"
#结束时间
CSecurityFtdcQryOrderField["InsertTimeEnd"] = "string"
structDict['CSecurityFtdcQryOrderField'] = CSecurityFtdcQryOrderField
#查询报单操作
CSecurityFtdcQryOrderActionField = {}
#经纪公司代码
CSecurityFtdcQryOrderActionField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryOrderActionField["InvestorID"] = "string"
#交易所代码
CSecurityFtdcQryOrderActionField["ExchangeID"] = "string"
structDict['CSecurityFtdcQryOrderActionField'] = CSecurityFtdcQryOrderActionField
#查询错误报单
CSecurityFtdcQryErrOrderField = {}
#经纪公司代码
CSecurityFtdcQryErrOrderField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryErrOrderField["InvestorID"] = "string"
structDict['CSecurityFtdcQryErrOrderField'] = CSecurityFtdcQryErrOrderField
#查询错误报单操作
CSecurityFtdcQryErrOrderActionField = {}
#经纪公司代码
CSecurityFtdcQryErrOrderActionField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryErrOrderActionField["InvestorID"] = "string"
structDict['CSecurityFtdcQryErrOrderActionField'] = CSecurityFtdcQryErrOrderActionField
#查询成交
CSecurityFtdcQryTradeField = {}
#经纪公司代码
CSecurityFtdcQryTradeField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryTradeField["InvestorID"] = "string"
#合约代码
CSecurityFtdcQryTradeField["InstrumentID"] = "string"
#交易所代码
CSecurityFtdcQryTradeField["ExchangeID"] = "string"
#成交编号
CSecurityFtdcQryTradeField["TradeID"] = "string"
#开始时间
CSecurityFtdcQryTradeField["TradeTimeStart"] = "string"
#结束时间
CSecurityFtdcQryTradeField["TradeTimeEnd"] = "string"
structDict['CSecurityFtdcQryTradeField'] = CSecurityFtdcQryTradeField
#查询投资者持仓
CSecurityFtdcQryInvestorPositionField = {}
#经纪公司代码
CSecurityFtdcQryInvestorPositionField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryInvestorPositionField["InvestorID"] = "string"
#合约代码
CSecurityFtdcQryInvestorPositionField["InstrumentID"] = "string"
structDict['CSecurityFtdcQryInvestorPositionField'] = CSecurityFtdcQryInvestorPositionField
#查询出入金流水
CSecurityFtdcQrySyncDepositField = {}
#经纪公司代码
CSecurityFtdcQrySyncDepositField["BrokerID"] = "string"
#出入金流水号
CSecurityFtdcQrySyncDepositField["DepositSeqNo"] = "string"
structDict['CSecurityFtdcQrySyncDepositField'] = CSecurityFtdcQrySyncDepositField
#用户口令变更
CSecurityFtdcUserPasswordUpdateField = {}
#经纪公司代码
CSecurityFtdcUserPasswordUpdateField["BrokerID"] = "string"
#用户代码
CSecurityFtdcUserPasswordUpdateField["UserID"] = "string"
#原来的口令
CSecurityFtdcUserPasswordUpdateField["OldPassword"] = "string"
#新的口令
CSecurityFtdcUserPasswordUpdateField["NewPassword"] = "string"
structDict['CSecurityFtdcUserPasswordUpdateField'] = CSecurityFtdcUserPasswordUpdateField
#资金账户口令变更域
CSecurityFtdcTradingAccountPasswordUpdateField = {}
#经纪公司代码
CSecurityFtdcTradingAccountPasswordUpdateField["BrokerID"] = "string"
#投资者帐号
CSecurityFtdcTradingAccountPasswordUpdateField["AccountID"] = "string"
#原来的口令
CSecurityFtdcTradingAccountPasswordUpdateField["OldPassword"] = "string"
#新的口令
CSecurityFtdcTradingAccountPasswordUpdateField["NewPassword"] = "string"
structDict['CSecurityFtdcTradingAccountPasswordUpdateField'] = CSecurityFtdcTradingAccountPasswordUpdateField
#手工同步用户动态令牌
CSecurityFtdcManualSyncBrokerUserOTPField = {}
#经纪公司代码
CSecurityFtdcManualSyncBrokerUserOTPField["BrokerID"] = "string"
#用户代码
CSecurityFtdcManualSyncBrokerUserOTPField["UserID"] = "string"
#动态令牌类型
CSecurityFtdcManualSyncBrokerUserOTPField["OTPType"] = "string"
#第一个动态密码
CSecurityFtdcManualSyncBrokerUserOTPField["FirstOTP"] = "string"
#第二个动态密码
CSecurityFtdcManualSyncBrokerUserOTPField["SecondOTP"] = "string"
structDict['CSecurityFtdcManualSyncBrokerUserOTPField'] = CSecurityFtdcManualSyncBrokerUserOTPField
#经纪公司用户口令
CSecurityFtdcBrokerUserPasswordField = {}
#经纪公司代码
CSecurityFtdcBrokerUserPasswordField["BrokerID"] = "string"
#用户代码
CSecurityFtdcBrokerUserPasswordField["UserID"] = "string"
#密码
CSecurityFtdcBrokerUserPasswordField["Password"] = "string"
structDict['CSecurityFtdcBrokerUserPasswordField'] = CSecurityFtdcBrokerUserPasswordField
#资金账户口令域
CSecurityFtdcTradingAccountPasswordField = {}
#经纪公司代码
CSecurityFtdcTradingAccountPasswordField["BrokerID"] = "string"
#投资者帐号
CSecurityFtdcTradingAccountPasswordField["AccountID"] = "string"
#密码
CSecurityFtdcTradingAccountPasswordField["Password"] = "string"
structDict['CSecurityFtdcTradingAccountPasswordField'] = CSecurityFtdcTradingAccountPasswordField
#用户权限
CSecurityFtdcUserRightField = {}
#经纪公司代码
CSecurityFtdcUserRightField["BrokerID"] = "string"
#用户代码
CSecurityFtdcUserRightField["UserID"] = "string"
#客户权限类型
CSecurityFtdcUserRightField["UserRightType"] = "string"
#是否禁止
CSecurityFtdcUserRightField["IsForbidden"] = "int"
structDict['CSecurityFtdcUserRightField'] = CSecurityFtdcUserRightField
#投资者账户
CSecurityFtdcInvestorAccountField = {}
#经纪公司代码
CSecurityFtdcInvestorAccountField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcInvestorAccountField["InvestorID"] = "string"
#投资者帐号
CSecurityFtdcInvestorAccountField["AccountID"] = "string"
#是否主账户
CSecurityFtdcInvestorAccountField["IsDefault"] = "int"
#账户类型
CSecurityFtdcInvestorAccountField["AccountType"] = "string"
#是否活跃
CSecurityFtdcInvestorAccountField["IsActive"] = "int"
#上交所交易单元号
CSecurityFtdcInvestorAccountField["SHBranchPBU"] = "string"
#深交所交易单元号
CSecurityFtdcInvestorAccountField["SZBranchPBU"] = "string"
structDict['CSecurityFtdcInvestorAccountField'] = CSecurityFtdcInvestorAccountField
#用户IP
CSecurityFtdcUserIPField = {}
#经纪公司代码
CSecurityFtdcUserIPField["BrokerID"] = "string"
#用户代码
CSecurityFtdcUserIPField["UserID"] = "string"
#IP地址
CSecurityFtdcUserIPField["IPAddress"] = "string"
#IP地址掩码
CSecurityFtdcUserIPField["IPMask"] = "string"
#Mac地址
CSecurityFtdcUserIPField["MacAddress"] = "string"
structDict['CSecurityFtdcUserIPField'] = CSecurityFtdcUserIPField
#用户动态令牌参数
CSecurityFtdcBrokerUserOTPParamField = {}
#经纪公司代码
CSecurityFtdcBrokerUserOTPParamField["BrokerID"] = "string"
#用户代码
CSecurityFtdcBrokerUserOTPParamField["UserID"] = "string"
#动态令牌提供商
CSecurityFtdcBrokerUserOTPParamField["OTPVendorsID"] = "string"
#动态令牌序列号
CSecurityFtdcBrokerUserOTPParamField["SerialNumber"] = "string"
#令牌密钥
CSecurityFtdcBrokerUserOTPParamField["AuthKey"] = "string"
#漂移值
CSecurityFtdcBrokerUserOTPParamField["LastDrift"] = "int"
#成功值
CSecurityFtdcBrokerUserOTPParamField["LastSuccess"] = "int"
#动态令牌类型
CSecurityFtdcBrokerUserOTPParamField["OTPType"] = "string"
structDict['CSecurityFtdcBrokerUserOTPParamField'] = CSecurityFtdcBrokerUserOTPParamField
#用户登录请求
CSecurityFtdcReqUserLoginField = {}
#交易日
CSecurityFtdcReqUserLoginField["TradingDay"] = "string"
#经纪公司代码
CSecurityFtdcReqUserLoginField["BrokerID"] = "string"
#用户代码
CSecurityFtdcReqUserLoginField["UserID"] = "string"
#密码
CSecurityFtdcReqUserLoginField["Password"] = "string"
#用户端产品信息
CSecurityFtdcReqUserLoginField["UserProductInfo"] = "string"
#接口端产品信息
CSecurityFtdcReqUserLoginField["InterfaceProductInfo"] = "string"
#协议信息
CSecurityFtdcReqUserLoginField["ProtocolInfo"] = "string"
#Mac地址
CSecurityFtdcReqUserLoginField["MacAddress"] = "string"
#动态密码
CSecurityFtdcReqUserLoginField["OneTimePassword"] = "string"
#终端IP地址
CSecurityFtdcReqUserLoginField["ClientIPAddress"] = "string"
#客户端认证码
CSecurityFtdcReqUserLoginField["AuthCode"] = "string"
structDict['CSecurityFtdcReqUserLoginField'] = CSecurityFtdcReqUserLoginField
#用户登录应答
CSecurityFtdcRspUserLoginField = {}
#交易日
CSecurityFtdcRspUserLoginField["TradingDay"] = "string"
#登录成功时间
CSecurityFtdcRspUserLoginField["LoginTime"] = "string"
#经纪公司代码
CSecurityFtdcRspUserLoginField["BrokerID"] = "string"
#用户代码
CSecurityFtdcRspUserLoginField["UserID"] = "string"
#交易系统名称
CSecurityFtdcRspUserLoginField["SystemName"] = "string"
#前置编号
CSecurityFtdcRspUserLoginField["FrontID"] = "int"
#会话编号
CSecurityFtdcRspUserLoginField["SessionID"] = "int"
#最大报单引用
CSecurityFtdcRspUserLoginField["MaxOrderRef"] = "string"
structDict['CSecurityFtdcRspUserLoginField'] = CSecurityFtdcRspUserLoginField
#用户登出请求
CSecurityFtdcUserLogoutField = {}
#经纪公司代码
CSecurityFtdcUserLogoutField["BrokerID"] = "string"
#用户代码
CSecurityFtdcUserLogoutField["UserID"] = "string"
structDict['CSecurityFtdcUserLogoutField'] = CSecurityFtdcUserLogoutField
#全部登出信息
CSecurityFtdcLogoutAllField = {}
#前置编号
CSecurityFtdcLogoutAllField["FrontID"] = "int"
#会话编号
CSecurityFtdcLogoutAllField["SessionID"] = "int"
#系统名称
CSecurityFtdcLogoutAllField["SystemName"] = "string"
structDict['CSecurityFtdcLogoutAllField'] = CSecurityFtdcLogoutAllField
#强制交易员退出
CSecurityFtdcForceUserLogoutField = {}
#经纪公司代码
CSecurityFtdcForceUserLogoutField["BrokerID"] = "string"
#用户代码
CSecurityFtdcForceUserLogoutField["UserID"] = "string"
structDict['CSecurityFtdcForceUserLogoutField'] = CSecurityFtdcForceUserLogoutField
#经纪公司用户激活
CSecurityFtdcActivateBrokerUserField = {}
#经纪公司代码
CSecurityFtdcActivateBrokerUserField["BrokerID"] = "string"
#用户代码
CSecurityFtdcActivateBrokerUserField["UserID"] = "string"
structDict['CSecurityFtdcActivateBrokerUserField'] = CSecurityFtdcActivateBrokerUserField
#输入报单
CSecurityFtdcInputOrderField = {}
#经纪公司代码
CSecurityFtdcInputOrderField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcInputOrderField["InvestorID"] = "string"
#合约代码
CSecurityFtdcInputOrderField["InstrumentID"] = "string"
#报单引用
CSecurityFtdcInputOrderField["OrderRef"] = "string"
#用户代码
CSecurityFtdcInputOrderField["UserID"] = "string"
#交易所代码
CSecurityFtdcInputOrderField["ExchangeID"] = "string"
#报单价格条件
CSecurityFtdcInputOrderField["OrderPriceType"] = "string"
#买卖方向
CSecurityFtdcInputOrderField["Direction"] = "string"
#组合开平标志
CSecurityFtdcInputOrderField["CombOffsetFlag"] = "string"
#组合投机套保标志
CSecurityFtdcInputOrderField["CombHedgeFlag"] = "string"
#价格
CSecurityFtdcInputOrderField["LimitPrice"] = "string"
#数量
CSecurityFtdcInputOrderField["VolumeTotalOriginal"] = "int"
#有效期类型
CSecurityFtdcInputOrderField["TimeCondition"] = "string"
#GTD日期
CSecurityFtdcInputOrderField["GTDDate"] = "string"
#成交量类型
CSecurityFtdcInputOrderField["VolumeCondition"] = "string"
#最小成交量
CSecurityFtdcInputOrderField["MinVolume"] = "int"
#触发条件
CSecurityFtdcInputOrderField["ContingentCondition"] = "string"
#止损价
CSecurityFtdcInputOrderField["StopPrice"] = "float"
#强平原因
CSecurityFtdcInputOrderField["ForceCloseReason"] = "string"
#自动挂起标志
CSecurityFtdcInputOrderField["IsAutoSuspend"] = "int"
#业务单元
CSecurityFtdcInputOrderField["BusinessUnit"] = "string"
#请求编号
CSecurityFtdcInputOrderField["RequestID"] = "int"
#用户强评标志
CSecurityFtdcInputOrderField["UserForceClose"] = "int"
structDict['CSecurityFtdcInputOrderField'] = CSecurityFtdcInputOrderField
#输入报单操作
CSecurityFtdcInputOrderActionField = {}
#经纪公司代码
CSecurityFtdcInputOrderActionField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcInputOrderActionField["InvestorID"] = "string"
#报单操作引用
CSecurityFtdcInputOrderActionField["OrderActionRef"] = "int"
#报单引用
CSecurityFtdcInputOrderActionField["OrderRef"] = "string"
#请求编号
CSecurityFtdcInputOrderActionField["RequestID"] = "int"
#前置编号
CSecurityFtdcInputOrderActionField["FrontID"] = "int"
#会话编号
CSecurityFtdcInputOrderActionField["SessionID"] = "int"
#交易所代码
CSecurityFtdcInputOrderActionField["ExchangeID"] = "string"
#操作标志
CSecurityFtdcInputOrderActionField["ActionFlag"] = "string"
#价格
CSecurityFtdcInputOrderActionField["LimitPrice"] = "float"
#数量变化
CSecurityFtdcInputOrderActionField["VolumeChange"] = "int"
#用户代码
CSecurityFtdcInputOrderActionField["UserID"] = "string"
#合约代码
CSecurityFtdcInputOrderActionField["InstrumentID"] = "string"
#交易所交易员代码
CSecurityFtdcInputOrderActionField["BranchPBU"] = "string"
#本地报单编号
CSecurityFtdcInputOrderActionField["OrderLocalID"] = "string"
structDict['CSecurityFtdcInputOrderActionField'] = CSecurityFtdcInputOrderActionField
#指定的合约
CSecurityFtdcSpecificInstrumentField = {}
#合约代码
CSecurityFtdcSpecificInstrumentField["InstrumentID"] = "string"
#交易所代码
CSecurityFtdcSpecificInstrumentField["ExchangeID"] = "string"
structDict['CSecurityFtdcSpecificInstrumentField'] = CSecurityFtdcSpecificInstrumentField
#指定的交易所
CSecurityFtdcSpecificExchangeField = {}
#交易所代码
CSecurityFtdcSpecificExchangeField["ExchangeID"] = "string"
structDict['CSecurityFtdcSpecificExchangeField'] = CSecurityFtdcSpecificExchangeField
#行情基础属性
CSecurityFtdcMarketDataBaseField = {}
#交易日
CSecurityFtdcMarketDataBaseField["TradingDay"] = "string"
#上次结算价
CSecurityFtdcMarketDataBaseField["PreSettlementPrice"] = "float"
#昨收盘
CSecurityFtdcMarketDataBaseField["PreClosePrice"] = "float"
#昨持仓量
CSecurityFtdcMarketDataBaseField["PreOpenInterest"] = "float"
#昨虚实度
CSecurityFtdcMarketDataBaseField["PreDelta"] = "float"
structDict['CSecurityFtdcMarketDataBaseField'] = CSecurityFtdcMarketDataBaseField
#行情静态属性
CSecurityFtdcMarketDataStaticField = {}
#今开盘
CSecurityFtdcMarketDataStaticField["OpenPrice"] = "float"
#最高价
CSecurityFtdcMarketDataStaticField["HighestPrice"] = "float"
#最低价
CSecurityFtdcMarketDataStaticField["LowestPrice"] = "float"
#今收盘
CSecurityFtdcMarketDataStaticField["ClosePrice"] = "float"
#涨停板价
CSecurityFtdcMarketDataStaticField["UpperLimitPrice"] = "float"
#跌停板价
CSecurityFtdcMarketDataStaticField["LowerLimitPrice"] = "float"
#本次结算价
CSecurityFtdcMarketDataStaticField["SettlementPrice"] = "float"
#今虚实度
CSecurityFtdcMarketDataStaticField["CurrDelta"] = "float"
structDict['CSecurityFtdcMarketDataStaticField'] = CSecurityFtdcMarketDataStaticField
#行情最新成交属性
CSecurityFtdcMarketDataLastMatchField = {}
#最新价
CSecurityFtdcMarketDataLastMatchField["LastPrice"] = "float"
#数量
CSecurityFtdcMarketDataLastMatchField["Volume"] = "int"
#成交金额
CSecurityFtdcMarketDataLastMatchField["Turnover"] = "float"
#持仓量
CSecurityFtdcMarketDataLastMatchField["OpenInterest"] = "float"
structDict['CSecurityFtdcMarketDataLastMatchField'] = CSecurityFtdcMarketDataLastMatchField
#行情最优价属性
CSecurityFtdcMarketDataBestPriceField = {}
#申买价一
CSecurityFtdcMarketDataBestPriceField["BidPrice1"] = "float"
#申买量一
CSecurityFtdcMarketDataBestPriceField["BidVolume1"] = "int"
#申卖价一
CSecurityFtdcMarketDataBestPriceField["AskPrice1"] = "float"
#申卖量一
CSecurityFtdcMarketDataBestPriceField["AskVolume1"] = "int"
structDict['CSecurityFtdcMarketDataBestPriceField'] = CSecurityFtdcMarketDataBestPriceField
#行情申买二、三属性
CSecurityFtdcMarketDataBid23Field = {}
#申买价二
CSecurityFtdcMarketDataBid23Field["BidPrice2"] = "float"
#申买量二
CSecurityFtdcMarketDataBid23Field["BidVolume2"] = "int"
#申买价三
CSecurityFtdcMarketDataBid23Field["BidPrice3"] = "float"
#申买量三
CSecurityFtdcMarketDataBid23Field["BidVolume3"] = "int"
structDict['CSecurityFtdcMarketDataBid23Field'] = CSecurityFtdcMarketDataBid23Field
#行情申卖二、三属性
CSecurityFtdcMarketDataAsk23Field = {}
#申卖价二
CSecurityFtdcMarketDataAsk23Field["AskPrice2"] = "float"
#申卖量二
CSecurityFtdcMarketDataAsk23Field["AskVolume2"] = "int"
#申卖价三
CSecurityFtdcMarketDataAsk23Field["AskPrice3"] = "float"
#申卖量三
CSecurityFtdcMarketDataAsk23Field["AskVolume3"] = "int"
structDict['CSecurityFtdcMarketDataAsk23Field'] = CSecurityFtdcMarketDataAsk23Field
#行情申买四、五属性
CSecurityFtdcMarketDataBid45Field = {}
#申买价四
CSecurityFtdcMarketDataBid45Field["BidPrice4"] = "float"
#申买量四
CSecurityFtdcMarketDataBid45Field["BidVolume4"] = "int"
#申买价五
CSecurityFtdcMarketDataBid45Field["BidPrice5"] = "float"
#申买量五
CSecurityFtdcMarketDataBid45Field["BidVolume5"] = "int"
structDict['CSecurityFtdcMarketDataBid45Field'] = CSecurityFtdcMarketDataBid45Field
#行情申卖四、五属性
CSecurityFtdcMarketDataAsk45Field = {}
#申卖价四
CSecurityFtdcMarketDataAsk45Field["AskPrice4"] = "float"
#申卖量四
CSecurityFtdcMarketDataAsk45Field["AskVolume4"] = "int"
#申卖价五
CSecurityFtdcMarketDataAsk45Field["AskPrice5"] = "float"
#申卖量五
CSecurityFtdcMarketDataAsk45Field["AskVolume5"] = "int"
structDict['CSecurityFtdcMarketDataAsk45Field'] = CSecurityFtdcMarketDataAsk45Field
#行情更新时间属性
CSecurityFtdcMarketDataUpdateTimeField = {}
#合约代码
CSecurityFtdcMarketDataUpdateTimeField["InstrumentID"] = "string"
#最后修改时间
CSecurityFtdcMarketDataUpdateTimeField["UpdateTime"] = "string"
#最后修改毫秒
CSecurityFtdcMarketDataUpdateTimeField["UpdateMillisec"] = "int"
#业务日期
CSecurityFtdcMarketDataUpdateTimeField["ActionDay"] = "string"
structDict['CSecurityFtdcMarketDataUpdateTimeField'] = CSecurityFtdcMarketDataUpdateTimeField
#成交均价
CSecurityFtdcMarketDataAveragePriceField = {}
#当日均价
CSecurityFtdcMarketDataAveragePriceField["AveragePrice"] = "float"
structDict['CSecurityFtdcMarketDataAveragePriceField'] = CSecurityFtdcMarketDataAveragePriceField
#行情交易所代码属性
CSecurityFtdcMarketDataExchangeField = {}
#交易所代码
CSecurityFtdcMarketDataExchangeField["ExchangeID"] = "string"
structDict['CSecurityFtdcMarketDataExchangeField'] = CSecurityFtdcMarketDataExchangeField
#信息分发
CSecurityFtdcDisseminationField = {}
#序列系列号
CSecurityFtdcDisseminationField["SequenceSeries"] = "int"
#序列号
CSecurityFtdcDisseminationField["SequenceNo"] = "int"
structDict['CSecurityFtdcDisseminationField'] = CSecurityFtdcDisseminationField
#资金转账输入
CSecurityFtdcInputFundTransferField = {}
#经纪公司代码
CSecurityFtdcInputFundTransferField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcInputFundTransferField["InvestorID"] = "string"
#投资者资金帐号
CSecurityFtdcInputFundTransferField["AccountID"] = "string"
#资金帐户密码
CSecurityFtdcInputFundTransferField["Password"] = "string"
#用户代码
CSecurityFtdcInputFundTransferField["UserID"] = "string"
#交易金额
CSecurityFtdcInputFundTransferField["TradeAmount"] = "float"
#摘要
CSecurityFtdcInputFundTransferField["Digest"] = "string"
#账户类型
CSecurityFtdcInputFundTransferField["AccountType"] = "string"
structDict['CSecurityFtdcInputFundTransferField'] = CSecurityFtdcInputFundTransferField
#资金转账
CSecurityFtdcFundTransferField = {}
#经纪公司代码
CSecurityFtdcFundTransferField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcFundTransferField["InvestorID"] = "string"
#投资者资金帐号
CSecurityFtdcFundTransferField["AccountID"] = "string"
#资金帐户密码
CSecurityFtdcFundTransferField["Password"] = "string"
#用户代码
CSecurityFtdcFundTransferField["UserID"] = "string"
#交易金额
CSecurityFtdcFundTransferField["TradeAmount"] = "float"
#摘要
CSecurityFtdcFundTransferField["Digest"] = "string"
#会话编号
CSecurityFtdcFundTransferField["SessionID"] = "int"
#Liber核心流水号
CSecurityFtdcFundTransferField["LiberSerial"] = "int"
#转账平台流水号
CSecurityFtdcFundTransferField["PlateSerial"] = "int"
#第三方流水号
CSecurityFtdcFundTransferField["TransferSerial"] = "string"
#交易日
CSecurityFtdcFundTransferField["TradingDay"] = "string"
#转账时间
CSecurityFtdcFundTransferField["TradeTime"] = "string"
#出入金方向
CSecurityFtdcFundTransferField["FundDirection"] = "string"
#错误代码
CSecurityFtdcFundTransferField["ErrorID"] = "int"
#错误信息
CSecurityFtdcFundTransferField["ErrorMsg"] = "string"
structDict['CSecurityFtdcFundTransferField'] = CSecurityFtdcFundTransferField
#资金转账查询请求
CSecurityFtdcQryFundTransferSerialField = {}
#经纪公司代码
CSecurityFtdcQryFundTransferSerialField["BrokerID"] = "string"
#投资者资金帐号
CSecurityFtdcQryFundTransferSerialField["AccountID"] = "string"
#账户类型
CSecurityFtdcQryFundTransferSerialField["AccountType"] = "string"
structDict['CSecurityFtdcQryFundTransferSerialField'] = CSecurityFtdcQryFundTransferSerialField
#资金内转
CSecurityFtdcFundInterTransferField = {}
#经纪公司代码
CSecurityFtdcFundInterTransferField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcFundInterTransferField["InvestorID"] = "string"
#用户代码
CSecurityFtdcFundInterTransferField["UserID"] = "string"
#资金账户代码
CSecurityFtdcFundInterTransferField["AccountID"] = "string"
#资金账户密码
CSecurityFtdcFundInterTransferField["Password"] = "string"
#金额
CSecurityFtdcFundInterTransferField["TradeAmount"] = "float"
#内转类型
CSecurityFtdcFundInterTransferField["TransferType"] = "string"
#资金内转编号
CSecurityFtdcFundInterTransferField["SerialID"] = "int"
structDict['CSecurityFtdcFundInterTransferField'] = CSecurityFtdcFundInterTransferField
#资金内转流水
CSecurityFtdcFundInterTransferSerialField = {}
#经纪公司代码
CSecurityFtdcFundInterTransferSerialField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcFundInterTransferSerialField["InvestorID"] = "string"
#用户代码
CSecurityFtdcFundInterTransferSerialField["UserID"] = "string"
#资金账户代码
CSecurityFtdcFundInterTransferSerialField["AccountID"] = "string"
#资金账户密码
CSecurityFtdcFundInterTransferSerialField["Password"] = "string"
#金额
CSecurityFtdcFundInterTransferSerialField["TradeAmount"] = "float"
#内转类型
CSecurityFtdcFundInterTransferSerialField["TransferType"] = "string"
#资金内转编号
CSecurityFtdcFundInterTransferSerialField["SerialID"] = "int"
#转账时间
CSecurityFtdcFundInterTransferSerialField["TransferTime"] = "string"
#错误代码
CSecurityFtdcFundInterTransferSerialField["ErrorID"] = "int"
#错误信息
CSecurityFtdcFundInterTransferSerialField["ErrorMsg"] = "string"
structDict['CSecurityFtdcFundInterTransferSerialField'] = CSecurityFtdcFundInterTransferSerialField
#资金内转流水查询请求
CSecurityFtdcQryFundInterTransferSerialField = {}
#经纪公司代码
CSecurityFtdcQryFundInterTransferSerialField["BrokerID"] = "string"
#投资者代码
CSecurityFtdcQryFundInterTransferSerialField["InvestorID"] = "string"
structDict['CSecurityFtdcQryFundInterTransferSerialField'] = CSecurityFtdcQryFundInterTransferSerialField
#获取数据库信息
CSecurityFtdcFetchDBInfoField = {}
#用户代码
CSecurityFtdcFetchDBInfoField["UserID"] = "string"
#密码
CSecurityFtdcFetchDBInfoField["Password"] = "string"
#数据库索引
CSecurityFtdcFetchDBInfoField["DBIndex"] = "string"
#数据库IP地址
CSecurityFtdcFetchDBInfoField["IPAddress"] = "string"
#数据库IP端口
CSecurityFtdcFetchDBInfoField["IPPort"] = "int"
#数据库名称
CSecurityFtdcFetchDBInfoField["DBName"] = "string"
#数据库用户名
CSecurityFtdcFetchDBInfoField["DBUserID"] = "string"
#数据库密码
CSecurityFtdcFetchDBInfoField["DBPassword"] = "string"
structDict['CSecurityFtdcFetchDBInfoField'] = CSecurityFtdcFetchDBInfoField
#MD用户信息
CSecurityFtdcMDUserInfoField = {}
#经纪公司代码
CSecurityFtdcMDUserInfoField["BrokerID"] = "string"
#用户代码
CSecurityFtdcMDUserInfoField["UserID"] = "string"
#用户名称
CSecurityFtdcMDUserInfoField["UserName"] = "string"
#密码
CSecurityFtdcMDUserInfoField["Password"] = "string"
#行情系统编号
CSecurityFtdcMDUserInfoField["MDSysID"] = "int"
structDict['CSecurityFtdcMDUserInfoField'] = CSecurityFtdcMDUserInfoField
| mit |
claudep/translate | translate/convert/test_json2po.py | 1 | 2735 | #!/usr/bin/env python
from translate.convert import json2po, test_convert
from translate.misc import wStringIO
from translate.storage import jsonl10n
class TestJson2PO:
def json2po(self, jsonsource, template=None, filter=None):
"""helper that converts json source to po source without requiring files"""
inputfile = wStringIO.StringIO(jsonsource)
inputjson = jsonl10n.JsonFile(inputfile, filter=filter)
convertor = json2po.json2po()
outputpo = convertor.convert_store(inputjson)
return outputpo
def singleelement(self, storage):
"""checks that the pofile contains a single non-header element, and returns it"""
print(storage.serialize())
assert len(storage.units) == 1
return storage.units[0]
def test_simple(self):
"""test the most basic json conversion"""
jsonsource = '''{ "text": "A simple string"}'''
poexpected = '''#: .text
msgid "A simple string"
msgstr ""
'''
poresult = self.json2po(jsonsource)
assert str(poresult.units[1]) == poexpected
def test_filter(self):
"""test basic json conversion with filter option"""
jsonsource = '''{ "text": "A simple string", "number": 42 }'''
poexpected = '''#: .text
msgid "A simple string"
msgstr ""
'''
poresult = self.json2po(jsonsource, filter=["text"])
assert str(poresult.units[1]) == poexpected
def test_miltiple_units(self):
"""test that we can handle json with multiple units"""
jsonsource = '''
{
"name": "John",
"surname": "Smith",
"address":
{
"streetAddress": "Koeistraat 21",
"city": "Pretoria",
"country": "South Africa",
"postalCode": "10021"
},
"phoneNumber":
[
{
"type": "home",
"number": "012 345-6789"
},
{
"type": "fax",
"number": "012 345-6788"
}
]
}
'''
poresult = self.json2po(jsonsource)
assert poresult.units[0].isheader()
print(len(poresult.units))
assert len(poresult.units) == 11
class TestJson2POCommand(test_convert.TestConvertCommand, TestJson2PO):
"""Tests running actual json2po commands on files"""
convertmodule = json2po
defaultoptions = {"progress": "none"}
def test_help(self):
"""tests getting help"""
options = test_convert.TestConvertCommand.test_help(self)
options = self.help_check(options, "-P, --pot")
options = self.help_check(options, "--duplicates")
options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
options = self.help_check(options, "--filter", last=True)
| gpl-2.0 |
thomashaw/SecGen | modules/utilities/unix/audit_tools/ghidra/files/release/Ghidra/Features/Python/data/jython-2.7.1/Lib/nntplib.py | 92 | 21470 | """An NNTP client class based on RFC 977: Network News Transfer Protocol.
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print 'Group', name, 'has', count, 'articles, range', first, 'to', last
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', first + '-' + last)
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'r') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Imports
import re
import socket
__all__ = ["NNTP","NNTPReplyError","NNTPTemporaryError",
"NNTPPermanentError","NNTPProtocolError","NNTPDataError",
"error_reply","error_temp","error_perm","error_proto",
"error_data",]
# maximal line length when calling readline(). This is to prevent
# reading arbitrary length lines. RFC 3977 limits NNTP line length to
# 512 characters, including CRLF. We have selected 2048 just to be on
# the safe side.
_MAXLINE = 2048
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
"""Base class for all nntplib exceptions"""
def __init__(self, *args):
Exception.__init__(self, *args)
try:
self.response = args[0]
except IndexError:
self.response = 'No response given'
class NNTPReplyError(NNTPError):
"""Unexpected [123]xx reply"""
pass
class NNTPTemporaryError(NNTPError):
"""4xx errors"""
pass
class NNTPPermanentError(NNTPError):
"""5xx errors"""
pass
class NNTPProtocolError(NNTPError):
"""Response does not begin with [1-5]"""
pass
class NNTPDataError(NNTPError):
"""Error in response data"""
pass
# for backwards compatibility
error_reply = NNTPReplyError
error_temp = NNTPTemporaryError
error_perm = NNTPPermanentError
error_proto = NNTPProtocolError
error_data = NNTPDataError
# Standard port used by NNTP servers
NNTP_PORT = 119
# Response numbers that are followed by additional text (e.g. article)
LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class NNTP:
def __init__(self, host, port=NNTP_PORT, user=None, password=None,
readermode=None, usenetrc=True):
"""Initialize an instance. Arguments:
- host: hostname to connect to
- port: port to connect to (default the standard NNTP port)
- user: username to authenticate with
- password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.port = port
self.sock = socket.create_connection((host, port))
self.file = self.sock.makefile('rb')
self.debugging = 0
self.welcome = self.getresp()
# 'mode reader' is sometimes necessary to enable 'reader' mode.
# However, the order in which 'mode reader' and 'authinfo' need to
# arrive differs between some NNTP servers. Try to send
# 'mode reader', and if it fails with an authorization failed
# error, try again after sending authinfo.
readermode_afterauth = 0
if readermode:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
except NNTPTemporaryError, e:
if user and e.response[:3] == '480':
# Need authorization before 'mode reader'
readermode_afterauth = 1
else:
raise
# If no login/password was specified, try to get them from ~/.netrc
# Presume that if .netc has an entry, NNRP authentication is required.
try:
if usenetrc and not user:
import netrc
credentials = netrc.netrc()
auth = credentials.authenticators(host)
if auth:
user = auth[0]
password = auth[2]
except IOError:
pass
# Perform NNRP authentication if needed.
if user:
resp = self.shortcmd('authinfo user '+user)
if resp[:3] == '381':
if not password:
raise NNTPReplyError(resp)
else:
resp = self.shortcmd(
'authinfo pass '+password)
if resp[:3] != '281':
raise NNTPPermanentError(resp)
if readermode_afterauth:
try:
self.welcome = self.shortcmd('mode reader')
except NNTPPermanentError:
# error 500, probably 'not implemented'
pass
# Get the welcome message from the server
# (this is read and squirreled away by __init__()).
# If the response code is 200, posting is allowed;
# if it 201, posting is not allowed
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
If the response code is 200, posting is allowed;
if it 201, posting is not allowed."""
if self.debugging: print '*welcome*', repr(self.welcome)
return self.welcome
def set_debuglevel(self, level):
"""Set the debugging level. Argument 'level' means:
0: no debugging output (default)
1: print commands and responses but not body text etc.
2: also print raw lines read and sent before stripping CR/LF"""
self.debugging = level
debug = set_debuglevel
def putline(self, line):
"""Internal: send one line to the server, appending CRLF."""
line = line + CRLF
if self.debugging > 1: print '*put*', repr(line)
self.sock.sendall(line)
def putcmd(self, line):
"""Internal: send one command to the server (through putline())."""
if self.debugging: print '*cmd*', repr(line)
self.putline(line)
def getline(self):
"""Internal: return one line from the server, stripping CRLF.
Raise EOFError if the connection is closed."""
line = self.file.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise NNTPDataError('line too long')
if self.debugging > 1:
print '*get*', repr(line)
if not line: raise EOFError
if line[-2:] == CRLF: line = line[:-2]
elif line[-1:] in CRLF: line = line[:-1]
return line
    def getresp(self):
        """Internal: get a response from the server.
        Raise various errors if the response indicates an error."""
        resp = self.getline()
        if self.debugging: print '*resp*', repr(resp)
        # The leading digit encodes the response class:
        # 4xx temporary failure, 5xx permanent failure, 1xx-3xx success.
        c = resp[:1]
        if c == '4':
            raise NNTPTemporaryError(resp)
        if c == '5':
            raise NNTPPermanentError(resp)
        if c not in '123':
            raise NNTPProtocolError(resp)
        return resp
    def getlongresp(self, file=None):
        """Internal: get a response plus following text from the server.
        Raise various errors if the response indicates an error.

        If 'file' is a string it is opened for writing and the text is
        streamed into it (the returned list is then empty); if it is a
        file-like object it is written to but not closed here.
        NOTE(review): 'file' and 'list' shadow builtins; names kept for
        interface compatibility."""
        openedFile = None
        try:
            # If a string was passed then open a file with that name
            if isinstance(file, str):
                openedFile = file = open(file, "w")
            resp = self.getresp()
            if resp[:3] not in LONGRESP:
                raise NNTPReplyError(resp)
            list = []
            while 1:
                line = self.getline()
                # A lone '.' terminates the multi-line response.
                if line == '.':
                    break
                # Undo dot-stuffing: a leading '..' stands for a '.'.
                if line[:2] == '..':
                    line = line[1:]
                if file:
                    file.write(line + "\n")
                else:
                    list.append(line)
        finally:
            # If this method created the file, then it must close it
            if openedFile:
                openedFile.close()
        return resp, list
def shortcmd(self, line):
"""Internal: send a command and get the response."""
self.putcmd(line)
return self.getresp()
def longcmd(self, line, file=None):
"""Internal: send a command and get the response plus following text."""
self.putcmd(line)
return self.getlongresp(file)
def newgroups(self, date, time, file=None):
"""Process a NEWGROUPS command. Arguments:
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of newsgroup names"""
return self.longcmd('NEWGROUPS ' + date + ' ' + time, file)
def newnews(self, group, date, time, file=None):
"""Process a NEWNEWS command. Arguments:
- group: group name or '*'
- date: string 'yymmdd' indicating the date
- time: string 'hhmmss' indicating the time
Return:
- resp: server response if successful
- list: list of message ids"""
cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
return self.longcmd(cmd, file)
def list(self, file=None):
"""Process a LIST command. Return:
- resp: server response if successful
- list: list of (group, last, first, flag) (strings)"""
resp, list = self.longcmd('LIST', file)
for i in range(len(list)):
# Parse lines into "group last first flag"
list[i] = tuple(list[i].split())
return resp, list
def description(self, group):
"""Get a description for a single group. If more than one
group matches ('group' is a pattern), return the first. If no
group matches, return an empty string.
This elides the response code from the server, since it can
only be '215' or '285' (for xgtitle) anyway. If the response
code is needed, use the 'descriptions' method.
NOTE: This neither checks for a wildcard in 'group' nor does
it check whether the group actually exists."""
resp, lines = self.descriptions(group)
if len(lines) == 0:
return ""
else:
return lines[0][1]
def descriptions(self, group_pattern):
"""Get descriptions for a range of groups."""
line_pat = re.compile("^(?P<group>[^ \t]+)[ \t]+(.*)$")
# Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
resp, raw_lines = self.longcmd('LIST NEWSGROUPS ' + group_pattern)
if resp[:3] != "215":
# Now the deprecated XGTITLE. This either raises an error
# or succeeds with the same output structure as LIST
# NEWSGROUPS.
resp, raw_lines = self.longcmd('XGTITLE ' + group_pattern)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def group(self, name):
"""Process a GROUP command. Argument:
- group: the group name
Returns:
- resp: server response if successful
- count: number of articles (string)
- first: first article number (string)
- last: last article number (string)
- name: the group name"""
resp = self.shortcmd('GROUP ' + name)
if resp[:3] != '211':
raise NNTPReplyError(resp)
words = resp.split()
count = first = last = 0
n = len(words)
if n > 1:
count = words[1]
if n > 2:
first = words[2]
if n > 3:
last = words[3]
if n > 4:
name = words[4].lower()
return resp, count, first, last, name
def help(self, file=None):
"""Process a HELP command. Returns:
- resp: server response if successful
- list: list of strings"""
return self.longcmd('HELP',file)
def statparse(self, resp):
"""Internal: parse the response of a STAT, NEXT or LAST command."""
if resp[:2] != '22':
raise NNTPReplyError(resp)
words = resp.split()
nr = 0
id = ''
n = len(words)
if n > 1:
nr = words[1]
if n > 2:
id = words[2]
return resp, nr, id
def statcmd(self, line):
"""Internal: process a STAT, NEXT or LAST command."""
resp = self.shortcmd(line)
return self.statparse(resp)
def stat(self, id):
"""Process a STAT command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: the article number
- id: the message id"""
return self.statcmd('STAT ' + id)
def next(self):
"""Process a NEXT command. No arguments. Return as for STAT."""
return self.statcmd('NEXT')
def last(self):
"""Process a LAST command. No arguments. Return as for STAT."""
return self.statcmd('LAST')
def artcmd(self, line, file=None):
"""Internal: process a HEAD, BODY or ARTICLE command."""
resp, list = self.longcmd(line, file)
resp, nr, id = self.statparse(resp)
return resp, nr, id, list
def head(self, id):
"""Process a HEAD command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's header"""
return self.artcmd('HEAD ' + id)
def body(self, id, file=None):
"""Process a BODY command. Argument:
- id: article number or message id
- file: Filename string or file object to store the article in
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article's body or an empty list
if file was used"""
return self.artcmd('BODY ' + id, file)
def article(self, id):
"""Process an ARTICLE command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article"""
return self.artcmd('ARTICLE ' + id)
def slave(self):
"""Process a SLAVE command. Returns:
- resp: server response if successful"""
return self.shortcmd('SLAVE')
def xhdr(self, hdr, str, file=None):
"""Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. 'subject')
- str: an article nr, a message id, or a range nr1-nr2
Returns:
- resp: server response if successful
- list: list of (nr, value) strings"""
pat = re.compile('^([0-9]+) ?(.*)\n?')
resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str, file)
for i in range(len(lines)):
line = lines[i]
m = pat.match(line)
if m:
lines[i] = m.group(1, 2)
return resp, lines
def xover(self, start, end, file=None):
"""Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
Returns:
- resp: server response if successful
- list: list of (art-nr, subject, poster, date,
id, references, size, lines)"""
resp, lines = self.longcmd('XOVER ' + start + '-' + end, file)
xover_lines = []
for line in lines:
elem = line.split("\t")
try:
xover_lines.append((elem[0],
elem[1],
elem[2],
elem[3],
elem[4],
elem[5].split(),
elem[6],
elem[7]))
except IndexError:
raise NNTPDataError(line)
return resp,xover_lines
def xgtitle(self, group, file=None):
"""Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings"""
line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
resp, raw_lines = self.longcmd('XGTITLE ' + group, file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return resp, lines
def xpath(self,id):
"""Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article"""
resp = self.shortcmd("XPATH " + id)
if resp[:3] != '223':
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return resp, path
def date (self):
"""Process the DATE command. Arguments:
None
Returns:
resp: server response if successful
date: Date suitable for newnews/newgroups commands etc.
time: Time suitable for newnews/newgroups commands etc."""
resp = self.shortcmd("DATE")
if resp[:3] != '111':
raise NNTPReplyError(resp)
elem = resp.split()
if len(elem) != 2:
raise NNTPDataError(resp)
date = elem[1][2:8]
time = elem[1][-6:]
if len(date) != 6 or len(time) != 6:
raise NNTPDataError(resp)
return resp, date, time
def post(self, f):
"""Process a POST command. Arguments:
- f: file containing the article
Returns:
- resp: server response if successful"""
resp = self.shortcmd('POST')
# Raises error_??? if posting is not allowed
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
def ihave(self, id, f):
"""Process an IHAVE command. Arguments:
- id: message-id of the article
- f: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised."""
resp = self.shortcmd('IHAVE ' + id)
# Raises error_??? if the server already has it
if resp[0] != '3':
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if not line:
break
if line[-1] == '\n':
line = line[:-1]
if line[:1] == '.':
line = '.' + line
self.putline(line)
self.putline('.')
return self.getresp()
    def quit(self):
        """Process a QUIT command and close the socket. Returns:
        - resp: server response if successful"""
        resp = self.shortcmd('QUIT')
        # Close the buffered file wrapper and the raw socket, then drop
        # the attributes so any later use fails fast with AttributeError.
        self.file.close()
        self.sock.close()
        del self.file, self.sock
        return resp
# Test retrieval when run as a script.
# Assumption: if there's a local news server, it's called 'news'.
# Assumption: if user queries a remote news server, it's named
# in the environment variable NNTPSERVER (used by slrn and kin)
# and we want readermode off.
if __name__ == '__main__':
import os
newshost = 'news' and os.environ["NNTPSERVER"]
if newshost.find('.') == -1:
mode = 'readermode'
else:
mode = None
s = NNTP(newshost, readermode=mode)
resp, count, first, last, name = s.group('comp.lang.python')
print resp
print 'Group', name, 'has', count, 'articles, range', first, 'to', last
resp, subs = s.xhdr('subject', first + '-' + last)
print resp
for item in subs:
print "%7s %s" % item
resp = s.quit()
print resp
| gpl-3.0 |
chrisdearman/micropython | tests/basics/special_methods.py | 28 | 2100 | class Cud():
    # Test fixture: every special method just reports that it was
    # called, so the printed output records which operators dispatch
    # to which dunder on this interpreter (compared against CPython).
    def __init__(self):
        print("__init__ called")
    def __repr__(self):
        print("__repr__ called")
        return ""
    def __lt__(self, other):
        print("__lt__ called")
    def __le__(self, other):
        print("__le__ called")
    def __eq__(self, other):
        print("__eq__ called")
    def __ne__(self, other):
        print("__ne__ called")
    def __ge__(self, other):
        print("__ge__ called")
    def __gt__(self, other):
        print("__gt__ called")
    def __abs__(self):
        print("__abs__ called")
    def __add__(self, other):
        print("__add__ called")
    def __and__(self, other):
        print("__and__ called")
    def __floordiv__(self, other):
        print("__floordiv__ called")
    def __index__(self, other):
        print("__index__ called")
    def __inv__(self):
        print("__inv__ called")
    def __invert__(self):
        print("__invert__ called")
    def __lshift__(self, val):
        print("__lshift__ called")
    def __mod__(self, val):
        print("__mod__ called")
    def __mul__(self, other):
        print("__mul__ called")
    def __matmul__(self, other):
        print("__matmul__ called")
    def __neg__(self):
        print("__neg__ called")
    def __or__(self, other):
        print("__or__ called")
    def __pos__(self):
        print("__pos__ called")
    def __pow__(self, val):
        print("__pow__ called")
    def __rshift__(self, val):
        print("__rshift__ called")
    def __sub__(self, other):
        print("__sub__ called")
    def __truediv__(self, other):
        print("__truediv__ called")
    def __div__(self, other):
        print("__div__ called")
    def __xor__(self, other):
        print("__xor__ called")
    def __iadd__(self, other):
        print("__iadd__ called")
        return self
    def __isub__(self, other):
        print("__isub__ called")
        return self
# Exercise each operator once; the printed lines form the expected
# output that the test harness compares between implementations.
cud1 = Cud()
cud2 = Cud()
str(cud1)
cud1 < cud2
cud1 <= cud2
cud1 == cud2
cud1 >= cud2
cud1 > cud2
cud1 + cud2
cud1 - cud2
# more in special_methods2.py
| mit |
wayne2000/myProg | myPython/SComConf.py | 1 | 10821 | #!/usr/bin/python
__author__ = 'Wei'
from Tkinter import *
import Tkinter as tk
import tkMessageBox
import serial
import time
import thread
import traceback
import binascii
class ScrolledText(tk.Frame):
    """A Text widget packed together with a vertical Scrollbar,
    re-exporting the most commonly used Text methods."""
    def __init__(self, parent, *args, **kwargs):
        tk.Frame.__init__(self, parent)
        self.text = tk.Text(self, *args, **kwargs)
        self.vsb = tk.Scrollbar(self, orient="vertical", command=self.text.yview)
        self.text.configure(yscrollcommand=self.vsb.set)
        self.vsb.pack(side="right", fill="y")
        self.text.pack(side="left", fill="both", expand=True)
        # expose some text methods as methods on this object
        self.insert = self.text.insert
        self.delete = self.text.delete
        self.mark_set = self.text.mark_set
        self.get = self.text.get
        self.index = self.text.index
        self.search = self.text.search
class inputCellE(tk.Frame):
    """Labelled Entry row: a right-aligned Label next to an Entry
    prefilled with a default value.  self.get proxies Entry.get."""
    def __init__(self, parent, ltxt, default):
        tk.Frame.__init__(self, parent, class_='inputCellE')
        self.pack(side=TOP)
        self.creatWidget(ltxt, default)
        self.get = self.p.get
    def creatWidget(self, ltxt, default):
        # Build the label/entry pair; the entry starts with 'default'.
        self.l = Label(self, text=ltxt, width=10, anchor=E, padx=5)
        self.l.pack(side=LEFT)
        self.p = Entry(self)
        self.p.insert(1, default)
        self.p.pack(side=RIGHT)
class inputCellLB(tk.Frame):
    """Labelled Listbox row populated from 'alist'.  self.get proxies
    Listbox.get."""
    def __init__(self, parent, ltxt, alist):
        tk.Frame.__init__(self, parent, class_='inputCellLB')
        self.pack(side=TOP)
        self.creatWidget(ltxt, alist)
        self.get = self.p.get
    def creatWidget(self, ltxt, alist):
        # Build the label/listbox pair; all items are appended at once.
        self.l = Label(self, text=ltxt, width=10, anchor=E, padx=5)
        self.l.pack(side=LEFT)
        self.p = Listbox(self)
        self.p.insert(END, *alist)
        self.p.pack(side=RIGHT)
class inputCellOM(tk.Frame):
    """Labelled OptionMenu row backed by a StringVar initialised to
    'default'.  self.get returns the current selection."""
    def __init__(self, parent, ltxt, alist, default):
        # type: (object, object, object, object) -> object
        tk.Frame.__init__(self, parent, class_='inputCellOM')
        self.pack(side=TOP)
        self.creatWidget(ltxt, alist, default)
        self.get = self.v.get
    def creatWidget(self, ltxt, alist, default):
        self.l = Label(self, text=ltxt, width=10, anchor=E, padx=5)
        self.l.pack(side=LEFT)
        # The StringVar holds the selection and seeds the menu default.
        self.v = StringVar()
        self.v.set(default)
        self.p = OptionMenu(self, self.v, *alist)
        self.p.pack(side=RIGHT)
class mainWin(tk.Frame):
    """Top-level application frame: serial-parameter pane on the left,
    transmit entry and scrolling receive log on the right."""
    def __init__(self, parent):
        tk.Frame.__init__(self, parent, class_='mainWin')
        self.pack(fill=BOTH, expand=TRUE)
        self.creatMenu(parent)
        self.creatWidget()
    def creatWidget(self):
        """Build the parameter pane, the transmit row and the receive
        log inside two nested PanedWindows."""
        pw1 = PanedWindow(self)
        pw1.pack(fill=BOTH, expand=1)
        frameInput = LabelFrame(pw1, text="SCom Parameters", height=200, width=30)
        frameInput.pack(fill="both", expand="yes")
        pw1.add(frameInput)
        # Serial parameter widgets; option lists come from the aSerial
        # module-level instance 'ser'.
        self.port = inputCellOM(frameInput, "Port:", ser.port_list, ser.port_list[0])
        self.baudrate = inputCellE(frameInput, "Baudrate:", "19200")
        self.timeout = inputCellE(frameInput, "Timeout(s):", "5")
        self.bytesize = inputCellOM(frameInput, "DataBits:", ser.bytesize_list, ser.bytesize_list[3])
        self.parity = inputCellOM(frameInput, "Parity:", ser.parity_list, ser.parity_list[0])
        self.stopbits = inputCellOM(frameInput, "StopBits:", ser.stopbits_list, ser.stopbits_list[0])
        subframe1 = tk.Frame(frameInput)
        subframe1.pack(side=TOP)
        self.xonxoff = IntVar()
        cb_xonxoff = Checkbutton(subframe1, text="XON/XOFF", variable=self.xonxoff)
        cb_xonxoff.pack(side=LEFT)
        self.rtscts = IntVar()
        cb_rtscts = Checkbutton(subframe1, text="RTS/CTS", variable=self.rtscts)
        cb_rtscts.pack(side=RIGHT)
        self.inHex = IntVar()
        cb_inHex = Checkbutton(subframe1, text="inHEX", variable=self.inHex)
        cb_inHex.pack()
        # Enable/disable toggle; do_en_serial flips this state.
        self.isSerialEnabled = False
        self.text_button_en = StringVar()
        self.text_button_en.set('Serial Disabled')
        self.button_en = Button(frameInput, textvariable=self.text_button_en, command=do_en_serial, bg='red',
                                activebackground='red')
        self.button_en.pack(side=TOP)
        pw2 = PanedWindow(pw1, orient=VERTICAL)
        pw1.add(pw2)
        frameTransmitted = LabelFrame(pw2, text="Transmitted", width=800, height=100)
        self.entry_tx = Entry(frameTransmitted)
        self.entry_tx.pack(side=LEFT, fill=X, expand=1)
        button_tx = Button(frameTransmitted, text='Send', command=do_tx)
        button_tx.pack(side=RIGHT)
        pw2.add(frameTransmitted)
        frameReceived = LabelFrame(pw2, text="Received", width=800)
        self.text_recv = ScrolledText(frameReceived)
        self.text_recv.pack(side=TOP, fill=BOTH, expand=1)
        pw2.add(frameReceived)
    def creatMenu(self, parent):
        """Attach the File and Help menus to the toplevel window."""
        menubar = Menu(self)
        filemenu = Menu(menubar, tearoff=0)
        filemenu.add_command(label="Open", command=open)
        filemenu.add_separator()
        filemenu.add_command(label="Exit", command=parent.quit)
        menubar.add_cascade(label="File", menu=filemenu)
        helpmenu = Menu(menubar, tearoff=0)
        helpmenu.add_command(label="About", command=about)
        menubar.add_cascade(label="Help", menu=helpmenu)
        parent.config(menu=menubar)
    def updateRxWin(self, aStr):
        """Append received text to the receive log (invoked from the
        serial receive thread)."""
        self.text_recv.insert(tk.END, aStr)
def about():
    """Help->About menu handler: show program information."""
    tkMessageBox.showinfo("About",
                          "Serial COM Port Configuration & Test Tool\ndesigned by Wei in Sep 2016\nEmail: futurewayne@gmail.com")
def open():
    """File->Open menu handler (placeholder).
    NOTE(review): shadows the builtin open(); renaming would require
    updating the menu binding in mainWin.creatMenu."""
    tkMessageBox.showinfo("Open SCom Configuration...", "not implemented yet")
def do_tx():
    """Send-button handler: strip all whitespace from the entry text
    and transmit the result over the serial port."""
    txStr = mywin.entry_tx.get()
    tmp = "".join(txStr.split())
    print tmp
    ser.transmit(tmp)
def do_en_serial():
if mywin.isSerialEnabled:
mywin.isSerialEnabled = False
mywin.text_button_en.set('Serial Disabled')
mywin.button_en.configure(bg='red')
mywin.button_en.configure(activebackground='red')
ser.close_port()
else:
mywin.isSerialEnabled = True
mywin.text_button_en.set('Serial Enabled')
mywin.button_en.configure(bg='green')
mywin.button_en.configure(activebackground='green')
print "COM port: %s" % mywin.port.get()
print "Baudrate: %d" % int(mywin.baudrate.get())
print "Timeout: %d" % int(mywin.timeout.get())
print "Bytesize: %d" % int(mywin.bytesize.get())
print "Parity: %s" % mywin.parity.get()[0]
print "Stopbits: %f" % int(mywin.stopbits.get())
print "XONXOFF: %d" % mywin.xonxoff.get()
print "RTSCTS: %d" % mywin.rtscts.get()
print "inHEX: %d" % mywin.inHex.get()
if False == ser.open_port(mywin.port.get(),
mywin.baudrate.get(),
int(mywin.timeout.get()),
int(mywin.bytesize.get()),
mywin.parity.get()[0],
int(mywin.stopbits.get()),
mywin.xonxoff.get(),
mywin.rtscts.get(),
mywin.inHex.get()):
mywin.isSerialEnabled = False
mywin.text_button_en.set('Serial Disabled')
mywin.button_en.configure(bg='red')
mywin.button_en.configure(activebackground='red')
class aSerial:
    """Thin wrapper around pyserial: enumerates ports, opens/closes a
    connection, and runs a background receive thread that forwards
    incoming data to the GUI."""
    port_list = ['NULL']  # index 0 is the "no port selected" placeholder
    parity_list = ('NONE', 'EVEN', 'ODD', 'MARK', 'SPACE')
    stopbits_list = ('1', '1.5', '2')
    bytesize_list = ('5', '6', '7', '8')
    def __init__(self):
        self.list_ports()
        self.isOpen = False
        self.rxStr = ''
        self.inHex = False
        self.rx_cnt = 0
        self.tx_cnt = 0
        self.crlf = 0
        self.sp = 'NULL'  # string sentinel until a serial.Serial exists
    def __del__(self):
        # Flag the receive thread to stop and give it a second to exit
        # before the port is closed.
        self.isOpen = False
        time.sleep(1)
        self.close_port()
    def list_ports(self):
        """Populate port_list with the serial devices on this host."""
        import os
        if os.name == 'nt':
            from serial.tools.list_ports_windows import comports
        elif os.name == 'posix':
            from serial.tools.list_ports_posix import comports
        else:
            raise ImportError("Sorry: no implementation for your platform ('%s') available" % (os.name,))
        ports = sorted(comports())
        for n, (port, desc, hwid) in enumerate(ports, 1):
            self.port_list.append(port)
    def open_port(self, port, baudrate, timeout, bytesize, parity, stopbits, xonxoff, rtscts, inHex):
        """Open 'port' with the given settings and start the receive
        thread.  Returns True on success, False on failure (and falls
        through returning None when isOpen() is unexpectedly false)."""
        if port != 'NULL':
            try:
                self.sp = serial.Serial(port=port, baudrate=baudrate, bytesize=bytesize,
                                        parity=parity, stopbits=stopbits, xonxoff=xonxoff,
                                        rtscts=rtscts, timeout=timeout, write_timeout=timeout)
            except:
                # NOTE(review): bare except also swallows SystemExit and
                # KeyboardInterrupt; serial.SerialException would be safer.
                traceback.print_exc()
                return False
            if self.sp.isOpen():
                print "%s is opened" % port
                self.isOpen = True
                self.inHex = inHex
                thread.start_new_thread(self.receive, ())
                return True
        else:
            print "Serial port is NULL"
            return False
    def close_port(self):
        # Flag the reader thread to stop, wait for it, then close.
        self.isOpen = False
        time.sleep(1)
        if self.sp != 'NULL':
            self.sp.close()
    def receive(self):
        """Receive-thread body: drain pending bytes and push complete
        chunks to the GUI receive window.
        NOTE(review): reads via the module-global 'ser' rather than
        self, and never sleeps when no data is pending (busy-waits on
        inWaiting()); a short sleep would reduce CPU usage."""
        while self.isOpen == True:
            while ser.sp.inWaiting() > 0:
                rx = ser.sp.read(1)
                self.rx_cnt += 1
                self.rxStr += rx
                # NOTE(review): 'continue' here restarts the inner read
                # loop; it does not flush the buffer on newline.
                if rx == '\n':
                    continue
            if len(self.rxStr) > 0:
                if self.inHex:
                    aStr = binascii.hexlify(self.rxStr)
                else:
                    aStr = self.rxStr
                print "RX (%d bytes): %s" % (self.rx_cnt, aStr)
                mywin.updateRxWin(aStr)
                self.rxStr = ''
        else:
            print "Serial is closed. Receiving function exits."
    def transmit(self, aStr):
        """Send aStr over the port.  In HEX mode the string must be an
        even-length hex string; otherwise a newline is appended."""
        print "aTX: %s" % aStr
        if self.isOpen == True:
            if self.inHex:
                try:
                    bStr = binascii.unhexlify(aStr)
                except:
                    traceback.print_exc()
                    tkMessageBox.showinfo("Input Error", "Need Even-length string for transmition in HEX mode!")
                    return
            else:
                bStr = aStr + '\n'
            print "bTX: %s" % bStr
            ser.sp.write(bStr)
if __name__ == "__main__":
    # Build the serial helper first so the detected port list is
    # available to the GUI widgets, then start the Tk main loop.
    ser = aSerial()
    rootWin = tk.Tk()
    rootWin.title('Serial COM Port Configuration & Test')
    mywin = mainWin(rootWin)
    rootWin.mainloop()
| gpl-2.0 |
jendap/tensorflow | tensorflow/python/ops/template.py | 3 | 29586 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides templates which allow variable sharing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import traceback
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.training.checkpointable import util as checkpointable_util
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_decorator
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export
__all__ = ["make_template"]
@tf_export(v1=["make_template"])
def make_template(name_, func_, create_scope_now_=False, unique_name_=None,
                  custom_getter_=None, **kwargs):
  """Given an arbitrary function, wrap it so that it does variable sharing.

  This wraps `func_` in a Template and partially evaluates it.  A Template
  creates variables the first time it is called and transparently reuses
  them on every later call.  For `func_` to be compatible it must:

  * create every trainable variable (and any variable meant to be shared)
    with `tf.get_variable` rather than `tf.Variable`; locals may still use
    `tf.Variable(..., trainable=False)`.  As a safe-guard, the returned
    callable raises a `ValueError` after the first call if trainable
    variables were created via `tf.Variable`;
  * not capture variables defined outside its own scope through
    `tf.global_variables` (variable scopes and nested templates are fine);
  * not derive internal scope or variable names from arguments that are
    not supplied through `make_template` — mistakes typically surface as a
    `ValueError` about reusing a variable that does not exist.

  Two properties then hold: repeated calls of one template share all
  non-local variables, and two distinct templates never collide — unless
  the caller deliberately re-enters the variable scope of the initial
  definition (e.g. `with tf.variable_scope(vs, reuse=True):`) and defines
  the template again there.

  Example — `z` and `w` are scaled by the same shared variable `y`:

  ```python
  def my_op(x, scalar_name):
    var1 = tf.get_variable(scalar_name,
                           shape=[],
                           initializer=tf.constant_initializer(1))
    return x * var1

  scale_by_y = tf.make_template('scale_by_y', my_op, scalar_name='y')
  z = scale_by_y(input1)
  w = scale_by_y(input2)
  ```

  Depending on `create_scope_now_`, the full variable scope is captured
  either at construction time (True) or at the time of the first call
  (False).  With True, Tensors created by repeated calls carry an extra
  trailing `_N+1` in their names, since no Tensor exists yet when the
  scope is first entered inside the constructor.

  Note: `name_`, `func_` and `create_scope_now_` end with an underscore
  to reduce the likelihood of collisions with `**kwargs`.

  Args:
    name_: Name for the scope created by this template; made unique by
      appending `_N` if necessary.
    func_: The function to wrap.
    create_scope_now_: Boolean controlling whether the scope is created
      at template construction (True) or at the first call (False, the
      default).
    unique_name_: When used, overrides `name_` and is NOT made unique.
      If a template of the same scope/unique_name already exists and
      reuse is false, an error is raised. Defaults to None.
    custom_getter_: Optional custom getter for variables used in `func_`;
      see the `tf.get_variable` `custom_getter` documentation.
    **kwargs: Keyword arguments to apply to `func_`.

  Returns:
    A function encapsulating a set of variables that are created once
    and reused thereafter.  The enclosing scope is created either here
    or at call time according to `create_scope_now_`; the first call
    enters the scope with no reuse and creates the variables, and every
    later call re-enters the scope and reuses them.

  Raises:
    ValueError: if `name_` is None.
  """
  return make_template_internal(
      name_, func_, create_scope_now_, unique_name_, custom_getter_,
      create_graph_function_=False, **kwargs)
def make_template_internal(name_,
                           func_,
                           create_scope_now_=False,
                           unique_name_=None,
                           custom_getter_=None,
                           create_graph_function_=False,
                           **kwargs):
  """Make a template, optionally compiling func_ into a graph function.

  See `make_template` for full documentation.

  Args:
    name_: A name for the scope created by this template. If necessary, the name
      will be made unique by appending `_N` to the name.
    func_: The function to wrap.
    create_scope_now_: Boolean controlling whether the scope should be created
      when the template is constructed or when the template is called. Default
      is False, meaning the scope is created when the template is called.
    unique_name_: When used, it overrides name_ and is not made unique. If a
      template of the same scope/unique_name already exists and reuse is false,
      an error is raised. Defaults to None. If executing eagerly, must be None.
    custom_getter_: Optional custom getter for variables used in `func_`. See
      the `tf.get_variable` `custom_getter` documentation for
      more information.
    create_graph_function_: When True, `func_` will be executed as a graph
      function. This implies that `func_` must satisfy the properties that
      `function.defun` requires of functions: See the documentation of
      `function.defun` for details. When executing eagerly, setting this flag to
      True can improve performance. Regardless of whether eager execution is
      enabled, enabling this flag gives the caller access to graph-function
      semantics, i.e., accesses to variables are totally ordered and
      side-effecting ops are not pruned.
    **kwargs: Keyword arguments to apply to `func_`.

  Returns:
    A function to encapsulate a set of variables which should be created once
    and reused. An enclosing scope will be created either when `make_template`
    is called or when the result is called, depending on the value of
    `create_scope_now_`. Regardless of the value, the first time the template
    is called it will enter the scope with no reuse, and call `func_` to create
    variables, which are guaranteed to be unique. All subsequent calls will
    re-enter the scope and reuse those variables.

  Raises:
    ValueError: if `name_` is None.
    ValueError: if `unique_name_` is not None and eager execution is enabled.
  """
  if kwargs:
    func_ = tf_decorator.make_decorator(func_, functools.partial(
        func_, **kwargs))
  if context.executing_eagerly():
    if unique_name_ is not None:
      # Fixed typo in the user-facing message ("exeuction" -> "execution").
      raise ValueError(
          "unique_name_ cannot be used when eager execution is enabled.")
    return EagerTemplate(
        name_,
        func_,
        create_scope_now=create_scope_now_,
        custom_getter=custom_getter_,
        create_graph_function=create_graph_function_)
  return Template(
      name_,
      func_,
      create_scope_now=create_scope_now_,
      unique_name=unique_name_,
      custom_getter=custom_getter_,
      create_graph_function=create_graph_function_)
def _skip_common_stack_elements(stacktrace, base_case):
"""Skips items that the target stacktrace shares with the base stacktrace."""
for i, (trace, base) in enumerate(zip(stacktrace, base_case)):
if trace != base:
return stacktrace[i:]
return stacktrace[-1:]
class Template(checkpointable.Checkpointable):
"""Wrap a function to aid in variable sharing.
Templates are functions that create variables the first time they are called
and reuse them thereafter. See `make_template` for full documentation.
Note: By default, the full variable scope is captured at the time of first
call. If `create_scope_now_` is passed as True to the constructor, the full
scope will be captured there, but no variables will created until the first
call.
"""
  def __init__(self, name, func, create_scope_now=False, unique_name=None,
               custom_getter=None, create_graph_function=False):
    """Creates a template for the given function.

    Args:
      name: A name for the scope created by this template. The
        name will be made unique by appending `_N` to the it (see how
        `tf.variable_scope` treats the `default_name` for details).
      func: The function to apply each time.
      create_scope_now: Whether to create the scope at Template construction
        time, rather than first call. Defaults to false. Creating the scope at
        construction time may be more convenient if the template is to passed
        through much lower level code, and you want to be sure of the scope
        name without knowing exactly where it will be first called. If set to
        True, the scope will be created in the constructor, and all subsequent
        times in `__call__`, leading to a trailing numeral being added to the
        names of all created Tensors. If set to False, the scope will be created
        at the first call location.
      unique_name: When used, it overrides `name` and is not made unique. If a
        template of the same scope/unique_name already exists and reuse is
        false, an error is raised. Defaults to None.
      custom_getter: optional custom getter to pass to `variable_scope()`
      create_graph_function: When True, `func` will be executed as a graph
        function. Enabling this flag gives the caller access to graph-function
        semantics, i.e., accesses to variables are totally ordered and
        side-effecting ops are not pruned.

    Raises:
      ValueError: if `name` is None.
    """
    if create_graph_function:
      self._func = function.defun(func)
    else:
      self._func = func
    # Record the creation stack, minus the last two frames (this
    # constructor and its caller), for use in error reporting.
    self._stacktrace = traceback.format_stack()[:-2]
    self._name = name
    self._unique_name = unique_name
    self._custom_getter = custom_getter
    if name is None:
      raise ValueError("name cannot be None.")
    if create_scope_now:
      # Enter the scope immediately to reserve its (unique) name; no
      # variables are created until the first call.
      with variable_scope._pure_variable_scope(  # pylint:disable=protected-access
          (self._unique_name or
           variable_scope._get_unique_variable_scope(self._name)),  # pylint:disable=protected-access
          custom_getter=self._custom_getter) as vs:
        self._variable_scope = vs
    else:
      self._variable_scope = None
    # This variable keeps track of whether the template has been called yet,
    # which is not the same as whether the scope has been created.
    self._variables_created = False
def _call_func(self, args, kwargs):
try:
vars_at_start = len(
ops.get_collection_ref(ops.GraphKeys.GLOBAL_VARIABLES))
trainable_at_start = len(
ops.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES))
if self._variables_created:
result = self._func(*args, **kwargs)
else:
# The first time we run, restore variables if necessary (via
# Checkpointable).
with checkpointable_util.capture_dependencies(template=self):
result = self._func(*args, **kwargs)
if self._variables_created:
# Variables were previously created, implying this is not the first
# time the template has been called. Check to make sure that no new
# trainable variables were created this time around.
trainable_variables = ops.get_collection_ref(
ops.GraphKeys.TRAINABLE_VARIABLES)
# If a variable that we intend to train is created as a side effect
# of creating a template, then that is almost certainly an error.
if trainable_at_start != len(trainable_variables):
raise ValueError("Trainable variable created when calling a template "
"after the first time, perhaps you used tf.Variable "
"when you meant tf.get_variable: %s" %
(trainable_variables[trainable_at_start:],))
# Non-trainable tracking variables are a legitimate reason why a new
# variable would be created, but it is a relatively advanced use-case,
# so log it.
variables = ops.get_collection_ref(ops.GraphKeys.GLOBAL_VARIABLES)
if vars_at_start != len(variables):
logging.info("New variables created when calling a template after "
"the first time, perhaps you used tf.Variable when you "
"meant tf.get_variable: %s",
variables[vars_at_start:])
else:
self._variables_created = True
return result
except Exception as exc:
# Reraise the exception, but append the original definition to the
# trace.
args = exc.args
if not args:
arg0 = ""
else:
arg0 = args[0]
trace = "".join(_skip_common_stack_elements(self._stacktrace,
traceback.format_stack()))
arg0 = "%s\n\noriginally defined at:\n%s" % (arg0, trace)
new_args = [arg0]
new_args.extend(args[1:])
exc.args = tuple(new_args)
raise
def __call__(self, *args, **kwargs):
if self._variable_scope:
# Only reuse variables if they were already created.
with variable_scope.variable_scope(
self._variable_scope, reuse=self._variables_created):
return self._call_func(args, kwargs)
else:
# The scope was not created at construction time, so create it here.
# Subsequent calls should reuse variables.
with variable_scope.variable_scope(
self._unique_name, self._name,
custom_getter=self._custom_getter) as vs:
self._variable_scope = vs
return self._call_func(args, kwargs)
@property
def name(self):
"""Returns the name given to this Template."""
return self._name
@property
def func(self):
"""Returns the func given to this Template."""
return self._func
@property
def variable_scope(self):
"""Returns the variable scope object created by this Template."""
return self._variable_scope
@property
def variable_scope_name(self):
"""Returns the variable scope name created by this Template."""
if self._variable_scope:
name = self._variable_scope.name
if not name or name[-1] == "/":
return name
else:
# To prevent partial matches on the scope_name, we add '/' at the end.
return name + "/"
@property
def variables(self):
"""Returns the list of global and local variables created by the Template.
"""
return self.global_variables + self.local_variables
@property
def trainable_variables(self):
"""Returns the list of trainable variables created by the Template."""
if self._variables_created:
return ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES,
self.variable_scope_name)
else:
return []
@property
def non_trainable_variables(self):
"""Returns the list of non-trainable variables created by the Template."""
# TODO(apassos) Make sure it matches Eager when using local variables.
global_variables = self.global_variables
trainable_variables = set(self.trainable_variables)
return [x for x in global_variables if x not in trainable_variables]
@property
def global_variables(self):
"""Returns the list of global variables created by the Template."""
if self._variables_created:
return ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
self.variable_scope_name)
else:
return []
@property
def local_variables(self):
"""Returns the list of global variables created by the Template."""
if self._variables_created:
return ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES,
self.variable_scope_name)
else:
return []
@property
def weights(self):
"""List of weights/variables created by the Template."""
return self.variables
@property
def trainable_weights(self):
"""List of trainable weights/variables created by the Template."""
return self.trainable_variables
@property
def non_trainable_weights(self):
"""List of non-trainable weights/variables created by the Template."""
return self.non_trainable_variables
@property
@deprecated(
"2017-02-21", "The .var_scope property is deprecated. Please change your "
"code to use the .variable_scope property")
def var_scope(self):
"""Returns the variable scope object created by this Template."""
return self._variable_scope
class _EagerTemplateVariableStore(object):
  """Wrapper around EagerVariableStore to support nesting EagerTemplates."""

  def __init__(self, variable_scope_name):
    self._variable_scope_name = variable_scope_name
    default_store = variable_scope._get_default_variable_store()  # pylint: disable=protected-access
    # When we are already inside an eager variable store (i.e. nested inside
    # another EagerTemplate), wrap the active store so variables are shared
    # with it; otherwise start a fresh store.
    if default_store._store_eager_variables:  # pylint: disable=protected-access
      self._eager_variable_store = variable_scope.EagerVariableStore(
          default_store)
    else:
      self._eager_variable_store = variable_scope.EagerVariableStore()

  def set_variable_scope_name(self, variable_scope_name):
    """Records the (possibly deferred) name of the enclosing variable scope."""
    self._variable_scope_name = variable_scope_name

  @tf_contextlib.contextmanager
  def as_default(self):
    """Installs the wrapped store as the default store for the block."""
    try:
      with self._eager_variable_store.as_default():
        yield
    finally:
      # Each _EagerTemplateVariableStore object lives underneath a variable
      # scope (see EagerTemplate.__call__). This variable scope's subscopes are
      # closed when the EagerTemplate object returns from __call__. For
      # top-level _EagerTemplateVariableStore objects, the variable store to
      # which the variable scope is attached is different from the
      # EagerVariableStore; as such it is necessary to close its subscopes
      # here as well.
      if self._variable_scope_name is None:
        raise RuntimeError("A variable scope must be set before an "
                           "_EagerTemplateVariableStore object exits.")
      variable_scope.get_variable_scope_store().close_variable_subscopes(
          self._variable_scope_name)

  def _variables_in_scope(self, variable_list):
    """Filters `variable_list` down to variables under our scope prefix."""
    if self._variable_scope_name is None:
      raise RuntimeError(
          "A variable scope must be set before variables can be accessed.")
    scope_prefix = self._variable_scope_name + "/"
    return [var for var in variable_list if var.name.startswith(scope_prefix)]

  def variables(self):
    """All variables created under this template's scope."""
    return self._variables_in_scope(self._eager_variable_store.variables())

  def trainable_variables(self):
    """Trainable variables created under this template's scope."""
    return self._variables_in_scope(
        self._eager_variable_store.trainable_variables())

  def non_trainable_variables(self):
    """Non-trainable variables created under this template's scope."""
    return self._variables_in_scope(
        self._eager_variable_store.non_trainable_variables())
class EagerTemplate(Template):
  """Wrap a function to aid in variable sharing in Eager mode.
  Templates are functions that create variables the first time they are called
  and reuse them thereafter. See `make_template` for full documentation.
  Note: By default, the full variable scope is captured at the time of first
  call. If `create_scope_now` is passed as True to the constructor, the full
  scope will be captured there, but no variables will be created until the first
  call.
  """
  def __init__(self, name, func, create_scope_now=False, custom_getter=None,
               create_graph_function=False):
    """Creates a template for the given function.
    Args:
      name: A name for the scope created by this template. The
        name will be made unique by appending `_N` to the it (see how
        `tf.variable_scope` treats the `default_name` for details).
      func: The function to apply each time.
      create_scope_now: Whether to create the scope at Template construction
        time, rather than first call. Defaults to false. Creating the scope at
        construction time may be more convenient if the template is passed
        through much lower level code, and you want to be sure of the scope
        name without knowing exactly where it will be first called. If set to
        True, the scope will be created in the constructor, and all subsequent
        times in `__call__`, leading to a trailing numeral being added to the
        names of all created Tensors. If set to False, the scope will be created
        at the first call location.
      custom_getter: optional custom getter to pass to `variable_scope()`
      create_graph_function: When True, `func` will be executed as a graph
        function. Enabling this flag allows the caller to reap the performance
        benefits associated with executing graphs, at the cost of sacrificing
        debuggability; however, not all Python functions can be compiled into
        graph functions. See the documentation for `function.defun` for details.
    Raises:
      RuntimeError: if eager execution is not enabled.
    """
    if not context.executing_eagerly():
      raise RuntimeError(
          "{} objects can only be used when eager execution is enabled, use "
          "tf.Template for graph construction".
          format(type(self)))
    # `unique_name` is intentionally passed as None: eager templates always
    # derive their scope name from `name`.
    super(EagerTemplate, self).__init__(name, func, create_scope_now, None,
                                        custom_getter, create_graph_function)
    if self._variable_scope is not None:
      variable_scope_name = self._variable_scope.name
    else:
      # Defer setting the variable scope name until the variable scope
      # is created in __call__.
      variable_scope_name = None
    self._template_store = _EagerTemplateVariableStore(variable_scope_name)
    self._variable_scope_context_manager = None
  def _call_func(self, args, kwargs):
    """Runs `func`, checking that repeated calls do not create new variables."""
    try:
      # Snapshot the variable lists before the call so we can detect variables
      # created as a side effect of this call.
      vars_at_start = self._template_store.variables()
      trainable_at_start = self._template_store.trainable_variables()
      if self._variables_created:
        result = self._func(*args, **kwargs)
      else:
        # The first time we run, restore variables if necessary (via
        # Checkpointable).
        with checkpointable_util.capture_dependencies(template=self):
          result = self._func(*args, **kwargs)
      if self._variables_created:
        # Variables were previously created, implying this is not the first
        # time the template has been called. Check to make sure that no new
        # trainable variables were created this time around.
        trainable_variables = self._template_store.trainable_variables()
        # If a variable that we intend to train is created as a side effect
        # of creating a template, then that is almost certainly an error.
        if len(trainable_at_start) != len(trainable_variables):
          raise ValueError("Trainable variable created when calling a template "
                           "after the first time, perhaps you used tf.Variable "
                           "when you meant tf.get_variable: %s" %
                           list(set(trainable_variables) -
                                set(trainable_at_start)))
        # Non-trainable tracking variables are a legitimate reason why a new
        # variable would be created, but it is a relatively advanced use-case,
        # so log it.
        variables = self._template_store.variables()
        if len(vars_at_start) != len(variables):
          logging.info("New variables created when calling a template after "
                       "the first time, perhaps you used tf.Variable when you "
                       "meant tf.get_variable: %s",
                       list(set(variables) - set(vars_at_start)))
      else:
        self._variables_created = True
      return result
    except Exception as exc:
      # Reraise the exception, but append the original definition to the
      # trace.
      args = exc.args
      if not args:
        arg0 = ""
      else:
        arg0 = args[0]
      trace = "".join(_skip_common_stack_elements(self._stacktrace,
                                                  traceback.format_stack()))
      arg0 = "%s\n\noriginally defined at:\n%s" % (arg0, trace)
      new_args = [arg0]
      new_args.extend(args[1:])
      exc.args = tuple(new_args)
      raise
  def __call__(self, *args, **kwargs):
    # In both branches below, the template store is installed as default after
    # the variable scope is opened in order to ensure that templates nested at
    # the same level correctly uniquify lower variable scope names.
    if self._variable_scope:
      # Create a cache for the variable scope context manager the first time
      # around so that we don't have to keep recreating it.
      if not self._variable_scope_context_manager:
        self._variable_scope_context_manager = variable_scope.variable_scope(
            self._variable_scope, reuse=variable_scope.AUTO_REUSE)
      with self._variable_scope_context_manager:
        with self._template_store.as_default():
          return self._call_func(args, kwargs)
    else:
      # The scope was not created at construction time, so create it here.
      # Subsequent calls should reuse variables.
      with variable_scope.variable_scope(
          self._unique_name, self._name,
          custom_getter=self._custom_getter) as vs:
        self._variable_scope = vs
        # Because the scope was not created at construction time, the template
        # store's variable scope name is unset; set it here.
        self._template_store.set_variable_scope_name(vs.name)
        with self._template_store.as_default():
          return self._call_func(args, kwargs)
  @property
  def variables(self):
    """Returns the list of variables created by the Template."""
    # Currently there is no local variable in Eager mode.
    if not self._variables_created:
      return []
    return self._template_store.variables()
  @property
  def trainable_variables(self):
    """Returns the list of trainable variables created by the Template."""
    # Currently there is no local variable in Eager mode.
    if not self._variables_created:
      return []
    return self._template_store.trainable_variables()
  @property
  def non_trainable_variables(self):
    """Returns the list of non-trainable variables created by the Template."""
    # Currently there is no local variable in Eager mode.
    if not self._variables_created:
      return []
    return self._template_store.non_trainable_variables()
  @property
  def global_variables(self):
    """Returns the list of global variables created by the Template."""
    # Currently there is no local variable in Eager mode.
    if not self._variables_created:
      return []
    return self.variables
  @property
  def local_variables(self):
    """Returns the list of local variables created by the Template."""
    # Currently there is no local variable in Eager mode.
    return []
| apache-2.0 |
ykim362/mxnet | example/speech_recognition/config_util.py | 52 | 3911 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import os
import re
import sys
import mxnet as mx
if sys.version_info >= (3, 0):
import configparser
else:
import ConfigParser as configparser
def parse_args(file_path):
    """Read defaults from `file_path` and layer command-line overrides on top.

    Every (section, option) pair in the config becomes a "--section_option"
    command-line flag; values given on the command line replace the values
    read from the config file(s). Returns the parsed argparse namespace with
    the merged ConfigParser attached as `args.config`.
    """
    default_cfg = configparser.ConfigParser()
    default_cfg.read(file_path)
    parser = argparse.ArgumentParser()
    parser.add_argument("--configfile", help="config file")
    parser.add_argument("--archfile", help="symbol architecture template file")
    # Expose every config entry so it can be overwritten from the command line.
    for sec in default_cfg.sections():
        for name, _ in default_cfg.items(sec):
            parser.add_argument(
                '--%s_%s' % (sec, name),
                help='Overwrite %s in section [%s] of config file' % (name, sec))
    args = parser.parse_args()
    if args.configfile is not None:
        # now read the user supplied config file to overwrite some values
        default_cfg.read(args.configfile)
    # Apply command-line overrides on top of the merged config values.
    for sec in default_cfg.sections():
        for name, _ in default_cfg.items(sec):
            arg_name = ('%s_%s' % (sec, name)).replace('-', '_')
            override = getattr(args, arg_name, None)
            if override is not None:
                sys.stderr.write('!! CMDLine overwriting %s.%s:\n' % (sec, name))
                sys.stderr.write(" '%s' => '%s'\n" % (default_cfg.get(sec, name),
                                                      override))
                default_cfg.set(sec, name, override)
    args.config = default_cfg
    sys.stderr.write("=" * 80 + "\n")
    # set archfile to read template of network
    if args.archfile is not None:
        args.config.set('arch', 'arch_file', args.archfile)
    else:
        args.config.set('arch', 'arch_file', 'arch_deepspeech')
    return args
def get_log_path(args):
    """Return the directory where log files for the active mode are stored.

    An absolute `prefix` in the config is returned unchanged; otherwise the
    path is resolved as <module_dir>/log/<prefix>/<mode>.
    """
    mode = args.config.get('mode', 'method')
    prefix = args.config.get(mode, 'prefix')
    if os.path.isabs(prefix):
        return prefix
    # Fixed: os.path.dirname('__file__') looked up the literal string
    # '__file__' (always yielding ''), silently anchoring the path at the
    # current working directory. Use the real __file__ so the path is
    # anchored at this module's directory, matching generate_file_path().
    return os.path.abspath(os.path.join(os.path.dirname(__file__), 'log', prefix, mode))
def get_checkpoint_path(args):
    """Return the directory where model checkpoints are stored.

    An absolute `prefix` in the [common] section is returned unchanged;
    otherwise the path is resolved as <module_dir>/checkpoints/<prefix>.
    """
    prefix = args.config.get('common', 'prefix')
    if os.path.isabs(prefix):
        return prefix
    # Fixed: os.path.dirname('__file__') looked up the literal string
    # '__file__' (always yielding ''), silently anchoring the path at the
    # current working directory. Use the real __file__ so the path is
    # anchored at this module's directory, matching generate_file_path().
    return os.path.abspath(os.path.join(os.path.dirname(__file__), 'checkpoints', prefix))
def parse_contexts(args):
    """Parse the 'common.context' config value into MXNet Context objects.

    The value is a list of device tokens such as "gpu0,gpu1" or "cpu0",
    separated by any non-word characters.

    Fixed: a bare "cpu" or "gpu" token (no numeric suffix) used to crash on
    int('') — it now defaults to device 0, which is backward compatible.
    """
    # parse context into Context objects
    contexts = re.split(r'\W+', args.config.get('common', 'context'))
    for i, ctx in enumerate(contexts):
        # Device id follows the 3-letter device name; default to 0 if absent.
        device_id = int(ctx[3:]) if ctx[3:] else 0
        if ctx[:3] == 'gpu':
            contexts[i] = mx.context.gpu(device_id)
        else:
            contexts[i] = mx.context.cpu(device_id)
    return contexts
def generate_file_path(save_dir, model_name, postfix):
    """Build the full path for a model-related file.

    An absolute `model_name` is treated as a directory and `postfix` is
    appended to it; a relative name is stored under
    (project path)/save_dir/model_name_postfix.
    """
    if os.path.isabs(model_name):
        return os.path.join(model_name, postfix)
    base_dir = os.path.dirname(__file__)
    file_name = '%s_%s' % (model_name, postfix)
    return os.path.abspath(os.path.join(base_dir, save_dir, file_name))
| apache-2.0 |
pedrobaeza/OpenUpgrade | addons/account/wizard/pos_box.py | 185 | 3431 | from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class CashBox(osv.osv_memory):
    """Abstract wizard for putting money in / taking money out of a cash box.

    Subclasses (CashBoxIn / CashBoxOut) provide the concrete
    _compute_values_for_statement_line() used to build the bank statement
    line created for each selected record.
    """
    # Abstract base: not registered in the ORM on its own.
    _register = False
    _columns = {
        'name' : fields.char('Reason', required=True),
        # Attention, we don't set a domain, because there is a journal_type key
        # in the context of the action
        'amount' : fields.float('Amount',
                                digits_compute = dp.get_precision('Account'),
                                required=True),
    }
    def run(self, cr, uid, ids, context=None):
        """Entry point called by the UI: resolve the active records and run."""
        if not context:
            context = dict()
        active_model = context.get('active_model', False) or False
        active_ids = context.get('active_ids', []) or []
        # The records are the bank statements the wizard was launched from.
        records = self.pool[active_model].browse(cr, uid, active_ids, context=context)
        return self._run(cr, uid, ids, records, context=context)
    def _run(self, cr, uid, ids, records, context=None):
        """Create one statement line per (wizard, record) pair.

        Raises an error if the statement has no journal or the journal has no
        internal transfers account configured.
        """
        for box in self.browse(cr, uid, ids, context=context):
            for record in records:
                if not record.journal_id:
                    raise osv.except_osv(_('Error!'),
                                         _("Please check that the field 'Journal' is set on the Bank Statement"))
                if not record.journal_id.internal_account_id:
                    raise osv.except_osv(_('Error!'),
                                         _("Please check that the field 'Internal Transfers Account' is set on the payment method '%s'.") % (record.journal_id.name,))
                self._create_bank_statement_line(cr, uid, box, record, context=context)
        return {}
    def _create_bank_statement_line(self, cr, uid, box, record, context=None):
        """Create the account.bank.statement.line from subclass-computed values."""
        values = self._compute_values_for_statement_line(cr, uid, box, record, context=context)
        return self.pool.get('account.bank.statement.line').create(cr, uid, values, context=context)
class CashBoxIn(CashBox):
    """Wizard that records money being put into the cash register."""

    _name = 'cash.box.in'

    # Same columns as CashBox plus an optional reference field.
    _columns = dict(CashBox._columns, ref=fields.char('Reference'))

    def _compute_values_for_statement_line(self, cr, uid, box, record, context=None):
        """Build the bank statement line values for a cash-in operation."""
        journal = record.journal_id
        if not journal.internal_account_id.id:
            raise osv.except_osv(_('Configuration Error'), _("You should have defined an 'Internal Transfer Account' in your cash register's journal!"))
        values = {
            'statement_id': record.id,
            'journal_id': journal.id,
            'account_id': journal.internal_account_id.id,
            'name': box.name,
            'ref': '%s' % (box.ref or ''),
        }
        # Cash put in is recorded as a positive amount.
        values['amount'] = box.amount or 0.0
        return values
class CashBoxOut(CashBox):
    """Wizard that records money being taken out of the cash register."""

    _name = 'cash.box.out'

    _columns = CashBox._columns.copy()

    def _compute_values_for_statement_line(self, cr, uid, box, record, context=None):
        """Build the bank statement line values for a cash-out operation.

        The amount is always stored as a negative value, whatever the sign
        the user entered.
        """
        journal = record.journal_id
        if not journal.internal_account_id.id:
            raise osv.except_osv(_('Configuration Error'), _("You should have defined an 'Internal Transfer Account' in your cash register's journal!"))
        amount = box.amount or 0.0
        if amount > 0.0:
            amount = -amount
        return {
            'statement_id': record.id,
            'journal_id': journal.id,
            'amount': amount,
            'account_id': journal.internal_account_id.id,
            'name': box.name,
        }
| agpl-3.0 |
lexor90/node-compiler | node/deps/v8/tools/release/check_clusterfuzz.py | 10 | 5501 | #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Script to check for new clusterfuzz issues since the last rolled v8 revision.
Returns a json list with test case IDs if any.
Security considerations: The security key and request data must never be
written to public logs. Public automated callers of this script should
suppress stdout and stderr and only process contents of the results_file.
"""
import argparse
import httplib
import json
import os
import re
import sys
import urllib
import urllib2
# Constants to git repos.
BASE_URL = "https://chromium.googlesource.com"
DEPS_LOG = BASE_URL + "/chromium/src/+log/master/DEPS?format=JSON"
# Constants for retrieving v8 rolls.
CRREV = "https://cr-rev.appspot.com/_ah/api/crrev/v1/commit/%s"
# Matches the commit message of a V8 roll and captures the git hash it is
# based on (group 1). Cherry-pick messages do not match.
V8_COMMIT_RE = re.compile(
    r"^Update V8 to version \d+\.\d+\.\d+ \(based on ([a-fA-F0-9]+)\)\..*")
# Constants for the clusterfuzz backend.
HOSTNAME = "backend-dot-cluster-fuzz.appspot.com"
# Crash patterns.
V8_INTERNAL_RE = re.compile(r"^v8::internal.*")
ANY_RE = re.compile(r".*")
# List of all api requests. Each spec is a set of clusterfuzz query
# parameters ("args") plus a regex ("crash_state") used to filter the
# returned issues by their crash state.
BUG_SPECS = [
  {
    "args": {
      "job_type": "linux_asan_chrome_v8",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": V8_INTERNAL_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_ignition_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_v8_arm_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_ignition_v8_arm_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_v8_arm64_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
  {
    "args": {
      "job_type": "linux_asan_d8_v8_mipsel_dbg",
      "reproducible": "True",
      "open": "True",
      "bug_information": "",
    },
    "crash_state": ANY_RE,
  },
]
def GetRequest(url):
  """Fetches |url| with a 60 second timeout and returns the response body."""
  response = urllib2.urlopen(url, None, 60)
  try:
    return response.read()
  finally:
    # Always release the connection, even if read() raises.
    response.close()
def GetLatestV8InChromium():
  """Returns the commit position number of the latest v8 roll in chromium."""
  # Check currently rolled v8 revision.
  deps_log = GetRequest(DEPS_LOG)
  if not deps_log:
    return None
  # Strip the 5-byte security header and load the json payload.
  log_entries = json.loads(deps_log[5:])["log"]
  # Find the latest commit matching the v8 roll pattern (cherry-picks don't
  # match V8_COMMIT_RE and are therefore skipped).
  matches = (re.match(V8_COMMIT_RE, entry["message"]) for entry in log_entries)
  git_revision = next((m.group(1) for m in matches if m), None)
  if git_revision is None:
    return None
  # Resolve the git hash to a commit position number via crrev.
  crrev_response = GetRequest(CRREV % git_revision)
  if not crrev_response:
    return None
  commit = json.loads(crrev_response)
  assert commit["repo"] == "v8/v8"
  return commit["number"]
def APIRequest(key, **params):
  """Send a request to the clusterfuzz api.
  Returns a json dict of the response.

  Raises a generic Exception (without request details, which must never be
  leaked into public logs) on connection or parse failures.
  """
  params["api_key"] = key
  params = urllib.urlencode(params)
  headers = {"Content-type": "application/x-www-form-urlencoded"}
  try:
    conn = httplib.HTTPSConnection(HOSTNAME)
    conn.request("POST", "/_api/", params, headers)
    response = conn.getresponse()
    # Never leak "data" into public logs.
    data = response.read()
  except Exception:
    # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit still
    # propagate instead of being masked as a connection problem.
    raise Exception("ERROR: Connection problem.")
  try:
    return json.loads(data)
  except ValueError:
    # json.loads raises ValueError on malformed input; anything else is a bug
    # and should surface as-is. (The trailing unreachable "return None" that
    # followed this try/except has been removed.)
    raise Exception("ERROR: Could not read response. Is your key valid?")
def Main():
  """Queries clusterfuzz for new issues since the last v8 roll.

  Writes a json list of issue IDs to --results-file if given, otherwise
  prints it. The api key content must never appear in logs.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument("-k", "--key-file", required=True,
                      help="A file with the clusterfuzz api key.")
  parser.add_argument("-r", "--results-file",
                      help="A file to write the results to.")
  options = parser.parse_args()
  # Get api key. The key's content must never be logged.
  assert options.key_file
  with open(options.key_file) as f:
    key = f.read().strip()
  assert key
  revision_number = GetLatestV8InChromium()
  results = []
  for spec in BUG_SPECS:
    # Copy the spec so the module-level BUG_SPECS stays unmodified.
    args = dict(spec["args"])
    # Use incremented revision as we're interested in all revision greater than
    # what's currently rolled into chromium.
    if revision_number:
      args["revision_greater_or_equal"] = str(int(revision_number) + 1)
    # Never print issue details in public logs.
    issues = APIRequest(key, **args)
    assert issues is not None
    for issue in issues:
      # Keep only issues whose crash state matches this spec's pattern.
      if re.match(spec["crash_state"], issue["crash_state"]):
        results.append(issue["id"])
  if options.results_file:
    with open(options.results_file, "w") as f:
      f.write(json.dumps(results))
  else:
    print results
if __name__ == "__main__":
  sys.exit(Main())
zouppen/simulavr | regress/timertest/timer_16bit_normal.py | 5 | 2481 | from vcdtestutil import VCDTestCase, VCDTestLoader, mSec
class TestCase(VCDTestCase):
  """Checks a 16-bit timer (normal mode) VCD trace produced by simulavr."""

  # Maps processor type to the IRQ vector used for the timer1 overflow (TOV1).
  p2irq = {
    "atmega128": "IRQ.VECTOR14",
    "at90s4433": "IRQ.VECTOR5",
    "at90s8515": "IRQ.VECTOR6",
    "atmega48": "IRQ.VECTOR13",
  }
  def setUp(self):
    # Load the VCD dump, fix the simulated clock at 4MHz and resolve the
    # TOV1 vector name for the processor the trace was recorded on.
    self.getVCD()
    self.setClock(4000000)
    self.processor = self.getProcessorType()
    self.tov1 = self.p2irq[self.processor]
  def test_00(self):
    """simulation time [0..40ms]"""
    self.assertVCD()
    self.assertEqual(self.vcd.starttime, 0)
    self.assertEqual(self.vcd.endtime, 40 * mSec)
  def test_01(self):
    """init counter"""
    # Both counter register halves must start at zero.
    self.assertVCD()
    p = self.getVariable("TIMER1.TCNTH")
    self.assertEqual(p.firstedge.intValue, 0)
    p = self.getVariable("TIMER1.TCNTL")
    self.assertEqual(p.firstedge.intValue, 0)
  def test_02(self):
    """counter period = 0,25us"""
    # At 4MHz (no prescaler) the counter must advance once per clock cycle.
    self.assertVCD()
    c = self.getVariable("TIMER1.Counter")
    c1 = c.firstedge
    tp = self.tClock
    t0 = c1.internalTime - tp
    dtc = tp * 65536
    self.assertEqual(c1.intValue, 1)
    c2 = c.getNextEdge(c1)
    self.assertEqual(c2.intValue, 2)
    self.assertEqual(c2.internalTime - c1.internalTime, tp)
  def test_03(self):
    """counter mode: count 0xffff, then 0"""
    # Normal mode: after counting through the full 16-bit range the counter
    # must wrap back to zero.
    self.assertVCD()
    c = self.getVariable("TIMER1.Counter")
    c1 = c.firstedge
    tp = self.tClock
    t0 = c1.internalTime - tp
    dtc = tp * 65536
    c2 = c.getNextEdge(t0 + dtc)
    self.assertEqual(c2.intValue, 0)
  def test_04(self):
    """check occurence of TOV1 interrupt"""
    self.assertVCD()
    ctr = self.getVariable("TIMER1.Counter")
    tp = self.tClock
    t0 = ctr.firstedge.internalTime - tp
    dtc = tp * 65536
    # Allow up to 6 clock cycles between the overflow and the IRQ firing.
    idelay = 6 * self.tClock
    irq = self.getVariable(self.tov1)
    # first overflow
    t = t0 + dtc
    ce = ctr.getNextEdge(t)
    self.assertEqual(ce.internalTime, t)
    self.assertEqual(ce.intValue, 0)
    # check, when interrupt occurs
    ie = irq.getNextEdge(t)
    self.assertEqual(ie.intValue, 1)
    self.assertTrue(ie.internalTime <= (t + idelay), "TOV1 occured to late")
    # seek next TOV1
    ie = irq.getNextEdge(irq.getNextEdge(ie))
    self.assertTrue(ie.internalTime <= (t + dtc + idelay), "second TOV1 occured to late")
if __name__ == '__main__':
    # Standalone run: load the reference VCD dump and execute the test case.
    from unittest import TestLoader, TextTestRunner
    tests = VCDTestLoader("timer_16bit_normal_atmega128.vcd").loadTestsFromTestCase(TestCase)
    TextTestRunner(verbosity = 2).run(tests)
# EOF
| gpl-2.0 |
x75/mavlink | pymavlink/generator/lib/minixsv/minixsvWrapper.py | 3 | 2396 | #!/usr/local/bin/python
import sys
import getopt
from genxmlif import GenXmlIfError
from xsvalErrorHandler import ErrorHandler, XsvalError
from minixsv import *
from pyxsval import parseAndValidate
##########################################
# minixsv Wrapper for calling minixsv from command line
# Usage text printed for the -h / -? options and on invalid invocations.
validSyntaxText = '''\
minixsv XML Schema Validator
Syntax: minixsv [-h] [-?] [-p Parser] [-s XSD-Filename] XML-Filename
Options:
-h, -?: Display this help text
-p Parser: XML Parser to be used
(XMLIF_MINIDOM, XMLIF_ELEMENTTREE, XMLIF_4DOM
default: XMLIF_ELEMENTTREE)
-s XSD-FileName: specify the schema file for validation
(if not specified in XML-File)
'''
def checkShellInputParameter():
    """check shell input parameters.

    Returns a (xmlInputFilename, xsdFilename, xmlParser) tuple parsed from
    sys.argv; prints the usage text and exits with -1 on bad input.
    """
    xmlInputFilename = None
    xsdFilename = None
    # Default parser used when no -p option is given.
    xmlParser = "XMLIF_ELEMENTTREE"
    try:
        (options, arguments) = getopt.getopt(sys.argv[1:], '?hp:s:')
        if ('-?','') in options or ('-h','') in options:
            # Help requested: show usage and stop.
            print validSyntaxText
            sys.exit(-1)
        else:
            if len (arguments) == 1:
                xmlInputFilename = arguments[0]
                for o, a in options:
                    if o == "-s":
                        xsdFilename = a
                    if o == "-p":
                        # Only the known parser backends are accepted.
                        if a in (XMLIF_MINIDOM, XMLIF_ELEMENTTREE, XMLIF_4DOM):
                            xmlParser = a
                        else:
                            print 'Invalid XML parser %s!' %(a)
                            sys.exit(-1)
            else:
                print 'minixsv needs one argument (XML input file)!'
                sys.exit(-1)
    except getopt.GetoptError, errstr:
        print errstr
        sys.exit(-1)
    return xmlInputFilename, xsdFilename, xmlParser
def main():
    """Parse the command line and validate the XML file against its schema.

    Any parse or validation error is printed and the process exits with -1.
    """
    xmlInputFilename, xsdFileName, xmlParser = checkShellInputParameter()
    try:
        parseAndValidate (xmlInputFilename, xsdFile=xsdFileName, xmlIfClass=xmlParser)
    except IOError, errstr:
        print errstr
        sys.exit(-1)
    except GenXmlIfError, errstr:
        print errstr
        sys.exit(-1)
    except XsvalError, errstr:
        print errstr
        sys.exit(-1)
if __name__ == "__main__":
    main()
| lgpl-3.0 |
encukou/freeipa | ipaserver/plugins/dns.py | 1 | 166682 | # Authors:
# Martin Kosek <mkosek@redhat.com>
# Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import logging
import netaddr
import time
import re
import binascii
import encodings.idna
import dns.name
import dns.exception
import dns.rdatatype
import dns.resolver
import six
from ipalib.dns import (extra_name_format,
get_extra_rrtype,
get_part_rrtype,
get_record_rrtype,
get_rrparam_from_part,
has_cli_options,
iterate_rrparams_by_parts,
part_name_format,
record_name_format)
from ipalib.frontend import Method, Object
from ipalib.request import context
from ipalib import api, errors, output
from ipalib import Command
from ipalib.capabilities import (
VERSION_WITHOUT_CAPABILITIES,
client_has_capability)
from ipalib.parameters import (Flag, Bool, Int, Decimal, Str, StrEnum, Any,
DNSNameParam)
from ipalib.plugable import Registry
from .baseldap import (
pkey_to_value,
LDAPObject,
LDAPCreate,
LDAPUpdate,
LDAPSearch,
LDAPQuery,
LDAPDelete,
LDAPRetrieve)
from ipalib import _
from ipalib import messages
from ipalib.util import (normalize_zonemgr,
get_dns_forward_zone_update_policy,
get_dns_reverse_zone_update_policy,
get_reverse_zone_default, REVERSE_DNS_ZONES,
normalize_zone, validate_dnssec_global_forwarder,
DNSSECSignatureMissingError, UnresolvableRecordError,
EDNS0UnsupportedError, DNSSECValidationError,
validate_dnssec_zone_forwarder_step1,
validate_dnssec_zone_forwarder_step2,
verify_host_resolvable,
validate_bind_forwarder,
ipaddr_validator)
from ipaplatform import services
from ipapython.dn import DN
from ipapython.ipautil import CheckedIPAddress
from ipapython.dnsutil import (
check_zone_overlap,
DNSName,
DNSResolver,
DNSZoneAlreadyExists,
related_to_auto_empty_zone,
resolve,
zone_for_name,
)
from ipaserver.dns_data_management import (
IPASystemRecords,
IPADomainIsNotManagedByIPAError,
)
from ipaserver.masters import find_providing_servers, is_service_enabled
# Python 3 compatibility shim: the rest of this module refers to the name
# "unicode", which does not exist on Python 3 where str is the text type.
if six.PY3:
    unicode = str
__doc__ = _("""
Domain Name System (DNS)
""") + _("""
Manage DNS zone and resource records.
""") + _("""
SUPPORTED ZONE TYPES
* Master zone (dnszone-*), contains authoritative data.
* Forward zone (dnsforwardzone-*), forwards queries to configured forwarders
(a set of DNS servers).
""") + _("""
USING STRUCTURED PER-TYPE OPTIONS
""") + _("""
There are many structured DNS RR types where DNS data stored in LDAP server
is not just a scalar value, for example an IP address or a domain name, but
a data structure which may be often complex. A good example is a LOC record
[RFC1876] which consists of many mandatory and optional parts (degrees,
minutes, seconds of latitude and longitude, altitude or precision).
""") + _("""
It may be difficult to manipulate such DNS records without making a mistake
and entering an invalid value. DNS module provides an abstraction over these
raw records and allows to manipulate each RR type with specific options. For
each supported RR type, DNS module provides a standard option to manipulate
a raw records with format --<rrtype>-rec, e.g. --mx-rec, and special options
for every part of the RR structure with format --<rrtype>-<partname>, e.g.
--mx-preference and --mx-exchanger.
""") + _("""
When adding a record, either RR specific options or standard option for a raw
value can be used, they just should not be combined in one add operation. When
modifying an existing entry, new RR specific options can be used to change
one part of a DNS record, where the standard option for raw value is used
to specify the modified value. The following example demonstrates
a modification of MX record preference from 0 to 1 in a record without
modifying the exchanger:
ipa dnsrecord-mod --mx-rec="0 mx.example.com." --mx-preference=1
""") + _("""
EXAMPLES:
""") + _("""
Add new zone:
ipa dnszone-add example.com --admin-email=admin@example.com
""") + _("""
Add system permission that can be used for per-zone privilege delegation:
ipa dnszone-add-permission example.com
""") + _("""
Modify the zone to allow dynamic updates for hosts own records in realm EXAMPLE.COM:
ipa dnszone-mod example.com --dynamic-update=TRUE
""") + _("""
This is the equivalent of:
ipa dnszone-mod example.com --dynamic-update=TRUE \\
--update-policy="grant EXAMPLE.COM krb5-self * A; grant EXAMPLE.COM krb5-self * AAAA; grant EXAMPLE.COM krb5-self * SSHFP;"
""") + _("""
Modify the zone to allow zone transfers for local network only:
ipa dnszone-mod example.com --allow-transfer=192.0.2.0/24
""") + _("""
Add new reverse zone specified by network IP address:
ipa dnszone-add --name-from-ip=192.0.2.0/24
""") + _("""
Add second nameserver for example.com:
ipa dnsrecord-add example.com @ --ns-rec=nameserver2.example.com
""") + _("""
Add a mail server for example.com:
ipa dnsrecord-add example.com @ --mx-rec="10 mail1"
""") + _("""
Add another record using MX record specific options:
ipa dnsrecord-add example.com @ --mx-preference=20 --mx-exchanger=mail2
""") + _("""
Add another record using interactive mode (started when dnsrecord-add, dnsrecord-mod,
or dnsrecord-del are executed with no options):
ipa dnsrecord-add example.com @
Please choose a type of DNS resource record to be added
The most common types for this type of zone are: NS, MX, LOC
DNS resource record type: MX
MX Preference: 30
MX Exchanger: mail3
Record name: example.com
MX record: 10 mail1, 20 mail2, 30 mail3
NS record: nameserver.example.com., nameserver2.example.com.
""") + _("""
Delete previously added nameserver from example.com:
ipa dnsrecord-del example.com @ --ns-rec=nameserver2.example.com.
""") + _("""
Add LOC record for example.com:
ipa dnsrecord-add example.com @ --loc-rec="49 11 42.4 N 16 36 29.6 E 227.64m"
""") + _("""
Add new A record for www.example.com. Create a reverse record in appropriate
reverse zone as well. In this case a PTR record "2" pointing to www.example.com
will be created in zone 2.0.192.in-addr.arpa.
ipa dnsrecord-add example.com www --a-rec=192.0.2.2 --a-create-reverse
""") + _("""
Add new PTR record for www.example.com
ipa dnsrecord-add 2.0.192.in-addr.arpa. 2 --ptr-rec=www.example.com.
""") + _("""
Add new SRV records for LDAP servers. Three quarters of the requests
should go to fast.example.com, one quarter to slow.example.com. If neither
is available, switch to backup.example.com.
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="0 3 389 fast.example.com"
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="0 1 389 slow.example.com"
ipa dnsrecord-add example.com _ldap._tcp --srv-rec="1 1 389 backup.example.com"
""") + _("""
The interactive mode can be used for easy modification:
ipa dnsrecord-mod example.com _ldap._tcp
No option to modify specific record provided.
Current DNS record contents:
SRV record: 0 3 389 fast.example.com, 0 1 389 slow.example.com, 1 1 389 backup.example.com
Modify SRV record '0 3 389 fast.example.com'? Yes/No (default No):
Modify SRV record '0 1 389 slow.example.com'? Yes/No (default No): y
SRV Priority [0]: (keep the default value)
SRV Weight [1]: 2 (modified value)
SRV Port [389]: (keep the default value)
SRV Target [slow.example.com]: (keep the default value)
1 SRV record skipped. Only one value per DNS record type can be modified at one time.
Record name: _ldap._tcp
SRV record: 0 3 389 fast.example.com, 1 1 389 backup.example.com, 0 2 389 slow.example.com
""") + _("""
After this modification, three fifths of the requests should go to
fast.example.com and two fifths to slow.example.com.
""") + _("""
An example of the interactive mode for dnsrecord-del command:
ipa dnsrecord-del example.com www
No option to delete specific record provided.
Delete all? Yes/No (default No): (do not delete all records)
Current DNS record contents:
A record: 192.0.2.2, 192.0.2.3
Delete A record '192.0.2.2'? Yes/No (default No):
Delete A record '192.0.2.3'? Yes/No (default No): y
Record name: www
A record: 192.0.2.2 (A record 192.0.2.3 has been deleted)
""") + _("""
Show zone example.com:
ipa dnszone-show example.com
""") + _("""
Find zone with "example" in its domain name:
ipa dnszone-find example
""") + _("""
Find records for resources with "www" in their name in zone example.com:
ipa dnsrecord-find example.com www
""") + _("""
Find A records with value 192.0.2.2 in zone example.com
ipa dnsrecord-find example.com --a-rec=192.0.2.2
""") + _("""
Show records for resource www in zone example.com
ipa dnsrecord-show example.com www
""") + _("""
Delegate zone sub.example to another nameserver:
ipa dnsrecord-add example.com ns.sub --a-rec=203.0.113.1
ipa dnsrecord-add example.com sub --ns-rec=ns.sub.example.com.
""") + _("""
Delete zone example.com with all resource records:
ipa dnszone-del example.com
""") + _("""
If a global forwarder is configured, all queries for which this server is not
authoritative (e.g. sub.example.com) will be routed to the global forwarder.
Global forwarding configuration can be overridden per-zone.
""") + _("""
Semantics of forwarding in IPA matches BIND semantics and depends on the type
of zone:
* Master zone: local BIND replies authoritatively to queries for data in
the given zone (including authoritative NXDOMAIN answers) and forwarding
affects only queries for names below zone cuts (NS records) of locally
served zones.
* Forward zone: forward zone contains no authoritative data. BIND forwards
queries, which cannot be answered from its local cache, to configured
forwarders.
""") + _("""
Semantics of the --forward-policy option:
* none - disable forwarding for the given zone.
* first - forward all queries to configured forwarders. If they fail,
do resolution using DNS root servers.
* only - forward all queries to configured forwarders and if they fail,
return failure.
""") + _("""
Disable global forwarding for given sub-tree:
ipa dnszone-mod example.com --forward-policy=none
""") + _("""
This configuration forwards all queries for names outside the example.com
sub-tree to global forwarders. Normal recursive resolution process is used
for names inside the example.com sub-tree (i.e. NS records are followed etc.).
""") + _("""
Forward all requests for the zone external.example.com to another forwarder
using a "first" policy (it will send the queries to the selected forwarder
and if not answered it will use global root servers):
ipa dnsforwardzone-add external.example.com --forward-policy=first \\
--forwarder=203.0.113.1
""") + _("""
Change forward-policy for external.example.com:
ipa dnsforwardzone-mod external.example.com --forward-policy=only
""") + _("""
Show forward zone external.example.com:
ipa dnsforwardzone-show external.example.com
""") + _("""
List all forward zones:
ipa dnsforwardzone-find
""") + _("""
Delete forward zone external.example.com:
ipa dnsforwardzone-del external.example.com
""") + _("""
Resolve a host name to see if it exists (will add default IPA domain
if one is not included):
ipa dns-resolve www.example.com
ipa dns-resolve www
""") + _("""
GLOBAL DNS CONFIGURATION
""") + _("""
DNS configuration passed to command line install script is stored in a local
configuration file on each IPA server where DNS service is configured. These
local settings can be overridden with a common configuration stored in LDAP
server:
""") + _("""
Show global DNS configuration:
ipa dnsconfig-show
""") + _("""
Modify global DNS configuration and set a list of global forwarders:
ipa dnsconfig-mod --forwarder=203.0.113.113
""")
# module-level logger and the plugin registry used by @register decorators
logger = logging.getLogger(__name__)

register = Registry()

# supported resource record types
_record_types = (
    u'A', u'AAAA', u'A6', u'AFSDB', u'APL', u'CERT', u'CNAME', u'DHCID', u'DLV',
    u'DNAME', u'DS', u'HIP', u'HINFO', u'IPSECKEY', u'KEY', u'KX', u'LOC',
    u'MD', u'MINFO', u'MX', u'NAPTR', u'NS', u'NSEC', u'NXT', u'PTR', u'RRSIG',
    u'RP', u'SIG', u'SPF', u'SRV', u'SSHFP', u'TLSA', u'TXT', u"URI"
)

# DNS zone record identificator
_dns_zone_record = DNSName.empty

# attributes derived from record types, e.g. 'arecord' for 'A'
_record_attributes = [str(record_name_format % t.lower())
                      for t in _record_types]

# Deprecated
# supported DNS classes, IN = internet, rest is almost never used
_record_classes = (u'IN', u'CS', u'CH', u'HS')

# IN record class
_IN = dns.rdataclass.IN

# NS record type
_NS = dns.rdatatype.from_text('NS')

# common output schema shared by the dnszone *-permission commands
_output_permissions = (
    output.summary,
    output.Output('result', bool, _('True means the operation was successful')),
    output.Output('value', unicode, _('Permission value')),
)
def _rname_validator(ugettext, zonemgr):
    """Validate an SOA RNAME (zone manager) value.

    Returns an error string when *zonemgr* is not a syntactically valid
    domain name, otherwise None.
    """
    error = None
    try:
        # only the construction matters; the resulting object is discarded
        DNSName(zonemgr)
    except (ValueError, dns.exception.SyntaxError) as exc:
        error = unicode(exc)
    return error
def _create_zone_serial():
"""
Generate serial number for zones. bind-dyndb-ldap expects unix time in
to be used for SOA serial.
SOA serial in a date format would also work, but it may be set to far
future when many DNS updates are done per day (more than 100). Unix
timestamp is more resilient to this issue.
"""
return int(time.time())
def _reverse_zone_name(netstr):
    # Derive the reverse (in-addr.arpa / ip6.arpa) zone name for an IP
    # address or network given as a string; returns a unicode zone name,
    # or None for an unrecognized address family.
    try:
        netaddr.IPAddress(str(netstr))
    except (netaddr.AddrFormatError, ValueError):
        # not a plain address; fall through and parse it as a network
        pass
    else:
        # use more sensible default prefix than netaddr default
        return unicode(get_reverse_zone_default(netstr))

    net = netaddr.IPNetwork(netstr)
    items = net.ip.reverse_dns.split('.')
    if net.version == 4:
        # keep one label per full octet covered by the prefix
        return u'.'.join(items[4 - net.prefixlen // 8:])
    elif net.version == 6:
        # keep one label per full nibble covered by the prefix
        return u'.'.join(items[32 - net.prefixlen // 4:])
    else:
        return None
def _validate_ip4addr(ugettext, ipaddr):
    # thin wrapper restricting the generic address validator to IPv4
    return ipaddr_validator(ugettext, ipaddr, 4)
def _validate_ip6addr(ugettext, ipaddr):
    # thin wrapper restricting the generic address validator to IPv6
    return ipaddr_validator(ugettext, ipaddr, 6)
def _validate_ipnet(ugettext, ipnet):
    # Validate a CIDR network specification; error message or None.
    try:
        netaddr.IPNetwork(ipnet)
    except (netaddr.AddrFormatError, ValueError, UnboundLocalError):
        # NOTE(review): UnboundLocalError looks like a workaround for a
        # netaddr parsing bug on some malformed inputs -- TODO confirm
        return _('invalid IP network format')
    return None
def _validate_bind_aci(ugettext, bind_acis):
if not bind_acis:
return None
bind_acis = bind_acis.split(';')
if bind_acis[-1]:
return _('each ACL element must be terminated with a semicolon')
else:
bind_acis.pop(-1)
for bind_aci in bind_acis:
if bind_aci in ("any", "none", "localhost", "localnets"):
continue
if bind_aci.startswith('!'):
bind_aci = bind_aci[1:]
try:
CheckedIPAddress(bind_aci, parse_netmask=True, allow_loopback=True)
except (netaddr.AddrFormatError, ValueError) as e:
return unicode(e)
except UnboundLocalError:
return _(u"invalid address format")
return None
def _normalize_bind_aci(bind_acis):
if not bind_acis:
return None
bind_acis = bind_acis.split(';')
normalized = []
for bind_aci in bind_acis:
if not bind_aci:
continue
if bind_aci in ("any", "none", "localhost", "localnets"):
normalized.append(bind_aci)
continue
prefix = ""
if bind_aci.startswith('!'):
bind_aci = bind_aci[1:]
prefix = "!"
try:
ip = CheckedIPAddress(bind_aci, parse_netmask=True,
allow_loopback=True)
if '/' in bind_aci: # addr with netmask
netmask = "/%s" % ip.prefixlen
else:
netmask = ""
normalized.append(u"%s%s%s" % (prefix, str(ip), netmask))
continue
except Exception:
normalized.append(bind_aci)
continue
acis = u';'.join(normalized)
acis += u';'
return acis
def _validate_nsec3param_record(ugettext, value):
_nsec3param_pattern = (r'^(?P<alg>\d+) (?P<flags>\d+) (?P<iter>\d+) '
r'(?P<salt>([0-9a-fA-F]{2})+|-)$')
rec = re.compile(_nsec3param_pattern, flags=re.U)
result = rec.match(value)
if result is None:
return _(u'expected format: <0-255> <0-255> <0-65535> '
'even-length_hexadecimal_digits_or_hyphen')
alg = int(result.group('alg'))
flags = int(result.group('flags'))
iterations = int(result.group('iter'))
salt = result.group('salt')
if alg > 255:
return _('algorithm value: allowed interval 0-255')
if flags > 255:
return _('flags value: allowed interval 0-255')
if iterations > 65535:
return _('iterations value: allowed interval 0-65535')
if salt == u'-':
return None
try:
binascii.a2b_hex(salt)
except TypeError as e:
return _('salt value: %(err)s') % {'err': e}
return None
def _hostname_validator(ugettext, value):
    # Require a fully qualified host name: once made absolute, the name
    # must have at least two labels plus the root label (e.g. "a.b.").
    assert isinstance(value, DNSName)
    if len(value.make_absolute().labels) < 3:
        return _('invalid domain-name: not fully qualified')
    return None
def _no_wildcard_validator(ugettext, value):
    """Disallow usage of wildcards as RFC 4592 section 4 recommends
    """
    assert isinstance(value, DNSName)
    # returns an error message for wildcard names, otherwise None
    if value.is_wild():
        return _('should not be a wildcard domain name (RFC 4592 section 4)')
    return None
def is_forward_record(zone, str_address):
    """Return True when an A/AAAA record with this address exists in *zone*.

    The record type searched for is chosen from the address family of
    *str_address*; any other family raises ValueError.
    """
    addr = netaddr.IPAddress(str_address)
    if addr.version == 4:
        search_kw = {'arecord': str_address}
    elif addr.version == 6:
        search_kw = {'aaaarecord': str_address}
    else:
        raise ValueError('Invalid address family')
    found = api.Command['dnsrecord_find'](zone, **search_kw)
    return found['count'] > 0
def add_forward_record(zone, name, str_address):
    """Create an A or AAAA record for *name* in *zone*.

    The record type is picked from the address family of *str_address*;
    an already existing, matching record is silently accepted.
    """
    addr = netaddr.IPAddress(str_address)
    if addr.version == 4:
        record_kw = {'arecord': str_address}
    elif addr.version == 6:
        record_kw = {'aaaarecord': str_address}
    else:
        raise ValueError('Invalid address family')
    try:
        api.Command['dnsrecord_add'](zone, name, **record_kw)
    except errors.EmptyModlist:
        # the entry already exists and matches
        pass
def get_reverse_zone(ipaddr):
    """
    resolve the reverse zone for IP address and see if it is managed by IPA
    server
    :param ipaddr: host IP address
    :return: tuple containing name of the reverse zone and the name of the
        record
    """
    ip = netaddr.IPAddress(str(ipaddr))
    revdns = DNSName(unicode(ip.reverse_dns))
    # find the authoritative reverse zone for the address via DNS
    try:
        revzone = DNSName(zone_for_name(revdns))
    except dns.resolver.NoNameservers:
        raise errors.NotFound(
            reason=_(
                'All nameservers failed to answer the query '
                'for DNS reverse zone %(revdns)s') % dict(revdns=revdns)
        )
    # verify that the zone is actually managed by this IPA server
    try:
        api.Command['dnszone_show'](revzone)
    except errors.NotFound:
        raise errors.NotFound(
            reason=_(
                'DNS reverse zone %(revzone)s for IP address '
                '%(addr)s is not managed by this server') % dict(
                addr=ipaddr, revzone=revzone)
        )
    # record name relative to the zone apex
    revname = revdns.relativize(revzone)
    return revzone, revname
def add_records_for_host_validation(option_name, host, domain, ip_addresses, check_forward=True, check_reverse=True):
    # Pre-flight checks for add_records_for_host(): verify the target zone
    # is managed by IPA, each address parses, and no conflicting forward
    # or reverse records already exist.  Raises a public error on the
    # first problem found; returns nothing on success.
    assert isinstance(host, DNSName)
    assert isinstance(domain, DNSName)

    try:
        api.Command['dnszone_show'](domain)['result']
    except errors.NotFound:
        raise errors.NotFound(
            reason=_('DNS zone %(zone)s not found') % dict(zone=domain)
        )
    # accept a single address as well as a list/tuple of addresses
    if not isinstance(ip_addresses, (tuple, list)):
        ip_addresses = [ip_addresses]

    for ip_address in ip_addresses:
        try:
            ip = CheckedIPAddress(
                ip_address, allow_multicast=True)
        except Exception as e:
            raise errors.ValidationError(name=option_name, error=unicode(e))

        if check_forward:
            if is_forward_record(domain, unicode(ip)):
                raise errors.DuplicateEntry(
                    message=_(u'IP address %(ip)s is already assigned in domain %(domain)s.')\
                        % dict(ip=str(ip), domain=domain))

        if check_reverse:
            try:
                # we prefer lookup of the IP through the reverse zone
                revzone, revname = get_reverse_zone(ip)
                reverse = api.Command['dnsrecord_find'](revzone, idnsname=revname)
                if reverse['count'] > 0:
                    raise errors.DuplicateEntry(
                        message=_(u'Reverse record for IP address %(ip)s already exists in reverse zone %(zone)s.')\
                            % dict(ip=str(ip), zone=revzone))
            except errors.NotFound:
                # no managed reverse zone for this address -- nothing to check
                pass
def add_records_for_host(host, domain, ip_addresses, add_forward=True, add_reverse=True):
    # Create forward (A/AAAA) and/or reverse (PTR) records for *host* in
    # *domain* for each address in *ip_addresses*.  Callers are expected
    # to have run add_records_for_host_validation() first.
    assert isinstance(host, DNSName)
    assert isinstance(domain, DNSName)

    # accept a single address as well as a list/tuple of addresses
    if not isinstance(ip_addresses, (tuple, list)):
        ip_addresses = [ip_addresses]

    for ip_address in ip_addresses:
        ip = CheckedIPAddress(
            ip_address, allow_multicast=True)

        if add_forward:
            add_forward_record(domain, host, unicode(ip))

        if add_reverse:
            try:
                revzone, revname = get_reverse_zone(ip)
                # the PTR target must be the absolute FQDN of the host
                addkw = {'ptrrecord': host.derelativize(domain).ToASCII()}
                api.Command['dnsrecord_add'](revzone, revname, **addkw)
            except errors.EmptyModlist:
                # the entry already exists and matches
                pass
def _dns_name_to_string(value, raw=False):
    """Render a DNS name as text.

    Accepts a DNSName or a unicode string (converted first; strings that
    cannot be parsed are returned untouched).  With raw=True the ASCII
    (punycode) form is produced, otherwise the unicode form.
    """
    if isinstance(value, unicode):
        try:
            value = DNSName(value)
        except Exception:
            # not a parseable name -- hand the raw string back
            return value

    assert isinstance(value, DNSName)
    return value.ToASCII() if raw else unicode(value)
def _check_entry_objectclass(entry, objectclasses):
"""
Check if entry contains all objectclasses
"""
if not isinstance(objectclasses, (list, tuple)):
objectclasses = [objectclasses, ]
if not entry.get('objectclass'):
return False
entry_objectclasses = [o.lower() for o in entry['objectclass']]
for o in objectclasses:
if o not in entry_objectclasses:
return False
return True
def _check_DN_objectclass(ldap, dn, objectclasses):
try:
entry = ldap.get_entry(dn, [u'objectclass', ])
except Exception:
return False
else:
return _check_entry_objectclass(entry, objectclasses)
class DNSRecord(Str):
    # Base parameter class for one DNS RR type.  Subclasses set `rrtype`
    # (and usually `parts`); the result is a multivalued Str parameter
    # that accepts/produces the raw record string while also exposing
    # per-part CLI options of the form --<rrtype>-<part>.

    # a list of parts that create the actual raw DNS record
    parts = None
    # an optional list of parameters used in record-specific operations
    extra = None
    supported = True
    # supported RR types: https://fedorahosted.org/bind-dyndb-ldap/browser/doc/schema

    # templates used to derive labels/docs/CLI names from the RR type
    label_format = _("%s record")
    part_label_format = "%s %s"
    doc_format = _('Raw %s records')
    option_group_format = _('%s Record')
    see_rfc_msg = _("(see RFC %s for details)")
    cli_name_format = "%s_%s"
    format_error_msg = None

    kwargs = Str.kwargs + (
        ('validatedns', bool, True),
        ('normalizedns', bool, True),
    )

    # should be replaced in subclasses
    rrtype = None
    rfc = None

    def __init__(self, name=None, *rules, **kw):
        # guard against typos in subclass rrtype definitions
        if self.rrtype not in _record_types:
            raise ValueError("Unknown RR type: %s. Must be one of %s" % \
                    (str(self.rrtype), ", ".join(_record_types)))
        if not name:
            # default attribute name, e.g. 'arecord*' (multivalued)
            name = "%s*" % (record_name_format % self.rrtype.lower())
        kw.setdefault('cli_name', '%s_rec' % self.rrtype.lower())
        kw.setdefault('label', self.label_format % self.rrtype)
        kw.setdefault('doc', self.doc_format % self.rrtype)
        kw.setdefault('option_group', self.option_group_format % self.rrtype)
        if not self.supported:
            kw['flags'] = ('no_option',)

        super(DNSRecord, self).__init__(name, *rules, **kw)

    def _get_part_values(self, value):
        # split a raw record string into its part values; None when the
        # number of tokens does not match the declared parts
        values = value.split()
        if len(values) != len(self.parts):
            return None
        return tuple(values)

    def _part_values_to_string(self, values, idna=True):
        # join part values back into the raw record string; DNS names are
        # converted to their ASCII (punycode) form when idna is True
        self._validate_parts(values)
        parts = []
        for v in values:
            if v is None:
                continue
            if isinstance(v, DNSName) and idna:
                v = v.ToASCII()
            elif not isinstance(v, unicode):
                v = unicode(v)
            parts.append(v)

        return u" ".join(parts)

    def get_parts_from_kw(self, kw, raise_on_none=True):
        # collect this record's part values from command options; returns
        # None when no part option was supplied at all
        part_names = tuple(part_name_format % (self.rrtype.lower(), part.name)
                           for part in self.parts)
        vals = tuple(kw.get(part_name) for part_name in part_names)

        if all(val is None for val in vals):
            return None

        if raise_on_none:
            # a missing required part is a conversion error at this point
            for val_id,val in enumerate(vals):
                if val is None and self.parts[val_id].required:
                    cli_name = self.cli_name_format % (self.rrtype.lower(), self.parts[val_id].name)
                    raise errors.ConversionError(name=self.name,
                                error=_("'%s' is a required part of DNS record") % cli_name)

        return vals

    def _validate_parts(self, parts):
        # sanity check: callers must pass exactly one value per part
        if len(parts) != len(self.parts):
            raise errors.ValidationError(name=self.name,
                                         error=_("Invalid number of parts!"))

    def _convert_scalar(self, value, index=None):
        # a tuple/list of part values is joined into the raw string form
        if isinstance(value, (tuple, list)):
            return self._part_values_to_string(value)
        return super(DNSRecord, self)._convert_scalar(value)

    def normalize(self, value):
        # per-part normalization, gated by the 'normalizedns' kwarg
        if self.normalizedns: # pylint: disable=using-constant-test
            if isinstance(value, (tuple, list)):
                value = tuple(
                    self._normalize_parts(v) for v in value \
                        if v is not None
                )
            elif value is not None:
                value = (self._normalize_parts(value),)

        return super(DNSRecord, self).normalize(value)

    def _normalize_parts(self, value):
        """
        Normalize a DNS record value using normalizers for its parts.
        """
        if self.parts is None:
            return value
        try:
            values = self._get_part_values(value)
            if not values:
                return value

            converted_values = [ part._convert_scalar(values[part_id]) \
                                 if values[part_id] is not None else None
                                 for part_id, part in enumerate(self.parts)
                               ]

            new_values = [ part.normalize(converted_values[part_id]) \
                           for part_id, part in enumerate(self.parts) ]

            value = self._convert_scalar(new_values)
        except Exception:
            # cannot normalize, rather return original value than fail
            pass
        return value

    def _rule_validatedns(self, _, value):
        # validation rule wired in via the 'validatedns' kwarg; returns an
        # error message or None (the `_` parameter is the gettext hook)
        if not self.validatedns:
            return None

        if value is None:
            return None

        if not self.supported:
            return _('DNS RR type "%s" is not supported by bind-dyndb-ldap plugin') \
                   % self.rrtype

        if self.parts is None:
            return None

        # validate record format
        values = self._get_part_values(value)
        if not values:
            if not self.format_error_msg:
                # build a generic "format must be ..." message from parts
                part_names = [part.name.upper() for part in self.parts]
                if self.rfc:
                    see_rfc_msg = " " + self.see_rfc_msg % self.rfc
                else:
                    see_rfc_msg = ""
                return _('format must be specified as "%(format)s" %(rfcs)s') \
                       % dict(format=" ".join(part_names), rfcs=see_rfc_msg)
            else:
                return self.format_error_msg

        # validate every part
        for part_id, part in enumerate(self.parts):
            val = part.normalize(values[part_id])
            val = part.convert(val)
            part.validate(val)
        return None

    def _convert_dnsrecord_part(self, part):
        """
        All parts of DNSRecord need to be processed and modified before they
        can be added to global DNS API. For example a prefix need to be added
        before part name so that the name is unique in the global namespace.
        """
        name = part_name_format % (self.rrtype.lower(), part.name)
        cli_name = self.cli_name_format % (self.rrtype.lower(), part.name)
        label = self.part_label_format % (self.rrtype, unicode(part.label))
        option_group = self.option_group_format % self.rrtype
        flags = list(part.flags) + ['virtual_attribute']
        if not part.required:
            flags.append('dnsrecord_optional')
        if not self.supported:
            flags.append("no_option")

        return part.clone_rename(name,
                                 cli_name=cli_name,
                                 label=label,
                                 required=False,
                                 option_group=option_group,
                                 flags=flags)

    def _convert_dnsrecord_extra(self, extra):
        """
        Parameters for special per-type behavior need to be processed in the
        same way as record parts in _convert_dnsrecord_part().
        """
        name = extra_name_format % (self.rrtype.lower(), extra.name)
        cli_name = self.cli_name_format % (self.rrtype.lower(), extra.name)
        label = self.part_label_format % (self.rrtype, unicode(extra.label))
        option_group = self.option_group_format % self.rrtype
        flags = list(extra.flags) + ['virtual_attribute']

        return extra.clone_rename(name,
                                  cli_name=cli_name,
                                  label=label,
                                  required=False,
                                  option_group=option_group,
                                  flags=flags)

    def get_parts(self):
        # parts cloned for the global namespace (see _convert_dnsrecord_part)
        if self.parts is None:
            return tuple()

        return tuple(self._convert_dnsrecord_part(part) for part in self.parts)

    def get_extra(self):
        # extras cloned for the global namespace (see _convert_dnsrecord_extra)
        if self.extra is None:
            return tuple()

        return tuple(self._convert_dnsrecord_extra(extra) for extra in self.extra)

    # callbacks for per-type special record behavior
    def dnsrecord_add_pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        # hook invoked before a record of this type is added; no-op here
        assert isinstance(dn, DN)

    def dnsrecord_add_post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # hook invoked after a record of this type is added; no-op here
        assert isinstance(dn, DN)
class ForwardRecord(DNSRecord):
    # Base class for address records (A/AAAA): adds the
    # --<rrtype>-create-reverse flag and the two callbacks that create the
    # matching PTR record once the forward record has been written.
    extra = (
        Flag('create_reverse?',
             label=_('Create reverse'),
             doc=_('Create reverse record for this IP Address'),
             flags=['no_update']
        ),
    )

    def dnsrecord_add_pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        assert isinstance(dn, DN)
        reverse_option = self._convert_dnsrecord_extra(self.extra[0])
        if options.get(reverse_option.name):
            records = entry_attrs.get(self.name, [])
            if not records:
                # --<rrtype>-create-reverse is set, but there are no records
                raise errors.RequirementError(name=self.name)

            for record in records:
                # check only; the reverse records themselves are created in
                # the post callback, after the forward record is in place
                add_records_for_host_validation(self.name, keys[-1], keys[-2], record,
                                                check_forward=False,
                                                check_reverse=True)

            # stash the addresses on the request context so the post
            # callback knows which reverse records to create
            setattr(context, '%s_reverse' % self.name, entry_attrs.get(self.name))

    def dnsrecord_add_post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        rev_records = getattr(context, '%s_reverse' % self.name, [])

        if rev_records:
            # make sure we don't run this post callback action again in nested
            # commands, like adding PTR record in add_records_for_host
            delattr(context, '%s_reverse' % self.name)
            for record in rev_records:
                try:
                    add_records_for_host(keys[-1], keys[-2], record,
                                         add_forward=False, add_reverse=True)
                except Exception as e:
                    raise errors.NonFatalError(
                        reason=_('Cannot create reverse record for "%(value)s": %(exc)s') \
                               % dict(value=record, exc=unicode(e)))
class UnsupportedDNSRecord(DNSRecord):
    """
    Records which are not supported by IPA CLI, but we allow to show them if
    LDAP contains these records.
    """
    supported = False

    def _get_part_values(self, value):
        # no structured parts are exposed for unsupported types
        return tuple()
class ARecord(ForwardRecord):
    # IPv4 address record; inherits --a-create-reverse from ForwardRecord
    rrtype = 'A'
    rfc = 1035
    parts = (
        Str('ip_address',
            _validate_ip4addr,
            label=_('IP Address'),
        ),
    )
class A6Record(DNSRecord):
    # obsolete IPv6 record type; exposed as a single raw 'data' part only
    rrtype = 'A6'
    rfc = 3226
    parts = (
        Str('data',
            label=_('Record data'),
        ),
    )

    def _get_part_values(self, value):
        # A6 RR type is obsolete and only a raw interface is provided
        return (value,)
class AAAARecord(ForwardRecord):
    # IPv6 address record; inherits --aaaa-create-reverse from ForwardRecord
    rrtype = 'AAAA'
    rfc = 3596
    parts = (
        Str('ip_address',
            _validate_ip6addr,
            label=_('IP Address'),
        ),
    )
class AFSDBRecord(DNSRecord):
    # AFS database location record: optional subtype plus a hostname
    rrtype = 'AFSDB'
    rfc = 1183
    parts = (
        Int('subtype?',
            label=_('Subtype'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('hostname',
            label=_('Hostname'),
        ),
    )
class APLRecord(UnsupportedDNSRecord):
    # displayed if present in LDAP, not editable via the CLI
    rrtype = 'APL'
    rfc = 3123
class CERTRecord(DNSRecord):
    # certificate storage record: type, key tag, algorithm and the
    # certificate/CRL payload
    rrtype = 'CERT'
    rfc = 4398
    parts = (
        Int('type',
            label=_('Certificate Type'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('key_tag',
            label=_('Key Tag'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('certificate_or_crl',
            label=_('Certificate/CRL'),
        ),
    )
class CNAMERecord(DNSRecord):
    # canonical-name (alias) record
    rrtype = 'CNAME'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            label=_('Hostname'),
            doc=_('A hostname which this alias hostname points to'),
        ),
    )
class DHCIDRecord(UnsupportedDNSRecord):
    # displayed if present in LDAP, not editable via the CLI
    rrtype = 'DHCID'
    rfc = 4701
class DNAMERecord(DNSRecord):
    # delegation name record: redirects a whole subtree to another name
    rrtype = 'DNAME'
    rfc = 2672
    parts = (
        DNSNameParam('target',
            label=_('Target'),
        ),
    )
class DSRecord(DNSRecord):
    # DNSSEC delegation signer record; the digest is hex-encoded
    rrtype = 'DS'
    rfc = 4034
    parts = (
        Int('key_tag',
            label=_('Key Tag'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('digest_type',
            label=_('Digest Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('digest',
            label=_('Digest'),
            pattern=r'^[0-9a-fA-F]+$',
            pattern_errmsg=u'only hexadecimal digits are allowed'
        ),
    )
class DLVRecord(DSRecord):
    # must use same attributes as DSRecord
    rrtype = 'DLV'
    rfc = 4431
class HINFORecord(UnsupportedDNSRecord):
    # displayed if present in LDAP, not editable via the CLI
    rrtype = 'HINFO'
    rfc = 1035
class HIPRecord(UnsupportedDNSRecord):
    # displayed if present in LDAP, not editable via the CLI
    rrtype = 'HIP'
    rfc = 5205
class KEYRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'KEY'
    rfc = 2535
class IPSECKEYRecord(UnsupportedDNSRecord):
    # displayed if present in LDAP, not editable via the CLI
    rrtype = 'IPSECKEY'
    rfc = 4025
class KXRecord(DNSRecord):
    # key exchanger record: preference plus exchanger host name
    rrtype = 'KX'
    rfc = 2230
    parts = (
        Int('preference',
            label=_('Preference'),
            doc=_('Preference given to this exchanger. Lower values are more preferred'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('exchanger',
            label=_('Exchanger'),
            doc=_('A host willing to act as a key exchanger'),
        ),
    )
class LOCRecord(DNSRecord):
rrtype = 'LOC'
rfc = 1876
parts = (
Int('lat_deg',
label=_('Degrees Latitude'),
minvalue=0,
maxvalue=90,
),
Int('lat_min?',
label=_('Minutes Latitude'),
minvalue=0,
maxvalue=59,
),
Decimal('lat_sec?',
label=_('Seconds Latitude'),
minvalue='0.0',
maxvalue='59.999',
precision=3,
),
StrEnum('lat_dir',
label=_('Direction Latitude'),
values=(u'N', u'S',),
),
Int('lon_deg',
label=_('Degrees Longitude'),
minvalue=0,
maxvalue=180,
),
Int('lon_min?',
label=_('Minutes Longitude'),
minvalue=0,
maxvalue=59,
),
Decimal('lon_sec?',
label=_('Seconds Longitude'),
minvalue='0.0',
maxvalue='59.999',
precision=3,
),
StrEnum('lon_dir',
label=_('Direction Longitude'),
values=(u'E', u'W',),
),
Decimal('altitude',
label=_('Altitude'),
minvalue='-100000.00',
maxvalue='42849672.95',
precision=2,
),
Decimal('size?',
label=_('Size'),
minvalue='0.0',
maxvalue='90000000.00',
precision=2,
),
Decimal('h_precision?',
label=_('Horizontal Precision'),
minvalue='0.0',
maxvalue='90000000.00',
precision=2,
),
Decimal('v_precision?',
label=_('Vertical Precision'),
minvalue='0.0',
maxvalue='90000000.00',
precision=2,
),
)
format_error_msg = _("""format must be specified as
"d1 [m1 [s1]] {"N"|"S"} d2 [m2 [s2]] {"E"|"W"} alt["m"] [siz["m"] [hp["m"] [vp["m"]]]]"
where:
d1: [0 .. 90] (degrees latitude)
d2: [0 .. 180] (degrees longitude)
m1, m2: [0 .. 59] (minutes latitude/longitude)
s1, s2: [0 .. 59.999] (seconds latitude/longitude)
alt: [-100000.00 .. 42849672.95] BY .01 (altitude in meters)
siz, hp, vp: [0 .. 90000000.00] (size/precision in meters)
See RFC 1876 for details""")
def _get_part_values(self, value):
regex = re.compile(
r'(?P<d1>\d{1,2}\s+)'
r'(?:(?P<m1>\d{1,2}\s+)'
r'(?P<s1>\d{1,2}(?:\.\d{1,3})?\s+)?)?'
r'(?P<dir1>[NS])\s+'
r'(?P<d2>\d{1,3}\s+)'
r'(?:(?P<m2>\d{1,2}\s+)'
r'(?P<s2>\d{1,2}(?:\.\d{1,3})?\s+)?)?'
r'(?P<dir2>[WE])\s+'
r'(?P<alt>-?\d{1,8}(?:\.\d{1,2})?)m?'
r'(?:\s+(?P<siz>\d{1,8}(?:\.\d{1,2})?)m?'
r'(?:\s+(?P<hp>\d{1,8}(?:\.\d{1,2})?)m?'
r'(?:\s+(?P<vp>\d{1,8}(?:\.\d{1,2})?)m?\s*)?)?)?$')
m = regex.match(value)
if m is None:
return None
return tuple(x.strip() if x is not None else x for x in m.groups())
    def _validate_parts(self, parts):
        """Check cross-part dependencies of a LOC record value.

        RFC 1876 presentation format only allows truncating optional fields
        from the right, so e.g. seconds require minutes and each precision
        field requires the less specific fields before it.

        :param parts: tuple of part values as returned by _get_part_values()
        :raises errors.ValidationError: when a part is set but a part it
            depends on is empty
        """
        super(LOCRecord, self)._validate_parts(parts)

        # create part_name -> part_id map first
        part_name_map = dict((part.name, part_id) \
            for part_id,part in enumerate(self.parts))

        # each tuple: (dependent part, parts that must be set along with it)
        requirements = ( ('lat_sec', 'lat_min'),
                        ('lon_sec', 'lon_min'),
                        ('h_precision', 'size'),
                        ('v_precision', 'h_precision', 'size') )

        for req in requirements:
            target_part = req[0]

            if parts[part_name_map[target_part]] is not None:
                required_parts = req[1:]
                if any(parts[part_name_map[part]] is None for part in required_parts):
                    # report the offending parts by their CLI option names
                    target_cli_name = self.cli_name_format % (self.rrtype.lower(), req[0])
                    required_cli_names = [ self.cli_name_format % (self.rrtype.lower(), part)
                                           for part in req[1:] ]
                    error = _("'%(required)s' must not be empty when '%(name)s' is set") % \
                        dict(required=', '.join(required_cli_names),
                             name=target_cli_name)
                    raise errors.ValidationError(name=self.name, error=error)
# MD (mail destination) record, RFC 1035; not managed by this plugin
class MDRecord(UnsupportedDNSRecord):
    # obsoleted, use MX instead
    rrtype = 'MD'
    rfc = 1035
# MINFO (mailbox/mail-list information) record, RFC 1035; not managed here
class MINFORecord(UnsupportedDNSRecord):
    rrtype = 'MINFO'
    rfc = 1035
# MX (mail exchanger) record, RFC 1035 section 3.3.9
class MXRecord(DNSRecord):
    rrtype = 'MX'
    rfc = 1035
    parts = (
        Int('preference',
            label=_('Preference'),
            doc=_('Preference given to this exchanger. Lower values are more preferred'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('exchanger',
            label=_('Exchanger'),
            doc=_('A host willing to act as a mail exchanger'),
        ),
    )
# NS (authoritative nameserver) record, RFC 1035 section 3.3.11
class NSRecord(DNSRecord):
    rrtype = 'NS'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            label=_('Hostname'),
        ),
    )
# NSEC (DNSSEC authenticated denial of existence) record, RFC 4034
class NSECRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'NSEC'
    rfc = 4034
def _validate_naptr_flags(ugettext, flags):
allowed_flags = u'SAUP'
flags = flags.replace('"','').replace('\'','')
for flag in flags:
if flag not in allowed_flags:
return _('flags must be one of "S", "A", "U", or "P"')
return None
# NAPTR (naming authority pointer) record, RFC 2915
class NAPTRRecord(DNSRecord):
    rrtype = 'NAPTR'
    rfc = 2915
    parts = (
        Int('order',
            label=_('Order'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('preference',
            label=_('Preference'),
            minvalue=0,
            maxvalue=65535,
        ),
        Str('flags',
            _validate_naptr_flags,
            label=_('Flags'),
            # flags are case-insensitive per RFC; store them uppercased
            normalizer=lambda x:x.upper()
        ),
        Str('service',
            label=_('Service'),
        ),
        Str('regexp',
            label=_('Regular Expression'),
        ),
        Str('replacement',
            label=_('Replacement'),
        ),
    )
# NXT record, RFC 2535; obsoleted by NSEC, not managed by this plugin
class NXTRecord(UnsupportedDNSRecord):
    rrtype = 'NXT'
    rfc = 2535
# PTR (domain name pointer) record, RFC 1035; used for reverse zones
class PTRRecord(DNSRecord):
    rrtype = 'PTR'
    rfc = 1035
    parts = (
        DNSNameParam('hostname',
            #RFC 2317 section 5.2 -- can be relative
            label=_('Hostname'),
            doc=_('The hostname this reverse record points to'),
        ),
    )
# RP (responsible person) record, RFC 1183; not managed by this plugin
class RPRecord(UnsupportedDNSRecord):
    rrtype = 'RP'
    rfc = 1183
# SRV (service location) record, RFC 2782
class SRVRecord(DNSRecord):
    rrtype = 'SRV'
    rfc = 2782
    parts = (
        Int('priority',
            label=_('Priority (order)'),
            doc=_('Lower number means higher priority. Clients will attempt '
                  'to contact the server with the lowest-numbered priority '
                  'they can reach.'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('weight',
            label=_('Weight'),
            doc=_('Relative weight for entries with the same priority.'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('port',
            label=_('Port'),
            minvalue=0,
            maxvalue=65535,
        ),
        DNSNameParam('target',
            label=_('Target'),
            doc=_('The domain name of the target host or \'.\' if the service is decidedly not available at this domain'),
        ),
    )
def _sig_time_validator(ugettext, value):
time_format = "%Y%m%d%H%M%S"
try:
time.strptime(value, time_format)
except ValueError:
return _('the value does not follow "YYYYMMDDHHMMSS" time format')
return None
# SIG record, RFC 2535; superseded by RRSIG for DNSSEC
class SIGRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'SIG'
    rfc = 2535
# SPF record, RFC 4408; not managed here (TXT records are used instead)
class SPFRecord(UnsupportedDNSRecord):
    rrtype = 'SPF'
    rfc = 4408
# RRSIG (DNSSEC signature) record, RFC 4034
class RRSIGRecord(UnsupportedDNSRecord):
    # managed by BIND itself
    rrtype = 'RRSIG'
    rfc = 4034
# SSHFP (SSH public key fingerprint) record, RFC 4255
class SSHFPRecord(DNSRecord):
    rrtype = 'SSHFP'
    rfc = 4255
    parts = (
        Int('algorithm',
            label=_('Algorithm'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('fp_type',
            label=_('Fingerprint Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('fingerprint',
            label=_('Fingerprint'),
        ),
    )

    def _get_part_values(self, value):
        """Split a raw SSHFP value into (algorithm, fp_type, fingerprint)."""
        # fingerprint part can contain space in LDAP, return it as one part
        values = value.split(None, 2)
        if len(values) != len(self.parts):
            return None
        return tuple(values)
# TLSA (DANE TLS association) record, RFC 6698
class TLSARecord(DNSRecord):
    rrtype = 'TLSA'
    rfc = 6698
    parts = (
        Int('cert_usage',
            label=_('Certificate Usage'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('selector',
            label=_('Selector'),
            minvalue=0,
            maxvalue=255,
        ),
        Int('matching_type',
            label=_('Matching Type'),
            minvalue=0,
            maxvalue=255,
        ),
        Str('cert_association_data',
            label=_('Certificate Association Data'),
        ),
    )
# TXT (free-form text) record, RFC 1035
class TXTRecord(DNSRecord):
    rrtype = 'TXT'
    rfc = 1035
    parts = (
        Str('data',
            label=_('Text Data'),
        ),
    )

    def _get_part_values(self, value):
        """Return the whole raw value as the single 'data' part."""
        # ignore any space in TXT record
        return (value,)
def _normalize_uri_target(uri_target):
r"""DNS-escape "\ characters and double-quote target."""
# is user-provided string is already quoted?
if uri_target[0:1] == uri_target[-1:] == '"':
uri_target = uri_target[1:-1]
# RFC 7553 section 4.4: The Target MUST NOT be an empty URI ("").
# minlength in param will detect this
if not uri_target:
return None
return u'"{0}"'.format(uri_target)
# URI record, RFC 7553
class URIRecord(DNSRecord):
    rrtype = 'URI'
    rfc = 7553
    parts = (
        Int('priority',
            label=_('Priority (order)'),
            doc=_('Lower number means higher priority. Clients will attempt '
                  'to contact the URI with the lowest-numbered priority '
                  'they can reach.'),
            minvalue=0,
            maxvalue=65535,
        ),
        Int('weight',
            label=_('Weight'),
            doc=_('Relative weight for entries with the same priority.'),
            minvalue=0,
            maxvalue=65535,
        ),
        Str('target',
            label=_('Target Uniform Resource Identifier'),
            doc=_('Target Uniform Resource Identifier according to RFC 3986'),
            minlength=1,
            # This field holds the URI of the target, enclosed in double-quote
            # characters (e.g. "uri:").
            normalizer=_normalize_uri_target,
        ),
    )
# One singleton instance per record type known to this plugin (both managed
# DNSRecord types and UnsupportedDNSRecord placeholders).
_dns_records = (
    ARecord(),
    AAAARecord(),
    A6Record(),
    AFSDBRecord(),
    APLRecord(),
    CERTRecord(),
    CNAMERecord(),
    DHCIDRecord(),
    DLVRecord(),
    DNAMERecord(),
    DSRecord(),
    HIPRecord(),
    IPSECKEYRecord(),
    KEYRecord(),
    KXRecord(),
    LOCRecord(),
    MXRecord(),
    NAPTRRecord(),
    NSRecord(),
    NSECRecord(),
    PTRRecord(),
    RRSIGRecord(),
    RPRecord(),
    SIGRecord(),
    SPFRecord(),
    SRVRecord(),
    SSHFPRecord(),
    TLSARecord(),
    TXTRecord(),
    URIRecord(),
)
def __dns_record_options_iter():
    """Yield every dnsrecord option param: the three --structured params,
    then each record type followed by its part and extra params."""
    for opt in (Any('dnsrecords?',
                    label=_('Records'),
                    flags=['no_create', 'no_search', 'no_update'],),
                Str('dnstype?',
                    label=_('Record type'),
                    flags=['no_create', 'no_search', 'no_update'],),
                Str('dnsdata?',
                    label=_('Record data'),
                    flags=['no_create', 'no_search', 'no_update'],)):
        # These 3 options are used in --structured format. They are defined
        # rather in takes_params than has_output_params because of their
        # order - they should be printed to CLI before any DNS part param
        yield opt
    for option in _dns_records:
        yield option
        for part in option.get_parts():
            yield part
        for extra in option.get_extra():
            yield extra

# flat tuple of all record/part/extra params, in CLI display order
_dns_record_options = tuple(__dns_record_options_iter())
def check_ns_rec_resolvable(zone, name):
    """Verify that an NS record target resolves to an A/AAAA record.

    Relative names are first made absolute within *zone*; an empty name
    means the zone apex.

    :raises errors.NotFound: when the nameserver has no A/AAAA record
    """
    assert isinstance(zone, DNSName)
    assert isinstance(name, DNSName)

    if name.is_empty():
        # zone apex -- resolve the zone name itself
        name = zone.make_absolute()
    elif not name.is_absolute():
        # this is a DNS name relative to the zone
        name = name.derelativize(zone.make_absolute())
    try:
        verify_host_resolvable(name)
    except errors.DNSNotARecordError:
        raise errors.NotFound(
            reason=_('Nameserver \'%(host)s\' does not have a corresponding '
                     'A/AAAA record') % {'host': name}
        )
def dns_container_exists(ldap):
    """Return True when the DNS container entry exists in LDAP."""
    dns_container_dn = DN(api.env.container_dns, api.env.basedn)
    try:
        # existence check only -- request no attributes
        ldap.get_entry(dns_container_dn, [])
    except errors.NotFound:
        return False
    return True
def dnssec_installed(ldap):
    """
    * Method opendnssecinstance.get_dnssec_key_masters() CANNOT be used in the
    dns plugin, or any plugin accessible for common users! *
    Why?: The content of service container is not readable for common users.

    This method only try to find if a DNSSEC service container exists on any
    replica. What means that DNSSEC key master is installed.
    :param ldap: ldap connection
    :return: True if DNSSEC was installed, otherwise False
    """
    # readable-by-anyone check, unlike the service container itself
    return is_service_enabled('DNSSEC', conn=ldap)
def default_zone_update_policy(zone):
    """Return the default dynamic update policy string for *zone*,
    choosing the reverse- or forward-zone variant as appropriate."""
    if not zone.is_reverse():
        return get_dns_forward_zone_update_policy(api.env.realm)
    return get_dns_reverse_zone_update_policy(api.env.realm, zone.ToASCII())
def _convert_to_idna(value):
    """
    Function converts a unicode value to idna, without extra validation.
    If conversion fails, None is returned
    """
    assert isinstance(value, unicode)

    try:
        idna_val = value
        start_dot = u''
        end_dot = u''
        # leading/trailing dots would produce empty labels; strip them and
        # re-attach after conversion
        if idna_val.startswith(u'.'):
            idna_val = idna_val[1:]
            start_dot = u'.'
        if idna_val.endswith(u'.'):
            idna_val = idna_val[:-1]
            end_dot = u'.'
        idna_val = encodings.idna.nameprep(idna_val)
        # split on unescaped dots only (a backslash escapes a literal dot)
        idna_val = re.split(r'(?<!\\)\.', idna_val)
        idna_val = u'%s%s%s' % (start_dot,
                                u'.'.join(
                                    encodings.idna.ToASCII(x).decode('ascii')
                                    for x in idna_val),
                                end_dot)
        return idna_val
    except Exception:
        # deliberate best-effort: any conversion failure yields None
        pass
    return None
def _create_idn_filter(cmd, ldap, term=None, **options):
    """Build the LDAP search filter for a DNS search command.

    Extends the search term with its IDNA (punycode) variant, converts
    DNSName values and record-part options to their LDAP string form, and
    combines attribute, term and member filters with MATCH_ALL.
    """
    if term:
        #include idna values to search
        term_idna = _convert_to_idna(term)
        if term_idna and term != term_idna:
            term = (term, term_idna)

    search_kw = {}
    attr_extra_filters = []

    for attr, value in cmd.args_options_2_entry(**options).items():
        if not isinstance(value, list):
            value = [value]
        for i, v in enumerate(value):
            if isinstance(v, DNSName):
                value[i] = v.ToASCII()
            elif attr in map_names_to_records:
                # normalize record values to their canonical string form;
                # fall back to the raw value if it cannot be parsed
                record = map_names_to_records[attr]
                parts = record._get_part_values(v)
                if parts is None:
                    value[i] = v
                    continue
                try:
                    value[i] = record._part_values_to_string(parts)
                except errors.ValidationError:
                    value[i] = v

        #create MATCH_ANY filter for multivalue
        if len(value) > 1:
            f = ldap.make_filter({attr: value}, rules=ldap.MATCH_ANY)
            attr_extra_filters.append(f)
        else:
            search_kw[attr] = value

    if cmd.obj.search_attributes:
        search_attrs = cmd.obj.search_attributes
    else:
        search_attrs = cmd.obj.default_attributes
    if cmd.obj.search_attributes_config:
        # a comma-separated list in IPA config overrides the defaults
        config = ldap.get_ipa_config()
        config_attrs = config.get(cmd.obj.search_attributes_config, [])
        if len(config_attrs) == 1 and (isinstance(config_attrs[0],
                                                  str)):
            search_attrs = config_attrs[0].split(',')

    search_kw['objectclass'] = cmd.obj.object_class
    attr_filter = ldap.make_filter(search_kw, rules=ldap.MATCH_ALL)
    if attr_extra_filters:
        #combine filter if there is any idna value
        attr_extra_filters.append(attr_filter)
        attr_filter = ldap.combine_filters(attr_extra_filters,
                                           rules=ldap.MATCH_ALL)

    search_kw = {}
    for a in search_attrs:
        search_kw[a] = term
    term_filter = ldap.make_filter(search_kw, exact=False)

    member_filter = cmd.get_member_filter(ldap, **options)

    filter = ldap.combine_filters(
        (term_filter, attr_filter, member_filter), rules=ldap.MATCH_ALL
    )
    return filter
# attribute name (e.g. 'arecord') -> DNSRecord instance, supported types only
map_names_to_records = {record_name_format % record.rrtype.lower(): record
                        for record in _dns_records if record.supported}
def _records_idn_postprocess(record, **options):
    """Convert DNS name parts of record values back to their display
    (IDN) form in place; raw mode keeps the punycode form."""
    for attr in record.keys():
        attr = attr.lower()
        try:
            param = map_names_to_records[attr]
        except KeyError:
            # not a DNS record attribute
            continue
        if not isinstance(param, DNSRecord):
            continue

        part_params = param.get_parts()
        rrs = []
        for dnsvalue in record[attr]:
            parts = param._get_part_values(dnsvalue)
            if parts is None:
                continue
            parts = list(parts)
            try:
                for (i, p) in enumerate(parts):
                    if isinstance(part_params[i], DNSNameParam):
                        parts[i] = DNSName(p)
                rrs.append(param._part_values_to_string(parts,
                                    idna=options.get('raw', False)))
            except (errors.ValidationError, errors.ConversionError):
                # keep the original value if it cannot be re-rendered
                rrs.append(dnsvalue)
        record[attr] = rrs
def _normalize_zone(zone):
    """Lowercase plain-ASCII zone names; leave non-ASCII (IDNA) names and
    non-unicode values untouched."""
    if not isinstance(zone, unicode):
        return zone
    try:
        zone.encode('ascii')
    except UnicodeError:
        # IDNA zone -- normalize only non-IDNA zones
        return zone
    return zone.lower()
def _get_auth_zone_ldap(api, name):
    """
    Find authoritative zone in LDAP for name. Only active zones are considered.
    :param name:
    :return: (zone, truncated)
    zone: authoritative zone, or None if authoritative zone is not in LDAP
    """
    assert isinstance(name, DNSName)
    ldap = api.Backend.ldap2

    # Create all possible parent zone names
    search_name = name.make_absolute()
    zone_names = []
    for i, name in enumerate(search_name):
        zone_name_abs = DNSName(search_name[i:]).ToASCII()
        zone_names.append(zone_name_abs)
        # compatibility with IPA < 4.0, zone name can be relative
        zone_names.append(zone_name_abs[:-1])

    # Create filters
    objectclass_filter = ldap.make_filter({'objectclass':'idnszone'})
    zonenames_filter = ldap.make_filter({'idnsname': zone_names})
    zoneactive_filter = ldap.make_filter({'idnsZoneActive': 'true'})
    complete_filter = ldap.combine_filters(
        [objectclass_filter, zonenames_filter, zoneactive_filter],
        rules=ldap.MATCH_ALL
    )

    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname'],
            base_dn=DN(api.env.container_dns, api.env.basedn),
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return None, False

    # always use absolute zones
    matched_auth_zones = [entry.single_value['idnsname'].make_absolute()
                          for entry in entries]

    # return longest match
    return max(matched_auth_zones, key=len), truncated
def _get_longest_match_ns_delegation_ldap(api, zone, name):
    """
    Searches for deepest delegation for name in LDAP zone.

    NOTE: NS record in zone apex is not considered as delegation.
    It returns None if there is no delegation outside of zone apex.

    Example:
    zone: example.com.
    name: ns.sub.example.com.
    records:
        extra.ns.sub.example.com.
        sub.example.com.
        example.com

    result: sub.example.com.

    :param zone: zone name
    :param name:
    :return: (match, truncated);
    match: delegation name if success, or None if no delegation record exists
    """
    assert isinstance(zone, DNSName)
    assert isinstance(name, DNSName)

    ldap = api.Backend.ldap2

    # get zone DN
    zone_dn = api.Object.dnszone.get_dn(zone)

    if name.is_absolute():
        relative_record_name = name.relativize(zone.make_absolute())
    else:
        relative_record_name = name

    # Name is zone apex
    if relative_record_name.is_empty():
        return None, False

    # create list of possible record names
    possible_record_names = [DNSName(relative_record_name[i:]).ToASCII()
                             for i in range(len(relative_record_name))]

    # search filters
    # NOTE(review): the value is a list wrapped in another list
    # ([possible_record_names]); confirm make_filter is meant to receive the
    # nested form rather than possible_record_names directly
    name_filter = ldap.make_filter({'idnsname': [possible_record_names]})
    objectclass_filter = ldap.make_filter({'objectclass': 'idnsrecord'})
    complete_filter = ldap.combine_filters(
        [name_filter, objectclass_filter],
        rules=ldap.MATCH_ALL
    )

    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname', 'nsrecord'],
            base_dn=zone_dn,
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return None, False

    matched_records = []

    # test if entry contains NS records
    for entry in entries:
        if entry.get('nsrecord'):
            matched_records.append(entry.single_value['idnsname'])

    if not matched_records:
        return None, truncated

    # return longest match
    return max(matched_records, key=len), truncated
def _find_subtree_forward_zones_ldap(api, name, child_zones_only=False):
    """
    Search for forwardzone <name> and all child forwardzones
    Filter: (|(*.<name>.)(<name>.))
    :param name:
    :param child_zones_only: search only for child zones
    :return: (list of zonenames, truncated), list is empty if no zone found
    """
    assert isinstance(name, DNSName)
    ldap = api.Backend.ldap2

    # prepare for filter "*.<name>."
    search_name = u".%s" % name.make_absolute().ToASCII()

    # we need to search zone with and without last dot, due compatibility
    # with IPA < 4.0
    search_names = [search_name, search_name[:-1]]

    # Create filters
    objectclass_filter = ldap.make_filter({'objectclass':'idnsforwardzone'})
    zonenames_filter = ldap.make_filter({'idnsname': search_names}, exact=False,
                                        trailing_wildcard=False)
    if not child_zones_only:
        # find also zone with exact name
        exact_name = name.make_absolute().ToASCII()
        # we need to search zone with and without last dot, due compatibility
        # with IPA < 4.0
        # FIX: use exact_name[:-1] (the name without its trailing dot), not
        # exact_name[-1], which is only the last character and always '.'
        exact_names = [exact_name, exact_name[:-1]]
        exact_name_filter = ldap.make_filter({'idnsname': exact_names})
        zonenames_filter = ldap.combine_filters([zonenames_filter,
                                                 exact_name_filter])

    zoneactive_filter = ldap.make_filter({'idnsZoneActive': 'true'})
    complete_filter = ldap.combine_filters(
        [objectclass_filter, zonenames_filter, zoneactive_filter],
        rules=ldap.MATCH_ALL
    )

    try:
        entries, truncated = ldap.find_entries(
            filter=complete_filter,
            attrs_list=['idnsname'],
            base_dn=DN(api.env.container_dns, api.env.basedn),
            scope=ldap.SCOPE_ONELEVEL
        )
    except errors.NotFound:
        return [], False

    result = [entry.single_value['idnsname'].make_absolute()
              for entry in entries]
    return result, truncated
def _get_zone_which_makes_fw_zone_ineffective(api, fwzonename):
    """
    Check if forward zone is effective.

    If parent zone exists as authoritative zone, the forward zone will not
    forward queries by default. It is necessary to delegate authority
    to forward zone with a NS record.

    Example:

    Forward zone: sub.example.com
    Zone: example.com

    Forwarding will not work, because the server thinks it is authoritative
    for zone and will return NXDOMAIN

    Adding record: sub.example.com NS ns.sub.example.com.
    will delegate authority, and IPA DNS server will forward DNS queries.

    :param fwzonename: forwardzone
    :return: (zone, truncated)
    zone: None if effective, name of authoritative zone otherwise
    """
    assert isinstance(fwzonename, DNSName)

    auth_zone, truncated_zone = _get_auth_zone_ldap(api, fwzonename)
    if not auth_zone:
        # no authoritative parent zone -- forwarding works
        return None, truncated_zone

    delegation_record_name, truncated_ns =\
        _get_longest_match_ns_delegation_ldap(api, auth_zone, fwzonename)

    truncated = truncated_ns or truncated_zone

    if delegation_record_name:
        # authority is delegated by an NS record -- forwarding works
        return None, truncated

    return auth_zone, truncated
def _add_warning_fw_zone_is_not_effective(api, result, fwzone, version):
    """
    Adds warning message to result, if required
    """
    (
        authoritative_zone, _truncated
    ) = _get_zone_which_makes_fw_zone_ineffective(api, fwzone)
    if authoritative_zone:
        # forward zone is not effective and forwarding will not work
        messages.add_message(
            version, result,
            messages.ForwardzoneIsNotEffectiveWarning(
                fwzone=fwzone, authzone=authoritative_zone,
                # NS record name relative to the authoritative zone
                ns_rec=fwzone.relativize(authoritative_zone)
            )
        )
def _add_warning_fw_policy_conflict_aez(result, fwzone, **options):
    """Warn if forwarding policy conflicts with an automatic empty zone."""
    fwd_policy = result['result'].get(u'idnsforwardpolicy',
                                      dnsforwardzone.default_forward_policy)
    # only the 'only' policy is compatible with automatic empty zones
    if (
        fwd_policy != [u'only']
        and related_to_auto_empty_zone(DNSName(fwzone))
    ):
        messages.add_message(
            options['version'], result,
            messages.DNSForwardPolicyConflictWithEmptyZone()
        )
class DNSZoneBase(LDAPObject):
    """
    Base class for DNS Zone
    """
    container_dn = api.env.container_dns
    object_class = ['top']
    possible_objectclasses = ['ipadnszone']
    default_attributes = [
        'idnsname', 'idnszoneactive', 'idnsforwarders', 'idnsforwardpolicy'
    ]

    takes_params = (
        DNSNameParam('idnsname',
            _no_wildcard_validator,  # RFC 4592 section 4
            only_absolute=True,
            cli_name='name',
            label=_('Zone name'),
            doc=_('Zone name (FQDN)'),
            default_from=lambda name_from_ip: _reverse_zone_name(name_from_ip),
            normalizer=_normalize_zone,
            primary_key=True,
        ),
        Str('name_from_ip?', _validate_ipnet,
            label=_('Reverse zone IP network'),
            doc=_('IP network to create reverse zone name from'),
            flags=('virtual_attribute',),
        ),
        Bool('idnszoneactive?',
            cli_name='zone_active',
            label=_('Active zone'),
            doc=_('Is zone active?'),
            flags=['no_create', 'no_update'],
            attribute=True,
        ),
        Str('idnsforwarders*',
            validate_bind_forwarder,
            cli_name='forwarder',
            label=_('Zone forwarders'),
            doc=_('Per-zone forwarders. A custom port can be specified '
                  'for each forwarder using a standard format "IP_ADDRESS port PORT"'),
        ),
        StrEnum('idnsforwardpolicy?',
            cli_name='forward_policy',
            label=_('Forward policy'),
            doc=_('Per-zone conditional forwarding policy. Set to "none" to '
                  'disable forwarding to global forwarder for this zone. In '
                  'that case, conditional zone forwarders are disregarded.'),
            values=(u'only', u'first', u'none'),
        ),
        Str('managedby',
            label=_('Managedby permission'),
            flags={'virtual_attribute', 'no_create', 'no_search', 'no_update'},
        ),
    )

    def get_dn(self, *keys, **options):
        """Build the zone DN, preferring whichever name form (with or
        without the trailing dot) already exists in LDAP; zones created
        by IPA < 4.0 may be stored with a relative name."""
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))

        zone = keys[-1]
        assert isinstance(zone, DNSName)
        assert zone.is_absolute()
        zone_a = zone.ToASCII()

        # special case when zone is the root zone ('.')
        if zone == DNSName.root:
            return super(DNSZoneBase, self).get_dn(zone_a, **options)

        # try first relative name, a new zone has to be added as absolute
        # otherwise ObjectViolation is raised
        zone_a = zone_a[:-1]
        dn = super(DNSZoneBase, self).get_dn(zone_a, **options)
        try:
            self.backend.get_entry(dn, [''])
        except errors.NotFound:
            zone_a = u"%s." % zone_a
            dn = super(DNSZoneBase, self).get_dn(zone_a, **options)
        return dn

    def permission_name(self, zone):
        """Return the name of the managed-by permission for *zone*."""
        assert isinstance(zone, DNSName)
        return u"Manage DNS zone %s" % zone.ToASCII()

    def get_name_in_zone(self, zone, hostname):
        """
        Get name of a record that is to be added to a new zone. I.e. when
        we want to add record "ipa.lab.example.com" in a zone "example.com",
        this function should return "ipa.lab". Returns None when record cannot
        be added to a zone. Returns '@' when the hostname is the zone record.
        """
        assert isinstance(zone, DNSName)
        assert zone.is_absolute()
        assert isinstance(hostname, DNSName)

        if not hostname.is_absolute():
            # relative names are usable as-is
            return hostname

        if hostname.is_subdomain(zone):
            return hostname.relativize(zone)

        return None

    def _remove_permission(self, zone):
        """Delete the zone's managed-by permission, also trying the
        relative-name variant used by IPA < 4.0."""
        permission_name = self.permission_name(zone)
        try:
            self.api.Command['permission_del'](permission_name, force=True)
        except errors.NotFound as e:
            if zone == DNSName.root:  # special case root zone
                raise
            # compatibility, older IPA versions which allows to create zone
            # without absolute zone name
            permission_name_rel = self.permission_name(
                zone.relativize(DNSName.root)
            )
            try:
                self.api.Command['permission_del'](permission_name_rel,
                                                   force=True)
            except errors.NotFound:
                raise e  # re-raise original exception

    def _make_zonename_absolute(self, entry_attrs, **options):
        """
        Zone names can be relative in IPA < 4.0, make sure we always return
        absolute zone name from ldap
        """
        if options.get('raw'):
            return

        if "idnsname" in entry_attrs:
            entry_attrs.single_value['idnsname'] = (
                entry_attrs.single_value['idnsname'].make_absolute())
# Common 'add' command logic shared by dnszone/dnsforwardzone.
class DNSZoneBase_add(LDAPCreate):

    takes_options = LDAPCreate.takes_options + (
        Flag('skip_overlap_check',
            doc=_('Force DNS zone creation even if it will overlap with '
                  'an existing zone.')
        ),
    )

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        assert isinstance(dn, DN)

        if options.get('name_from_ip'):
            # name_from_ip is only a generator for the zone name; both at once
            # would be ambiguous
            zone = _reverse_zone_name(options.get('name_from_ip'))
            if keys[-1] != DNSName(zone):
                raise errors.ValidationError(
                    name='name-from-ip',
                    error=_("cannot be used when a zone is specified")
                )

        try:
            entry = ldap.get_entry(dn)
        except errors.NotFound:
            pass
        else:
            # an entry already exists; distinguish "same zone type" from
            # "other zone type under the same name"
            if _check_entry_objectclass(entry, self.obj.object_class):
                self.obj.handle_duplicate_entry(*keys)
            else:
                raise errors.DuplicateEntry(
                    message=_(u'Only one zone type is allowed per zone name')
                )

        # new zones are created active
        entry_attrs['idnszoneactive'] = 'TRUE'

        if not options['skip_overlap_check']:
            try:
                check_zone_overlap(keys[-1], raise_on_error=False)
            except DNSZoneAlreadyExists as e:
                raise errors.InvocationError(str(e))

        return dn
# Common 'del' command logic: refuse to delete entries of another zone type
# and clean up the zone's managed-by permission afterwards.
class DNSZoneBase_del(LDAPDelete):

    def pre_callback(self, ldap, dn, *nkeys, **options):
        assert isinstance(dn, DN)
        if not _check_DN_objectclass(ldap, dn, self.obj.object_class):
            raise self.obj.handle_not_found(*nkeys)
        return dn

    def post_callback(self, ldap, dn, *keys, **options):
        try:
            self.obj._remove_permission(keys[-1])
        except errors.NotFound:
            # no permission existed for this zone -- nothing to clean up
            pass

        return True
# Common 'mod' command logic: normalize the returned zone name.
class DNSZoneBase_mod(LDAPUpdate):
    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        self.obj._make_zonename_absolute(entry_attrs, **options)
        return dn
class DNSZoneBase_find(LDAPSearch):
    __doc__ = _('Search for DNS zones (SOA records).')

    def args_options_2_params(self, *args, **options):
        # FIXME: Check that name_from_ip is valid. This is necessary because
        #        custom validation rules, including _validate_ipnet, are not
        #        used when doing a search. Once we have a parameter type for
        #        IP network objects, this will no longer be necessary, as the
        #        parameter type will handle the validation itself (see
        #        <https://fedorahosted.org/freeipa/ticket/2266>).
        if 'name_from_ip' in options:
            self.obj.params['name_from_ip'](unicode(options['name_from_ip']))
        return super(DNSZoneBase_find, self).args_options_2_params(*args, **options)

    def args_options_2_entry(self, *args, **options):
        if 'name_from_ip' in options:
            # derive idnsname from the network and drop the virtual attribute
            if 'idnsname' not in options:
                options['idnsname'] = self.obj.params['idnsname'].get_default(**options)
            del options['name_from_ip']
        search_kw = super(DNSZoneBase_find, self).args_options_2_entry(*args,
                                                                       **options)
        name = search_kw.get('idnsname')
        if name:
            # search for both absolute and relative forms (IPA < 4.0 compat)
            search_kw['idnsname'] = [name, name.relativize(DNSName.root)]
        return search_kw

    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args, **options):
        assert isinstance(base_dn, DN)

        # Check if DNS container exists must be here for find methods
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))

        # rebuild the filter with IDNA variants of the search term
        filter = _create_idn_filter(self, ldap, *args, **options)
        return (filter, base_dn, scope)

    def post_callback(self, ldap, entries, truncated, *args, **options):
        for entry_attrs in entries:
            self.obj._make_zonename_absolute(entry_attrs, **options)
        return truncated
# Common 'show' command logic: reject entries of another zone type and
# normalize the returned zone name.
class DNSZoneBase_show(LDAPRetrieve):
    def pre_callback(self, ldap, dn, attrs_list, *keys, **options):
        assert isinstance(dn, DN)
        if not _check_DN_objectclass(ldap, dn, self.obj.object_class):
            raise self.obj.handle_not_found(*keys)
        return dn

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        self.obj._make_zonename_absolute(entry_attrs, **options)
        return dn
# Set idnsZoneActive to FALSE; mirror image of DNSZoneBase_enable.
class DNSZoneBase_disable(LDAPQuery):
    has_output = output.standard_value

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)
        try:
            entry = ldap.get_entry(dn, ['idnszoneactive', 'objectclass'])
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)

        if not _check_entry_objectclass(entry, self.obj.object_class):
            raise self.obj.handle_not_found(*keys)

        entry['idnszoneactive'] = ['FALSE']

        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # already disabled -- treat as success
            pass

        return dict(result=True, value=pkey_to_value(keys[-1], options))
# Set idnsZoneActive to TRUE; mirror image of DNSZoneBase_disable.
class DNSZoneBase_enable(LDAPQuery):
    has_output = output.standard_value

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)
        try:
            entry = ldap.get_entry(dn, ['idnszoneactive', 'objectclass'])
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)

        if not _check_entry_objectclass(entry, self.obj.object_class):
            raise self.obj.handle_not_found(*keys)

        entry['idnszoneactive'] = ['TRUE']

        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # already enabled -- treat as success
            pass

        return dict(result=True, value=pkey_to_value(keys[-1], options))
# Create the per-zone SYSTEM permission and store its DN in managedBy.
class DNSZoneBase_add_permission(LDAPQuery):
    has_output = _output_permissions
    msg_summary = _('Added system permission "%(value)s"')

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)
        try:
            entry_attrs = ldap.get_entry(dn, ['objectclass'])
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)
        else:
            if not _check_entry_objectclass(
                    entry_attrs, self.obj.object_class):
                raise self.obj.handle_not_found(*keys)

        permission_name = self.obj.permission_name(keys[-1])

        # compatibility with older IPA versions which allows relative zonenames
        if keys[-1] != DNSName.root:  # special case root zone
            permission_name_rel = self.obj.permission_name(
                keys[-1].relativize(DNSName.root)
            )
            try:
                self.api.Object['permission'].get_dn_if_exists(
                    permission_name_rel)
            except errors.NotFound:
                pass
            else:
                # permission exists without absolute domain name
                raise errors.DuplicateEntry(
                    message=_('permission "%(value)s" already exists') % {
                        'value': permission_name
                    }
                )

        permission = self.api.Command['permission_add_noaci'](permission_name,
                         ipapermissiontype=u'SYSTEM'
                     )['result']

        # make sure the zone entry can hold the managedBy attribute
        dnszone_ocs = entry_attrs.get('objectclass')
        if dnszone_ocs:
            for oc in dnszone_ocs:
                if oc.lower() == 'ipadnszone':
                    break
            else:
                dnszone_ocs.append('ipadnszone')

        entry_attrs['managedby'] = [permission['dn']]
        ldap.update_entry(entry_attrs)

        return dict(
            result=True,
            value=pkey_to_value(permission_name, options),
        )
# Clear managedBy on the zone entry and delete the per-zone permission.
class DNSZoneBase_remove_permission(LDAPQuery):
    has_output = _output_permissions
    msg_summary = _('Removed system permission "%(value)s"')

    def execute(self, *keys, **options):
        ldap = self.obj.backend
        dn = self.obj.get_dn(*keys, **options)

        try:
            entry = ldap.get_entry(dn, ['managedby', 'objectclass'])
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)
        else:
            if not _check_entry_objectclass(entry, self.obj.object_class):
                raise self.obj.handle_not_found(*keys)

        # setting to None removes the attribute
        entry['managedby'] = None

        try:
            ldap.update_entry(entry)
        except errors.EmptyModlist:
            # managedBy attribute is clean, lets make sure there is also no
            # dangling DNS zone permission
            pass

        permission_name = self.obj.permission_name(keys[-1])
        self.obj._remove_permission(keys[-1])

        return dict(
            result=True,
            value=pkey_to_value(permission_name, options),
        )
@register()
class dnszone(DNSZoneBase):
    """
    DNS Zone, container for resource records.
    """
    object_name = _('DNS zone')
    object_name_plural = _('DNS zones')
    object_class = DNSZoneBase.object_class + ['idnsrecord', 'idnszone']
    default_attributes = DNSZoneBase.default_attributes + [
        'idnssoamname', 'idnssoarname', 'idnssoaserial', 'idnssoarefresh',
        'idnssoaretry', 'idnssoaexpire', 'idnssoaminimum', 'idnsallowquery',
        'idnsallowtransfer', 'idnssecinlinesigning', 'idnsallowdynupdate',
        'idnsupdatepolicy'
    ] + _record_attributes
    label = _('DNS Zones')
    label_singular = _('DNS Zone')
    takes_params = DNSZoneBase.takes_params + (
        DNSNameParam('idnssoamname?',
            cli_name='name_server',
            label=_('Authoritative nameserver'),
            doc=_('Authoritative nameserver domain name'),
            default=None,  # value will be added in precallback from ldap
        ),
        DNSNameParam('idnssoarname',
            _rname_validator,
            cli_name='admin_email',
            label=_('Administrator e-mail address'),
            doc=_('Administrator e-mail address'),
            default=DNSName(u'hostmaster'),
            normalizer=normalize_zonemgr,
            autofill=True,
        ),
        Int('idnssoaserial',
            cli_name='serial',
            label=_('SOA serial'),
            doc=_('SOA record serial number'),
            minvalue=1,
            maxvalue=4294967295,  # serial is an unsigned 32-bit value
            default_from=_create_zone_serial,
            autofill=True,
        ),
        Int('idnssoarefresh',
            cli_name='refresh',
            label=_('SOA refresh'),
            doc=_('SOA record refresh time'),
            minvalue=0,
            maxvalue=2147483647,
            default=3600,
            autofill=True,
        ),
        Int('idnssoaretry',
            cli_name='retry',
            label=_('SOA retry'),
            doc=_('SOA record retry time'),
            minvalue=0,
            maxvalue=2147483647,
            default=900,
            autofill=True,
        ),
        Int('idnssoaexpire',
            cli_name='expire',
            label=_('SOA expire'),
            doc=_('SOA record expire time'),
            default=1209600,
            minvalue=0,
            maxvalue=2147483647,
            autofill=True,
        ),
        Int('idnssoaminimum',
            cli_name='minimum',
            label=_('SOA minimum'),
            doc=_('How long should negative responses be cached'),
            default=3600,
            minvalue=0,
            maxvalue=2147483647,
            autofill=True,
        ),
        Int('dnsttl?',
            cli_name='ttl',
            label=_('Time to live'),
            doc=_('Time to live for records at zone apex'),
            minvalue=0,
            maxvalue=2147483647, # see RFC 2181
        ),
        Int('dnsdefaultttl?',
            cli_name='default_ttl',
            label=_('Default time to live'),
            doc=_('Time to live for records without explicit TTL definition'),
            minvalue=0,
            maxvalue=2147483647, # see RFC 2181
        ),
        StrEnum('dnsclass?',
            # Deprecated
            cli_name='class',
            flags=['no_option'],
            values=_record_classes,
        ),
        Str('idnsupdatepolicy?',
            cli_name='update_policy',
            label=_('BIND update policy'),
            doc=_('BIND update policy'),
            default_from=lambda idnsname: default_zone_update_policy(idnsname),
            autofill=True
        ),
        Bool('idnsallowdynupdate?',
            cli_name='dynamic_update',
            label=_('Dynamic update'),
            doc=_('Allow dynamic updates.'),
            attribute=True,
            default=False,
            autofill=True
        ),
        Str('idnsallowquery?',
            _validate_bind_aci,
            normalizer=_normalize_bind_aci,
            cli_name='allow_query',
            label=_('Allow query'),
            doc=_('Semicolon separated list of IP addresses or networks which are allowed to issue queries'),
            default=u'any;',  # anyone can issue queries by default
            autofill=True,
        ),
        Str('idnsallowtransfer?',
            _validate_bind_aci,
            normalizer=_normalize_bind_aci,
            cli_name='allow_transfer',
            label=_('Allow transfer'),
            doc=_('Semicolon separated list of IP addresses or networks which are allowed to transfer the zone'),
            default=u'none;',  # zone transfers are disabled by default
            autofill=True,
        ),
        Bool('idnsallowsyncptr?',
            cli_name='allow_sync_ptr',
            label=_('Allow PTR sync'),
            doc=_('Allow synchronization of forward (A, AAAA) and reverse (PTR) records in the zone'),
        ),
        Bool('idnssecinlinesigning?',
            cli_name='dnssec',
            default=False,
            label=_('Allow in-line DNSSEC signing'),
            doc=_('Allow inline DNSSEC signing of records in the zone'),
        ),
        Str('nsec3paramrecord?',
            _validate_nsec3param_record,
            cli_name='nsec3param_rec',
            label=_('NSEC3PARAM record'),
            doc=_('NSEC3PARAM record for zone in format: hash_algorithm flags iterations salt'),
            pattern=r'^\d+ \d+ \d+ (([0-9a-fA-F]{2})+|-)$',
            pattern_errmsg=(u'expected format: <0-255> <0-255> <0-65535> '
                'even-length_hexadecimal_digits_or_hyphen'),
        ),
    )
    # Permissions will be applied to forward zones too.
    # Permissions are stored under api.env.basedn because the DNS container
    # may not exist yet when permissions are updated.
    managed_permissions = {
        'System: Add DNS Entries': {
            'non_object': True,
            'ipapermright': {'add'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
            'replaces': [
                '(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:add dns entries";allow (add) groupdn = "ldap:///cn=add dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
            ],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Read DNS Entries': {
            'non_object': True,
            'ipapermright': {'read', 'search', 'compare'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
            'ipapermdefaultattr': {
                'objectclass',
                'a6record', 'aaaarecord', 'afsdbrecord', 'aplrecord', 'arecord',
                'certrecord', 'cn', 'cnamerecord', 'dhcidrecord', 'dlvrecord',
                'dnamerecord', 'dnsclass', 'dnsdefaultttl', 'dnsttl',
                'dsrecord', 'hinforecord', 'hiprecord', 'idnsallowdynupdate',
                'idnsallowquery', 'idnsallowsyncptr', 'idnsallowtransfer',
                'idnsforwarders', 'idnsforwardpolicy', 'idnsname',
                'idnssecinlinesigning', 'idnssoaexpire', 'idnssoaminimum',
                'idnssoamname', 'idnssoarefresh', 'idnssoaretry',
                'idnssoarname', 'idnssoaserial', 'idnsTemplateAttribute',
                'idnsupdatepolicy',
                'idnszoneactive', 'ipseckeyrecord','keyrecord', 'kxrecord',
                'locrecord', 'managedby', 'mdrecord', 'minforecord',
                'mxrecord', 'naptrrecord', 'nsecrecord', 'nsec3paramrecord',
                'nsrecord', 'nxtrecord', 'ptrrecord', 'rprecord', 'rrsigrecord',
                'sigrecord', 'spfrecord', 'srvrecord', 'sshfprecord',
                'tlsarecord', 'txtrecord', 'urirecord', 'unknownrecord',
            },
            'replaces_system': ['Read DNS Entries'],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Remove DNS Entries': {
            'non_object': True,
            'ipapermright': {'delete'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
            'replaces': [
                '(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:remove dns entries";allow (delete) groupdn = "ldap:///cn=remove dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
            ],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Update DNS Entries': {
            'non_object': True,
            'ipapermright': {'write'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('idnsname=*', 'cn=dns', api.env.basedn),
            'ipapermdefaultattr': {
                'objectclass',  # needed for record templates
                'a6record', 'aaaarecord', 'afsdbrecord', 'aplrecord', 'arecord',
                'certrecord', 'cn', 'cnamerecord', 'dhcidrecord', 'dlvrecord',
                'dnamerecord', 'dnsclass', 'dnsdefaultttl', 'dnsttl',
                'dsrecord', 'hinforecord', 'hiprecord', 'idnsallowdynupdate',
                'idnsallowquery', 'idnsallowsyncptr', 'idnsallowtransfer',
                'idnsforwarders', 'idnsforwardpolicy', 'idnsname',
                'idnssecinlinesigning', 'idnssoaexpire', 'idnssoaminimum',
                'idnssoamname', 'idnssoarefresh', 'idnssoaretry',
                'idnssoarname', 'idnssoaserial', 'idnsTemplateAttribute',
                'idnsupdatepolicy',
                'idnszoneactive', 'ipseckeyrecord','keyrecord', 'kxrecord',
                'locrecord', 'managedby', 'mdrecord', 'minforecord',
                'mxrecord', 'naptrrecord', 'nsecrecord', 'nsec3paramrecord',
                'nsrecord', 'nxtrecord', 'ptrrecord', 'rprecord', 'rrsigrecord',
                'sigrecord', 'spfrecord', 'srvrecord', 'sshfprecord',
                'tlsarecord', 'txtrecord', 'urirecord', 'unknownrecord',
            },
            'replaces': [
                '(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
                '(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy || idnsallowquery || idnsallowtransfer || idnsallowsyncptr || idnsforwardpolicy || idnsforwarders")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
                '(targetattr = "idnsname || cn || idnsallowdynupdate || dnsttl || dnsclass || arecord || aaaarecord || a6record || nsrecord || cnamerecord || ptrrecord || srvrecord || txtrecord || mxrecord || mdrecord || hinforecord || minforecord || afsdbrecord || sigrecord || keyrecord || locrecord || nxtrecord || naptrrecord || kxrecord || certrecord || dnamerecord || dsrecord || sshfprecord || rrsigrecord || nsecrecord || idnsname || idnszoneactive || idnssoamname || idnssoarname || idnssoaserial || idnssoarefresh || idnssoaretry || idnssoaexpire || idnssoaminimum || idnsupdatepolicy || idnsallowquery || idnsallowtransfer || idnsallowsyncptr || idnsforwardpolicy || idnsforwarders || managedby")(target = "ldap:///idnsname=*,cn=dns,$SUFFIX")(version 3.0;acl "permission:update dns entries";allow (write) groupdn = "ldap:///cn=update dns entries,cn=permissions,cn=pbac,$SUFFIX";)',
            ],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Read DNSSEC metadata': {
            'non_object': True,
            'ipapermright': {'read', 'search', 'compare'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsSecKey)'],
            'ipapermdefaultattr': {
                'idnsSecAlgorithm', 'idnsSecKeyCreated', 'idnsSecKeyPublish',
                'idnsSecKeyActivate', 'idnsSecKeyInactive', 'idnsSecKeyDelete',
                'idnsSecKeyZone', 'idnsSecKeyRevoke', 'idnsSecKeySep',
                'idnsSecKeyRef', 'cn', 'objectclass',
            },
            'default_privileges': {'DNS Administrators'},
        },
        'System: Manage DNSSEC metadata': {
            'non_object': True,
            'ipapermright': {'all'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsSecKey)'],
            'ipapermdefaultattr': {
                'idnsSecAlgorithm', 'idnsSecKeyCreated', 'idnsSecKeyPublish',
                'idnsSecKeyActivate', 'idnsSecKeyInactive', 'idnsSecKeyDelete',
                'idnsSecKeyZone', 'idnsSecKeyRevoke', 'idnsSecKeySep',
                'idnsSecKeyRef', 'cn', 'objectclass',
            },
            'default_privileges': {'DNS Servers'},
        },
        'System: Manage DNSSEC keys': {
            'non_object': True,
            'ipapermright': {'all'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=keys', 'cn=sec', 'cn=dns', api.env.basedn),
            'ipapermdefaultattr': {
                'ipaPublicKey', 'ipaPrivateKey', 'ipaSecretKey',
                'ipaWrappingMech','ipaWrappingKey',
                'ipaSecretKeyRef', 'ipk11Private', 'ipk11Modifiable', 'ipk11Label',
                'ipk11Copyable', 'ipk11Destroyable', 'ipk11Trusted',
                'ipk11CheckValue', 'ipk11StartDate', 'ipk11EndDate',
                'ipk11UniqueId', 'ipk11PublicKeyInfo', 'ipk11Distrusted',
                'ipk11Subject', 'ipk11Id', 'ipk11Local', 'ipk11KeyType',
                'ipk11Derive', 'ipk11KeyGenMechanism', 'ipk11AllowedMechanisms',
                'ipk11Encrypt', 'ipk11Verify', 'ipk11VerifyRecover', 'ipk11Wrap',
                'ipk11WrapTemplate', 'ipk11Sensitive', 'ipk11Decrypt',
                'ipk11Sign', 'ipk11SignRecover', 'ipk11Unwrap',
                'ipk11Extractable', 'ipk11AlwaysSensitive',
                'ipk11NeverExtractable', 'ipk11WrapWithTrusted',
                'ipk11UnwrapTemplate', 'ipk11AlwaysAuthenticate',
                'objectclass',
            },
            'default_privileges': {'DNS Servers'},
        },
    }

    def _rr_zone_postprocess(self, record, **options):
        """Decode IDN record names from ACE to Unicode for display."""
        #Decode IDN ACE form to Unicode, raw records are passed directly from LDAP
        if options.get('raw', False):
            return
        _records_idn_postprocess(record, **options)

    def _warning_forwarding(self, result, **options):
        """Attach a ForwardersWarning when the zone has forwarders set."""
        if ('idnsforwarders' in result['result']):
            messages.add_message(options.get('version', VERSION_WITHOUT_CAPABILITIES),
                                 result, messages.ForwardersWarning())

    def _warning_name_server_option(self, result, context, **options):
        """Warn that --name-server only sets the SOA MNAME attribute.

        Emitted only when the per-request context flag was set by a
        pre-callback that saw the option being used.
        """
        if getattr(context, 'show_warning_nameserver_option', False):
            messages.add_message(
                options['version'],
                result, messages.OptionSemanticChangedWarning(
                    label=_(u"setting Authoritative nameserver"),
                    current_behavior=_(u"It is used only for setting the "
                                       u"SOA MNAME attribute."),
                    hint=_(u"NS record(s) can be edited in zone apex - '@'. ")
                )
            )

    def _warning_fw_zone_is_not_effective(self, result, *keys, **options):
        """
        Warning if any operation with zone causes, a child forward zone is
        not effective
        """
        zone = keys[-1]
        affected_fw_zones, _truncated = _find_subtree_forward_zones_ldap(
            self.api, zone, child_zones_only=True)
        if not affected_fw_zones:
            return
        for fwzone in affected_fw_zones:
            _add_warning_fw_zone_is_not_effective(self.api, result, fwzone,
                                                  options['version'])

    def _warning_dnssec_master_is_not_installed(self, result, **options):
        """Warn when in-line signing is on but no DNSSEC master exists."""
        dnssec_enabled = result['result'].get("idnssecinlinesigning", False)
        if dnssec_enabled and not dnssec_installed(self.api.Backend.ldap2):
            messages.add_message(
                options['version'],
                result,
                messages.DNSSECMasterNotInstalled()
            )

    def _warning_ttl_changed_reload_needed(self, result, **options):
        """Warn that changing the default TTL requires a named restart."""
        if 'dnsdefaultttl' in options:
            messages.add_message(
                options['version'],
                result,
                messages.ServiceRestartRequired(
                    service=services.service('named', api).systemd_name,
                    server=_('<all IPA DNS servers>'), )
            )
@register()
class dnszone_add(DNSZoneBase_add):
    __doc__ = _('Create new DNS zone (SOA record).')
    takes_options = DNSZoneBase_add.takes_options + (
        Flag('force',
             doc=_('Force DNS zone creation even if nameserver is not '
                   'resolvable. (Deprecated)'),
        ),
        Flag('skip_nameserver_check',
             doc=_('Force DNS zone creation even if nameserver is not '
                   'resolvable.'),
        ),
        # Deprecated
        # ip-address option is not used anymore, we have to keep it
        # due to compatibility with clients older than 4.1
        Str('ip_address?',
            flags=['no_option', ]
        ),
    )

    def _warning_deprecated_option(self, result, **options):
        """Warn that the ignored --ip-address option was supplied."""
        if 'ip_address' in options:
            messages.add_message(
                options['version'],
                result,
                messages.OptionDeprecatedWarning(
                    option='ip-address',
                    additional_info=u"Value will be ignored.")
            )

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Fill SOA MNAME and the apex NS records before the zone is added.

        If the user supplied --name-server, validate it (reverse zones need
        an absolute name; resolvability is checked unless skipped); otherwise
        pick this server or the first DNS-capable server as SOA MNAME.
        """
        assert isinstance(dn, DN)
        # --force implies skipping the nameserver resolvability check
        if options.get('force'):
            options['skip_nameserver_check'] = True
        dn = super(dnszone_add, self).pre_callback(
            ldap, dn, entry_attrs, attrs_list, *keys, **options)
        nameservers = [normalize_zone(x) for x in
                       self.api.Object.dnsrecord.get_dns_masters()]
        server = normalize_zone(api.env.host)
        zone = keys[-1]
        if entry_attrs.get('idnssoamname'):
            if zone.is_reverse() and not entry_attrs['idnssoamname'].is_absolute():
                raise errors.ValidationError(
                    name='name-server',
                    error=_("Nameserver for reverse zone cannot be a relative DNS name"))
            # verify if user specified server is resolvable
            if not options['skip_nameserver_check']:
                check_ns_rec_resolvable(keys[0], entry_attrs['idnssoamname'])
            # show warning about --name-server option
            context.show_warning_nameserver_option = True
        else:
            # user didn't specify SOA mname
            if server in nameservers:
                # current ipa server is authoritative nameserver in SOA record
                entry_attrs['idnssoamname'] = [server]
            else:
                # a first DNS capable server is authoritative nameserver in SOA record
                entry_attrs['idnssoamname'] = [nameservers[0]]
        # all ipa DNS servers should be in NS zone record (as absolute domain name)
        entry_attrs['nsrecord'] = nameservers
        return dn

    def execute(self, *keys, **options):
        """Add the zone, then attach all applicable client warnings."""
        result = super(dnszone_add, self).execute(*keys, **options)
        self._warning_deprecated_option(result, **options)
        self.obj._warning_forwarding(result, **options)
        self.obj._warning_name_server_option(result, context, **options)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        self.obj._warning_dnssec_master_is_not_installed(result, **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Register the new zone in realmdomains and decode IDN names."""
        assert isinstance(dn, DN)
        # Add entry to realmdomains
        # except for our own domain, reverse zones and root zone
        zone = keys[0]
        if (zone != DNSName(api.env.domain).make_absolute() and
                not zone.is_reverse() and
                zone != DNSName.root):
            try:
                self.api.Command['realmdomains_mod'](add_domain=unicode(zone),
                                                     force=True)
            except (errors.EmptyModlist, errors.ValidationError):
                pass
        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return dn
@register()
class dnszone_del(DNSZoneBase_del):
    __doc__ = _('Delete DNS zone (SOA record).')
    msg_summary = _('Deleted DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Delete the zone(s) and warn about orphaned child forward zones."""
        result = super(dnszone_del, self).execute(*keys, **options)
        # several zones may be deleted in one call; warn for each of them
        for deleted_zone in keys[-1]:
            self.obj._warning_fw_zone_is_not_effective(
                result, deleted_zone, **options)
        return result

    def post_callback(self, ldap, dn, *keys, **options):
        """Drop the deleted zone from realmdomains when applicable."""
        super(dnszone_del, self).post_callback(ldap, dn, *keys, **options)
        zone = keys[0].make_absolute()
        own_domain = DNSName(api.env.domain).make_absolute()
        # realmdomains never tracks our own domain, reverse zones or the
        # root zone, so there is nothing to remove for those
        tracked = (zone != own_domain and not zone.is_reverse()
                   and zone != DNSName.root)
        if tracked:
            try:
                self.api.Command['realmdomains_mod'](
                    del_domain=unicode(zone), force=True)
            except (errors.AttrValueNotFound, errors.ValidationError):
                pass
        return True
@register()
class dnszone_mod(DNSZoneBase_mod):
    __doc__ = _('Modify DNS zone (SOA record).')
    takes_options = DNSZoneBase_mod.takes_options + (
        Flag('force',
             label=_('Force'),
             doc=_('Force nameserver change even if nameserver not in DNS')),
    )

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list,
                     *keys, **options):
        """Validate a changed SOA MNAME before the LDAP modify happens."""
        if not _check_DN_objectclass(ldap, dn, self.obj.object_class):
            raise self.obj.handle_not_found(*keys)
        if 'idnssoamname' not in entry_attrs:
            return dn
        nameserver = entry_attrs['idnssoamname']
        if not nameserver:
            # empty value, this option is required by ldap
            raise errors.ValidationError(
                name='name_server',
                error=_(u"is required"))
        if not nameserver.is_empty() and not options['force']:
            check_ns_rec_resolvable(keys[0], nameserver)
        context.show_warning_nameserver_option = True
        return dn

    def execute(self, *keys, **options):
        """Modify the zone, then attach all applicable client warnings."""
        res = super(dnszone_mod, self).execute(*keys, **options)
        self.obj._warning_forwarding(res, **options)
        self.obj._warning_name_server_option(res, context, **options)
        self.obj._warning_dnssec_master_is_not_installed(res, **options)
        self.obj._warning_ttl_changed_reload_needed(res, **options)
        return res

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Decode IDN names in the modified entry before returning."""
        updated_dn = super(dnszone_mod, self).post_callback(
            ldap, dn, entry_attrs, *keys, **options)
        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return updated_dn
@register()
class dnszone_find(DNSZoneBase_find):
    __doc__ = _('Search for DNS zones (SOA records).')
    takes_options = DNSZoneBase_find.takes_options + (
        Flag('forward_only',
             label=_('Forward zones only'),
             cli_name='forward_only',
             doc=_('Search for forward zones only'),
        ),
    )

    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args, **options):
        """Optionally narrow the LDAP filter to exclude reverse zones."""
        assert isinstance(base_dn, DN)
        filter, _base, _scope = super(dnszone_find, self).pre_callback(
            ldap, filter, attrs_list, base_dn, scope, *args, **options)
        if options.get('forward_only', False):
            # exclude every idnsname matching a known reverse-zone suffix
            reverse_names = {
                'idnsname': [revzone.ToASCII() for revzone in
                             REVERSE_DNS_ZONES],
            }
            exclude_reverse = ldap.make_filter(reverse_names,
                                               rules=ldap.MATCH_NONE,
                                               exact=False,
                                               trailing_wildcard=False)
            filter = ldap.combine_filters((exclude_reverse, filter),
                                          rules=ldap.MATCH_ALL)
        return (filter, base_dn, scope)

    def post_callback(self, ldap, entries, truncated, *args, **options):
        """Decode IDN names in every returned zone entry."""
        was_truncated = super(dnszone_find, self).post_callback(
            ldap, entries, truncated, *args, **options)
        for zone_entry in entries:
            self.obj._rr_zone_postprocess(zone_entry, **options)
        return was_truncated
@register()
class dnszone_show(DNSZoneBase_show):
    __doc__ = _('Display information about a DNS zone (SOA record).')

    def execute(self, *keys, **options):
        """Show the zone, then attach forwarding/DNSSEC warnings."""
        res = super(dnszone_show, self).execute(*keys, **options)
        self.obj._warning_forwarding(res, **options)
        self.obj._warning_dnssec_master_is_not_installed(res, **options)
        return res

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Decode IDN names in the shown entry before returning."""
        shown_dn = super(dnszone_show, self).post_callback(
            ldap, dn, entry_attrs, *keys, **options)
        self.obj._rr_zone_postprocess(entry_attrs, **options)
        return shown_dn
@register()
class dnszone_disable(DNSZoneBase_disable):
    __doc__ = _('Disable DNS Zone.')
    msg_summary = _('Disabled DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Disable the zone and warn about ineffective child forward zones."""
        res = super(dnszone_disable, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(res, *keys, **options)
        return res
@register()
class dnszone_enable(DNSZoneBase_enable):
    __doc__ = _('Enable DNS Zone.')
    msg_summary = _('Enabled DNS zone "%(value)s"')

    def execute(self, *keys, **options):
        """Enable the zone and warn about ineffective child forward zones."""
        res = super(dnszone_enable, self).execute(*keys, **options)
        self.obj._warning_fw_zone_is_not_effective(res, *keys, **options)
        return res
@register()
class dnszone_add_permission(DNSZoneBase_add_permission):
    # Thin registration wrapper: all logic lives in DNSZoneBase_add_permission.
    __doc__ = _('Add a permission for per-zone access delegation.')
@register()
class dnszone_remove_permission(DNSZoneBase_remove_permission):
    # Thin registration wrapper: all logic lives in DNSZoneBase_remove_permission.
    __doc__ = _('Remove a permission for per-zone access delegation.')
@register()
class dnsrecord(LDAPObject):
    """
    DNS record.
    """
    parent_object = 'dnszone'
    container_dn = api.env.container_dns
    object_name = _('DNS resource record')
    object_name_plural = _('DNS resource records')
    object_class = ['top', 'idnsrecord']
    possible_objectclasses = ['idnsTemplateObject']
    permission_filter_objectclasses = ['idnsrecord']
    # idnsname plus one attribute per supported RR type
    default_attributes = ['idnsname'] + _record_attributes
    allow_rename = True
    label = _('DNS Resource Records')
    label_singular = _('DNS Resource Record')
    takes_params = (
        DNSNameParam('idnsname',
            cli_name='name',
            label=_('Record name'),
            doc=_('Record name'),
            primary_key=True,
        ),
        Int('dnsttl?',
            cli_name='ttl',
            label=_('Time to live'),
            doc=_('Time to live'),
            minvalue=0,
            maxvalue=2147483647, # see RFC 2181,
        ),
        StrEnum('dnsclass?',
            # Deprecated
            cli_name='class',
            flags=['no_option'],
            values=_record_classes,
        ),
    ) + _dns_record_options
    # Option definition shared by record commands that can return
    # parsed (structured) output.
    structured_flag = Flag('structured',
                        label=_('Structured'),
                        doc=_('Parse all raw DNS records and return them in a structured way'),
                        )
def _dsrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
assert isinstance(dn, DN)
dsrecords = entry_attrs.get('dsrecord')
if dsrecords and self.is_pkey_zone_record(*keys):
raise errors.ValidationError(
name='dsrecord',
error=unicode(_('DS record must not be in zone apex (RFC 4035 section 2.4)')))
def _nsrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
assert isinstance(dn, DN)
nsrecords = entry_attrs.get('nsrecord')
if options.get('force', False) or nsrecords is None:
return
for nsrecord in nsrecords:
check_ns_rec_resolvable(keys[0], DNSName(nsrecord))
    def _idnsname_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Normalize the record name and reject disallowed owner names.

        Absolute names inside the zone are rewritten to relative form;
        absolute out-of-zone names (other than the zone apex) are rejected.
        Wildcard owners are refused for DNAME, DS and NS records.
        """
        assert isinstance(dn, DN)
        if keys[-1].is_absolute():
            if keys[-1].is_subdomain(keys[-2]):
                # store the record name relative to the zone
                entry_attrs['idnsname'] = [keys[-1].relativize(keys[-2])]
            elif not self.is_pkey_zone_record(*keys):
                raise errors.ValidationError(name='idnsname',
                        error=unicode(_('out-of-zone data: record name must '
                                        'be a subdomain of the zone or a '
                                        'relative name')))
        # disallowed wildcard (RFC 4592 section 4)
        no_wildcard_rtypes = ['DNAME', 'DS', 'NS']
        if (keys[-1].is_wild() and
                any(entry_attrs.get(record_name_format % r.lower())
                    for r in no_wildcard_rtypes)
        ):
            raise errors.ValidationError(
                name='idnsname',
                error=(_('owner of %(types)s records '
                    'should not be a wildcard domain name (RFC 4592 section 4)') %
                    {'types': ', '.join(no_wildcard_rtypes)}
                )
            )
    def _ptrrecord_pre_callback(self, ldap, dn, entry_attrs, *keys, **options):
        """Check a PTR record name forms a complete reverse address.

        The reverse zone plus the record name must together supply exactly
        the number of labels required by the matching reverse suffix
        (in-addr.arpa / ip6.arpa). Classless reverse zones (names
        containing '/' or '-') are skipped entirely.
        """
        assert isinstance(dn, DN)
        ptrrecords = entry_attrs.get('ptrrecord')
        if ptrrecords is None:
            return
        zone = keys[-2]
        if self.is_pkey_zone_record(*keys):
            addr = _dns_zone_record
        else:
            addr = keys[-1]
        zone_len = 0
        # zone_name/zone_len are (re)bound to the matching reverse suffix;
        # zone_len stays 0 when no suffix matches, which triggers the error
        # below before zone_name could be read unbound
        for valid_zone in REVERSE_DNS_ZONES:
            if zone.is_subdomain(valid_zone):
                zone = zone.relativize(valid_zone)
                zone_name = valid_zone
                zone_len = REVERSE_DNS_ZONES[valid_zone]
        if not zone_len:
            allowed_zones = ', '.join([unicode(revzone) for revzone in
                                       REVERSE_DNS_ZONES])
            raise errors.ValidationError(name='ptrrecord',
                    error=unicode(_('Reverse zone for PTR record should be a sub-zone of one the following fully qualified domains: %s') % allowed_zones))
        addr_len = len(addr.labels)
        # Classless zones (0/25.0.0.10.in-addr.arpa.) -> skip check
        # zone has to be checked without reverse domain suffix (in-addr.arpa.)
        for sign in (b'/', b'-'):
            for name in (zone, addr):
                for label in name.labels:
                    if sign in label:
                        return
        ip_addr_comp_count = addr_len + len(zone.labels)
        if ip_addr_comp_count != zone_len:
            raise errors.ValidationError(name='ptrrecord',
                error=unicode(_('Reverse zone %(name)s requires exactly '
                                '%(count)d IP address components, '
                                '%(user_count)d given')
                % dict(name=zone_name,
                       count=zone_len,
                       user_count=ip_addr_comp_count)))
def run_precallback_validators(self, dn, entry_attrs, *keys, **options):
assert isinstance(dn, DN)
ldap = self.api.Backend.ldap2
for rtype in entry_attrs.keys():
rtype_cb = getattr(self, '_%s_pre_callback' % rtype, None)
if rtype_cb:
rtype_cb(ldap, dn, entry_attrs, *keys, **options)
def is_pkey_zone_record(self, *keys):
assert isinstance(keys[-1], DNSName)
assert isinstance(keys[-2], DNSName)
idnsname = keys[-1]
zonename = keys[-2]
if idnsname.is_empty() or idnsname == zonename:
return True
return False
def check_zone(self, zone, **options):
"""
Check if zone exists and if is master zone
"""
parent_object = self.api.Object[self.parent_object]
dn = parent_object.get_dn(zone, **options)
ldap = self.api.Backend.ldap2
try:
entry = ldap.get_entry(dn, ['objectclass'])
except errors.NotFound:
raise parent_object.handle_not_found(zone)
else:
# only master zones can contain records
if 'idnszone' not in [x.lower()
for x in entry.get('objectclass', [])]:
raise errors.ValidationError(
name='dnszoneidnsname',
error=_(u'only master zones can contain records')
)
return dn
def get_dn(self, *keys, **options):
if not dns_container_exists(self.api.Backend.ldap2):
raise errors.NotFound(reason=_('DNS is not configured'))
dn = self.check_zone(keys[-2], **options)
if self.is_pkey_zone_record(*keys):
return dn
#Make RR name relative if possible
relative_name = keys[-1].relativize(keys[-2]).ToASCII()
keys = keys[:-1] + (relative_name,)
return super(dnsrecord, self).get_dn(*keys, **options)
def attr_to_cli(self, attr):
cliname = get_record_rrtype(attr)
if not cliname:
cliname = attr
return cliname
    def get_dns_masters(self):
        # Return servers providing the DNS role; preferred_hosts puts the
        # local server first (see find_providing_servers for ordering).
        return find_providing_servers(
            'DNS', self.api.Backend.ldap2, preferred_hosts=[api.env.host]
        )
def get_record_entry_attrs(self, entry_attrs):
entry_attrs = entry_attrs.copy()
for attr in tuple(entry_attrs.keys()):
if attr not in self.params or self.params[attr].primary_key:
del entry_attrs[attr]
return entry_attrs
    def postprocess_record(self, record, **options):
        """Convert raw LDAP record attributes for client consumption.

        With --structured, each RR value is exploded into a dict of its
        named parts and collected under the 'dnsrecords' key; otherwise
        IDN names are decoded from ACE to Unicode (unless --raw is set).
        """
        if options.get('structured', False):
            for attr in tuple(record.keys()):
                # attributes in LDAPEntry may not be normalized
                attr = attr.lower()
                try:
                    param = self.params[attr]
                except KeyError:
                    continue
                if not isinstance(param, DNSRecord):
                    continue
                parts_params = param.get_parts()
                for dnsvalue in record[attr]:
                    dnsentry = {
                        u'dnstype' : unicode(param.rrtype),
                        u'dnsdata' : dnsvalue
                    }
                    values = param._get_part_values(dnsvalue)
                    # unparsable values keep only dnstype/dnsdata
                    if values is None:
                        continue
                    for val_id, val in enumerate(values):
                        if val is not None:
                            #decode IDN
                            if isinstance(parts_params[val_id], DNSNameParam):
                                dnsentry[parts_params[val_id].name] = \
                                    _dns_name_to_string(val,
                                                        options.get('raw', False))
                            else:
                                dnsentry[parts_params[val_id].name] = val
                    record.setdefault('dnsrecords', []).append(dnsentry)
                del record[attr]
        elif not options.get('raw', False):
            #Decode IDN ACE form to Unicode, raw records are passed directly from LDAP
            _records_idn_postprocess(record, **options)
def updated_rrattrs(self, old_entry, entry_attrs):
"""Returns updated RR attributes
"""
rrattrs = {}
if old_entry is not None:
old_rrattrs = dict((key, value) for key, value in old_entry.items()
if key in self.params and
isinstance(self.params[key], DNSRecord))
rrattrs.update(old_rrattrs)
new_rrattrs = dict((key, value) for key, value in entry_attrs.items()
if key in self.params and
isinstance(self.params[key], DNSRecord))
rrattrs.update(new_rrattrs)
return rrattrs
    def check_record_type_collisions(self, keys, rrattrs):
        """Reject forbidden combinations of record types at one name.

        Enforces: at most one CNAME, which must not coexist with any other
        type; at most one DNAME; NS only alongside A/AAAA/DS/NS outside
        the zone apex.
        """
        # Test that only allowed combination of record types was created
        # CNAME record validation
        cnames = rrattrs.get('cnamerecord')
        if cnames is not None:
            if len(cnames) > 1:
                raise errors.ValidationError(name='cnamerecord',
                    error=_('only one CNAME record is allowed per name '
                            '(RFC 2136, section 1.1.5)'))
            # NOTE(review): an empty list value also satisfies
            # "is not None" here and counts as a coexisting record —
            # confirm this is intended for attributes being cleared
            if any(rrvalue is not None
                   and rrattr != 'cnamerecord'
                   for rrattr, rrvalue in rrattrs.items()):
                raise errors.ValidationError(name='cnamerecord',
                      error=_('CNAME record is not allowed to coexist '
                              'with any other record (RFC 1034, section 3.6.2)'))
        # DNAME record validation
        dnames = rrattrs.get('dnamerecord')
        if dnames is not None:
            if len(dnames) > 1:
                raise errors.ValidationError(name='dnamerecord',
                    error=_('only one DNAME record is allowed per name '
                            '(RFC 6672, section 2.4)'))
            # DNAME must not coexist with CNAME, but this is already checked earlier
        # NS record validation
        # NS record can coexist only with A, AAAA, DS, and other NS records (except zone apex)
        # RFC 2181 section 6.1,
        allowed_records = ['AAAA', 'A', 'DS', 'NS']
        nsrecords = rrattrs.get('nsrecord')
        if nsrecords and not self.is_pkey_zone_record(*keys):
            for r_type in _record_types:
                if (r_type not in allowed_records
                        and rrattrs.get(record_name_format % r_type.lower())
                ):
                    raise errors.ValidationError(
                        name='nsrecord',
                        error=_('NS record is not allowed to coexist with an '
                                '%(type)s record except when located in a '
                                'zone root record (RFC 2181, section 6.1)') %
                                {'type': r_type})
def check_record_type_dependencies(self, keys, rrattrs):
# Test that all record type dependencies are satisfied
# DS record validation
# DS record requires to coexists with NS record
dsrecords = rrattrs.get('dsrecord')
nsrecords = rrattrs.get('nsrecord')
# DS record cannot be in zone apex, checked in pre-callback validators
if dsrecords and not nsrecords:
raise errors.ValidationError(
name='dsrecord',
error=_('DS record requires to coexist with an '
'NS record (RFC 4592 section 4.6, RFC 4035 section 2.4)'))
    def _entry2rrsets(self, entry_attrs, dns_name, dns_domain):
        '''Convert entry_attrs to a dictionary {rdtype: rrset}.

        :returns:
            None if entry_attrs is None
            {rdtype: None} if RRset of given type is empty
            {rdtype: RRset} if RRset of given type is non-empty
        '''
        ldap_rrsets = {}
        if not entry_attrs:
            # all records were deleted => name should not exist in DNS
            return None
        for attr, value in entry_attrs.items():
            # skip attributes that do not map to a DNS RR type
            rrtype = get_record_rrtype(attr)
            if not rrtype:
                continue
            rdtype = dns.rdatatype.from_text(rrtype)
            if not value:
                ldap_rrsets[rdtype] = None  # RRset is empty
                continue
            try:
                # TTL here can be arbitrary value because it is ignored
                # during comparison
                ldap_rrset = dns.rrset.from_text(
                    dns_name, 86400, dns.rdataclass.IN, rdtype,
                    *[str(v) for v in value])
                # make sure that all names are absolute so RRset
                # comparison will work
                for ldap_rr in ldap_rrset:
                    ldap_rr.choose_relativity(origin=dns_domain,
                                              relativize=False)
                ldap_rrsets[rdtype] = ldap_rrset
            except dns.exception.SyntaxError as e:
                logger.error('DNS syntax error: %s %s %s: %s', dns_name,
                             dns.rdatatype.to_text(rdtype), value, e)
                raise
        return ldap_rrsets
def wait_for_modified_attr(self, ldap_rrset, rdtype, dns_name):
    '''Wait until DNS resolver returns up-to-date answer for given RRset
    or until the maximum number of attempts is reached.

    Number of attempts is controlled by self.api.env['wait_for_dns'].

    :param ldap_rrset:
        None if given rdtype should not exist or
        dns.rrset.RRset to match against data in DNS.
    :param rdtype: RR type to query for
    :param dns_name: FQDN to query
    :type dns_name: dns.name.Name
    :return: None if data in DNS and LDAP match
    :raises errors.DNSDataMismatch: if data in DNS and LDAP doesn't match
    :raises dns.exception.DNSException: if DNS resolution failed
    '''
    resolver = DNSResolver()
    resolver.set_flags(0)  # disable recursion (for NS RR checks)
    max_attempts = int(self.api.env['wait_for_dns'])
    warn_attempts = max_attempts // 2
    period = 1  # second
    attempt = 0
    # Initialize up front: if max_attempts <= 0 the loop body never runs
    # and the final DNSDataMismatch below would otherwise raise
    # UnboundLocalError instead of reporting the mismatch.
    dns_rrset = None
    log_fn = logger.debug
    log_fn('querying DNS server: expecting answer {%s}', ldap_rrset)
    wait_template = 'waiting for DNS answer {%s}: got {%s} (attempt %s); '\
                    'waiting %s seconds before next try'

    while attempt < max_attempts:
        if attempt >= warn_attempts:
            # escalate to warnings once half of the attempts are used up
            log_fn = logger.warning
        attempt += 1
        try:
            dns_answer = resolver.resolve(dns_name, rdtype,
                                          dns.rdataclass.IN,
                                          raise_on_no_answer=False)
            dns_rrset = None
            if rdtype == _NS:
                # NS records can be in Authority section (sometimes)
                dns_rrset = dns_answer.response.get_rrset(
                    dns_answer.response.authority, dns_name, _IN, rdtype)

            if not dns_rrset:
                # Look for NS and other data in Answer section
                dns_rrset = dns_answer.rrset

            if dns_rrset == ldap_rrset:
                log_fn('DNS answer matches expectations (attempt %s)',
                       attempt)
                return

            log_msg = wait_template % (ldap_rrset, dns_answer.response,
                                       attempt, period)

        except (dns.resolver.NXDOMAIN,
                dns.resolver.YXDOMAIN,
                dns.resolver.NoNameservers,
                dns.resolver.Timeout) as e:
            if attempt >= max_attempts:
                # out of retries; propagate the resolver error as-is
                raise
            else:
                log_msg = wait_template % (ldap_rrset, type(e), attempt,
                                           period)

        log_fn(log_msg)
        time.sleep(period)

    # Maximum number of attempts was reached
    raise errors.DNSDataMismatch(expected=ldap_rrset, got=dns_rrset)
def wait_for_modified_attrs(self, entry_attrs, dns_name, dns_domain):
    '''Wait until DNS resolver returns up-to-date answer for given entry
    or until the maximum number of attempts is reached.

    :param entry_attrs:
        None if the entry was deleted from LDAP or
        LDAPEntry instance containing at least all modified attributes.
    :param dns_name: FQDN
    :type dns_name: dns.name.Name
    :param dns_domain: zone the name belongs to
    :raises errors.DNSDataMismatch: if data in DNS and LDAP doesn't match
    '''
    # represent data in LDAP as dictionary rdtype => rrset
    ldap_rrsets = self._entry2rrsets(entry_attrs, dns_name, dns_domain)
    nxdomain = ldap_rrsets is None
    if nxdomain:
        # name should not exist => ask for A record and check result
        ldap_rrsets = {dns.rdatatype.from_text('A'): None}

    for rdtype, ldap_rrset in ldap_rrsets.items():
        try:
            self.wait_for_modified_attr(ldap_rrset, rdtype, dns_name)

        except dns.resolver.NXDOMAIN as e:
            if nxdomain:
                # NXDOMAIN is exactly what we expect for a deleted name
                continue
            # name unexpectedly missing: convert to an IPA-level error
            e = errors.DNSDataMismatch(expected=ldap_rrset,
                                       got="NXDOMAIN")
            logger.error('%s', e)
            raise e

        except dns.resolver.NoNameservers as e:
            # Do not raise exception if we have got SERVFAILs.
            # Maybe the user has created an invalid zone intentionally.
            logger.warning('waiting for DNS answer {%s}: got {%s}; '
                           'ignoring', ldap_rrset, type(e))
            continue

        except dns.exception.DNSException as e:
            # any other DNS failure: report it as a data mismatch with a
            # human-readable description of the underlying exception
            err_desc = str(type(e))
            err_str = str(e)
            if err_str:
                err_desc += ": %s" % err_str
            e = errors.DNSDataMismatch(expected=ldap_rrset, got=err_desc)
            logger.error('%s', e)
            raise e
def wait_for_modified_entries(self, entries):
    '''Call wait_for_modified_attrs for all entries in given dict.

    :param entries:
        Dict {(dns_domain, dns_name): entry_for_wait_for_modified_attrs}
    '''
    # Unpack the (zone, relative name) key directly and absolutize the
    # name against its zone before delegating.
    for (dns_domain, relative_name), entry in entries.items():
        absolute_name = relative_name.derelativize(dns_domain)
        self.wait_for_modified_attrs(entry, absolute_name, dns_domain)
def warning_if_ns_change_cause_fwzone_ineffective(self, result, *keys,
                                                  **options):
    """Detect if NS record change can make forward zones ineffective due
    missing delegation. Run after parent's execute method.
    """
    zone = keys[-2]
    record_name = keys[-1]
    if not record_name.is_absolute():
        # make the record name absolute relative to its zone
        record_name = record_name.derelativize(zone)

    affected_fw_zones, _truncated = _find_subtree_forward_zones_ldap(
        self.api, record_name)

    # attach one warning per forward zone under the changed delegation
    for fwzone in affected_fw_zones or ():
        _add_warning_fw_zone_is_not_effective(self.api, result, fwzone,
                                              options['version'])
def warning_suspicious_relative_name(self, result, *keys, **options):
    """Detect if zone name is suffix of relative record name and warn.

    Zone name: test.zone.
    Relative name: record.test.zone
    (the user most likely meant just 'record')
    """
    record_name = keys[-1]
    zone = keys[-2]
    # only relative names can be suspicious; compare against the zone
    # name with its trailing root label stripped
    if not record_name.is_absolute() and record_name.is_subdomain(
            zone.relativize(DNSName.root)):
        messages.add_message(
            options['version'],
            result,
            messages.DNSSuspiciousRelativeName(record=record_name,
                                               zone=zone,
                                               fqdn=record_name + zone)
        )
# Make DNS record types available as objects in the API.
# This is used by the CLI to get otherwise unavailable attributes of record
# parts.
for param in _dns_records:
    # Dynamically build and register one API Object subclass per RR type
    # (named e.g. 'dnsarecord'), exposing the record's part parameters.
    register()(
        type(
            'dns{}record'.format(param.rrtype.lower()),
            (Object,),
            dict(
                takes_params=param.parts or (),
            )
        )
    )
@register()
class dnsrecord_split_parts(Command):
    __doc__ = _('Split DNS record to parts')

    NO_CLI = True

    takes_args = (
        Str('name'),
        Str('value'),
    )

    def execute(self, name, value, *args, **options):
        # The record parameter itself knows how to decompose a raw value
        # into its named parts; delegate to it.
        record_param = self.api.Object.dnsrecord.params[name]
        return dict(result=record_param._get_part_values(value))
@register()
class dnsrecord_add(LDAPCreate):
    __doc__ = _('Add new DNS resource record.')

    no_option_msg = 'No options to add a specific record provided.\n' \
            "Command help may be consulted for all supported record types."
    takes_options = LDAPCreate.takes_options + (
        Flag('force',
             label=_('Force'),
             doc=_('force NS record creation even if its hostname is not in DNS'),
        ),
        dnsrecord.structured_flag,
    )

    def args_options_2_entry(self, *keys, **options):
        # refuse to proceed when no record-type option was given at all
        has_cli_options(self, options, self.no_option_msg)
        return super(dnsrecord_add, self).args_options_2_entry(*keys, **options)

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Build raw record values from per-part options, run per-type
        validators, and merge with the pre-existing entry so the record
        type dependency/collision checks see the final record set."""
        assert isinstance(dn, DN)

        precallback_attrs = []   # RR attrs that need per-type pre_callback
        processed_attrs = []     # RR attrs already assembled from parts
        for option in options:
            try:
                param = self.params[option]
            except KeyError:
                continue

            rrparam = get_rrparam_from_part(self, option)
            if rrparam is None:
                # option is not tied to any RR type
                continue

            if get_part_rrtype(param.name):
                if rrparam.name in processed_attrs:
                    # this record was already entered
                    continue
                if rrparam.name in entry_attrs:
                    # this record is entered both via parts and raw records
                    raise errors.ValidationError(name=param.cli_name or param.name,
                            error=_('Raw value of a DNS record was already set by "%(name)s" option') \
                                  % dict(name=rrparam.cli_name or rrparam.name))

                # assemble the raw record value from its parts
                parts = rrparam.get_parts_from_kw(options)
                dnsvalue = [rrparam._convert_scalar(parts)]
                entry_attrs[rrparam.name] = dnsvalue
                processed_attrs.append(rrparam.name)
                continue

            if get_extra_rrtype(param.name):
                # do not run precallback for unset flags
                if isinstance(param, Flag) and not options[option]:
                    continue
                # extra option is passed, run per-type pre_callback for given RR type
                precallback_attrs.append(rrparam.name)

        # Run pre_callback validators
        self.obj.run_precallback_validators(dn, entry_attrs, *keys, **options)

        # run precallback also for all new RR type attributes in entry_attrs
        for attr in entry_attrs.keys():
            try:
                param = self.params[attr]
            except KeyError:
                continue
            if not isinstance(param, DNSRecord):
                continue
            precallback_attrs.append(attr)

        precallback_attrs = list(set(precallback_attrs))

        for attr in precallback_attrs:
            # run per-type
            try:
                param = self.params[attr]
            except KeyError:
                continue
            param.dnsrecord_add_pre_callback(ldap, dn, entry_attrs, attrs_list, *keys, **options)

        # Store all new attrs so that DNSRecord post callback is called for
        # new attributes only and not for all attributes in the LDAP entry
        setattr(context, 'dnsrecord_precallback_attrs', precallback_attrs)

        # We always want to retrieve all DNS record attributes to test for
        # record type collisions (#2601)
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            old_entry = None
        else:
            # merge new values with those already stored in LDAP so the
            # eventual write replaces the attribute with the union
            for attr in entry_attrs.keys():
                if attr not in _record_attributes:
                    continue
                if entry_attrs[attr] is None:
                    vals = []
                elif not isinstance(entry_attrs[attr], (tuple, list)):
                    vals = [entry_attrs[attr]]
                else:
                    vals = list(entry_attrs[attr])
                entry_attrs[attr] = list(set(old_entry.get(attr, []) + vals))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)
        self.obj.check_record_type_collisions(keys, rrattrs)

        # remember the final state for the optional wait-for-DNS step
        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def execute(self, *keys, **options):
        result = super(dnsrecord_add, self).execute(*keys, **options)
        # warn when the relative record name already ends with the zone name
        self.obj.warning_suspicious_relative_name(result, *keys, **options)
        return result

    def exc_callback(self, keys, options, exc, call_func, *call_args, **call_kwargs):
        if call_func.__name__ == 'add_entry':
            if isinstance(exc, errors.DuplicateEntry):
                # A new record is being added to existing LDAP DNS object
                # Update can be safely run as old record values has been
                # already merged in pre_callback
                ldap = self.obj.backend
                entry_attrs = self.obj.get_record_entry_attrs(call_args[0])
                update = ldap.get_entry(entry_attrs.dn, list(entry_attrs))
                update.update(entry_attrs)

                ldap.update_entry(update, **call_kwargs)
                return
        raise exc

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        for attr in getattr(context, 'dnsrecord_precallback_attrs', []):
            # per-type post processing for the attributes added by this call
            param = self.params[attr]
            param.dnsrecord_add_post_callback(ldap, dn, entry_attrs, *keys, **options)

        if self.obj.is_pkey_zone_record(*keys):
            # the zone apex entry is displayed under the special zone-record name
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]

        self.obj.postprocess_record(entry_attrs, **options)

        if self.api.env['wait_for_dns']:
            self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)
        return dn
@register()
class dnsrecord_mod(LDAPUpdate):
    __doc__ = _('Modify a DNS resource record.')

    no_option_msg = 'No options to modify a specific record provided.'

    takes_options = LDAPUpdate.takes_options + (
        dnsrecord.structured_flag,
    )

    def args_options_2_entry(self, *keys, **options):
        has_cli_options(self, options, self.no_option_msg, True)
        return super(dnsrecord_mod, self).args_options_2_entry(*keys, **options)

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Support structured per-part updates (old value + new parts),
        validate the resulting record set and remember the modifications
        for the optional wait-for-DNS step."""
        assert isinstance(dn, DN)
        if options.get('rename') and self.obj.is_pkey_zone_record(*keys):
            # zone rename is not allowed
            raise errors.ValidationError(name='rename',
                           error=_('DNS zone root record cannot be renamed'))

        # check if any attr should be updated using structured instead of replaced
        # format is recordname : (old_value, new_parts)
        updated_attrs = {}
        for param in iterate_rrparams_by_parts(self, options, skip_extra=True):
            parts = param.get_parts_from_kw(options, raise_on_none=False)
            if parts is None:
                # old-style modification
                continue

            old_value = entry_attrs.get(param.name)
            if not old_value:
                raise errors.RequirementError(name=param.name)
            if isinstance(old_value, (tuple, list)):
                if len(old_value) > 1:
                    # structured update is ambiguous for multi-valued input
                    raise errors.ValidationError(name=param.name,
                           error=_('DNS records can be only updated one at a time'))
                old_value = old_value[0]

            updated_attrs[param.name] = (old_value, parts)

        # Run pre_callback validators
        self.obj.run_precallback_validators(dn, entry_attrs, *keys, **options)

        # current entry is needed in case of per-dns-record-part updates and
        # for record type collision check
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)

        if updated_attrs:
            for attr in updated_attrs:
                param = self.params[attr]
                old_dnsvalue, new_parts = updated_attrs[attr]

                if old_dnsvalue not in old_entry.get(attr, []):
                    # the value the user wants to replace does not exist
                    attr_name = unicode(param.label or param.name)
                    raise errors.AttrValueNotFound(attr=attr_name,
                                                   value=old_dnsvalue)
                old_entry[attr].remove(old_dnsvalue)

                # fill in parts the user did not specify from the old value
                old_parts = param._get_part_values(old_dnsvalue)
                modified_parts = tuple(part if part is not None else old_parts[part_id] \
                                               for part_id,part in enumerate(new_parts))

                new_dnsvalue = [param._convert_scalar(modified_parts)]
                entry_attrs[attr] = list(set(old_entry[attr] + new_dnsvalue))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)
        self.obj.check_record_type_collisions(keys, rrattrs)

        # remember the final state for the optional wait-for-DNS step
        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def execute(self, *keys, **options):
        result = super(dnsrecord_mod, self).execute(*keys, **options)

        # remove if empty
        if not self.obj.is_pkey_zone_record(*keys):
            rename = options.get('rename')
            if rename is not None:
                # a renamed record lives under the new name from now on
                keys = keys[:-1] + (rename,)
            dn = self.obj.get_dn(*keys, **options)
            ldap = self.obj.backend
            old_entry = ldap.get_entry(dn, _record_attributes)

            del_all = True
            for attr in old_entry.keys():
                if old_entry[attr]:
                    del_all = False
                    break

            if del_all:
                # no record attribute left: delete the whole LDAP entry
                result = self.obj.methods.delentry(*keys,
                                                   version=options['version'])

                # we need to modify delete result to match mod output type
                # only one value is expected, not a list
                if client_has_capability(options['version'], 'primary_key_types'):
                    assert len(result['value']) == 1
                    result['value'] = result['value'][0]

                # indicate that entry was deleted
                context.dnsrecord_entry_mods[(keys[0], keys[1])] = None

        if self.api.env['wait_for_dns']:
            self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)
        if 'nsrecord' in options:
            # delegation may have changed: warn about shadowed forward zones
            self.obj.warning_if_ns_change_cause_fwzone_ineffective(result,
                                                                   *keys,
                                                                   **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            # the zone apex entry is displayed under the special zone-record name
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn
@register()
class dnsrecord_delentry(LDAPDelete):
    # Internal helper command (NO_CLI): deletes the whole LDAP entry that
    # holds all records of a name. Used by dnsrecord_mod/dnsrecord_del
    # via self.obj.methods.delentry when no record value remains.
    __doc__ = _('Delete DNS record entry.')
    msg_summary = _('Deleted record "%(value)s"')
    NO_CLI = True
@register()
class dnsrecord_del(LDAPUpdate):
    # Deliberately an LDAPUpdate, not LDAPDelete: deletion removes values
    # from the multi-valued record attributes; the whole LDAP entry is
    # dropped only when nothing is left (or with --del-all).
    __doc__ = _('Delete DNS resource record.')

    has_output = output.standard_multi_delete

    no_option_msg = _('Neither --del-all nor options to delete a specific record provided.\n'\
            "Command help may be consulted for all supported record types.")

    takes_options = (
            Flag('del_all',
                default=False,
                label=_('Delete all associated records'),
            ),
            dnsrecord.structured_flag,
            Flag(
                'raw',
                exclude=('cli', 'webui'),
            ),
    )

    def get_options(self):
        # Hide per-part and extra options: deletion works on raw values only.
        for option in super(dnsrecord_del, self).get_options():
            if get_part_rrtype(option.name) or get_extra_rrtype(option.name):
                continue
            if option.name in ('rename', ):
                # options only valid for dnsrecord-mod
                continue
            if isinstance(option, DNSRecord):
                yield option.clone(option_group=None)
                continue
            yield option

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Remove the requested values from the stored entry and decide
        whether the whole LDAP object should be deleted afterwards."""
        assert isinstance(dn, DN)
        try:
            old_entry = ldap.get_entry(dn, _record_attributes)
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)

        for attr in entry_attrs.keys():
            if attr not in _record_attributes:
                continue
            vals = entry_attrs[attr]
            if vals is None:
                continue
            if not isinstance(vals, (tuple, list)):
                vals = [vals]
            for val in vals:
                try:
                    # drop the requested value from the stored ones
                    old_entry[attr].remove(val)
                except (KeyError, ValueError):
                    # value (or whole attribute) not present: report it
                    # under the user-visible parameter label when possible
                    try:
                        param = self.params[attr]
                        attr_name = unicode(param.label or param.name)
                    except Exception:
                        attr_name = attr
                    raise errors.AttrValueNotFound(attr=attr_name, value=val)
            entry_attrs[attr] = list(set(old_entry[attr]))

        rrattrs = self.obj.updated_rrattrs(old_entry, entry_attrs)
        self.obj.check_record_type_dependencies(keys, rrattrs)

        del_all = False
        if not self.obj.is_pkey_zone_record(*keys):
            record_found = False
            for attr in old_entry.keys():
                if old_entry[attr]:
                    record_found = True
                    break
            del_all = not record_found

        # set del_all flag in context
        # when the flag is enabled, the entire DNS record object is deleted
        # in a post callback
        context.del_all = del_all
        context.dnsrecord_entry_mods = getattr(context, 'dnsrecord_entry_mods',
                                               {})
        context.dnsrecord_entry_mods[(keys[0], keys[1])] = entry_attrs.copy()

        return dn

    def execute(self, *keys, **options):
        if options.get('del_all', False):
            if self.obj.is_pkey_zone_record(*keys):
                # the apex record is part of the zone itself
                raise errors.ValidationError(
                        name='del_all',
                        error=_('Zone record \'%s\' cannot be deleted') \
                                % _dns_zone_record
                    )
            result = self.obj.methods.delentry(*keys,
                                               version=options['version'])
            if self.api.env['wait_for_dns']:
                entries = {(keys[0], keys[1]): None}
                self.obj.wait_for_modified_entries(entries)
        else:
            result = super(dnsrecord_del, self).execute(*keys, **options)
            result['value'] = pkey_to_value([keys[-1]], options)

            if getattr(context, 'del_all', False) and not \
                    self.obj.is_pkey_zone_record(*keys):
                # nothing left in the entry: remove the LDAP object as well
                result = self.obj.methods.delentry(*keys,
                                                   version=options['version'])
                context.dnsrecord_entry_mods[(keys[0], keys[1])] = None

            if self.api.env['wait_for_dns']:
                self.obj.wait_for_modified_entries(context.dnsrecord_entry_mods)

        if 'nsrecord' in options or options.get('del_all', False):
            # delegation may have changed: warn about shadowed forward zones
            self.obj.warning_if_ns_change_cause_fwzone_ineffective(result,
                                                                   *keys,
                                                                   **options)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn

    def args_options_2_entry(self, *keys, **options):
        has_cli_options(self, options, self.no_option_msg)
        return super(dnsrecord_del, self).args_options_2_entry(*keys, **options)
@register()
class dnsrecord_show(LDAPRetrieve):
    __doc__ = _('Display DNS resource.')

    takes_options = LDAPRetrieve.takes_options + (
        dnsrecord.structured_flag,
    )

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        assert isinstance(dn, DN)
        if self.obj.is_pkey_zone_record(*keys):
            # the zone apex entry is displayed under the special zone-record name
            entry_attrs[self.obj.primary_key.name] = [_dns_zone_record]
        self.obj.postprocess_record(entry_attrs, **options)
        return dn
@register()
class dnsrecord_find(LDAPSearch):
    __doc__ = _('Search for DNS resources.')

    takes_options = LDAPSearch.takes_options + (
        dnsrecord.structured_flag,
    )

    def get_options(self):
        # Hide per-part and extra options: searching matches raw values only.
        for option in super(dnsrecord_find, self).get_options():
            if get_part_rrtype(option.name) or get_extra_rrtype(option.name):
                continue
            if isinstance(option, DNSRecord):
                yield option.clone(option_group=None)
                continue
            yield option

    def pre_callback(self, ldap, filter, attrs_list, base_dn, scope,
                     dnszoneidnsname, *args, **options):
        assert isinstance(base_dn, DN)

        # validate if zone is master zone
        self.obj.check_zone(dnszoneidnsname, **options)

        # rebuild the filter with IDN-aware value encoding
        filter = _create_idn_filter(self, ldap, *args, **options)
        return (filter, base_dn, ldap.SCOPE_SUBTREE)

    def post_callback(self, ldap, entries, truncated, *args, **options):
        if entries:
            zone_obj = self.api.Object[self.obj.parent_object]
            zone_dn = zone_obj.get_dn(args[0])
            if entries[0].dn == zone_dn:
                # the zone entry itself is presented as the zone record
                entries[0][zone_obj.primary_key.name] = [_dns_zone_record]
            for entry in entries:
                self.obj.postprocess_record(entry, **options)

        return truncated
@register()
class dns_resolve(Command):
    __doc__ = _('Resolve a host name in DNS. (Deprecated)')

    NO_CLI = True

    has_output = output.simple_value
    msg_summary = _('Found \'%(value)s\'')
    takes_args = (
        Str('hostname',
            label=_('Hostname (FQDN)'),
        ),
    )

    def execute(self, *args, **options):
        # Single positional argument: the host name to resolve.
        hostname = args[0]
        try:
            verify_host_resolvable(hostname)
        except errors.DNSNotARecordError:
            raise errors.NotFound(
                reason=_('Host \'%(host)s\' not found') % {'host': hostname}
            )

        result = dict(result=True, value=hostname)

        # Always attach the deprecation warning to the result.
        messages.add_message(
            options['version'], result,
            messages.CommandDeprecatedWarning(
                command='dns-resolve',
                additional_info='The command may return an unexpected result, '
                                'the resolution of the DNS domain is done on '
                                'a randomly chosen IPA server.'
            )
        )
        return result
@register()
class dns_is_enabled(Command):
    __doc__ = _('Checks if any of the servers has the DNS service enabled.')

    NO_CLI = True
    has_output = output.standard_value

    def execute(self, *args, **options):
        # Consult the LDAP-backed service registry so the answer covers
        # every IPA master, not just the local one.
        enabled = is_service_enabled('DNS', conn=self.api.Backend.ldap2)
        return dict(result=enabled, value=pkey_to_value(None, options))
@register()
class dnsconfig(LDAPObject):
    """
    DNS global configuration object.

    Singleton object stored in the fixed cn=dns container; it has no
    primary key, so get_dn() ignores its arguments.
    """
    object_name = _('DNS configuration options')
    default_attributes = [
        'idnsforwardpolicy', 'idnsforwarders', 'idnsallowsyncptr'
    ]

    label = _('DNS Global Configuration')
    label_singular = _('DNS Global Configuration')

    takes_params = (
        Str('idnsforwarders*',
            validate_bind_forwarder,
            cli_name='forwarder',
            label=_('Global forwarders'),
            doc=_('Global forwarders. A custom port can be specified for each '
                  'forwarder using a standard format "IP_ADDRESS port PORT"'),
        ),
        StrEnum('idnsforwardpolicy?',
            cli_name='forward_policy',
            label=_('Forward policy'),
            doc=_('Global forwarding policy. Set to "none" to disable '
                  'any configured global forwarders.'),
            values=(u'only', u'first', u'none'),
        ),
        Bool('idnsallowsyncptr?',
            cli_name='allow_sync_ptr',
            label=_('Allow PTR sync'),
            doc=_('Allow synchronization of forward (A, AAAA) and reverse (PTR) records'),
        ),
        Int('idnszonerefresh?',
            deprecated=True,
            cli_name='zone_refresh',
            label=_('Zone refresh interval'),
            doc=_('An interval between regular polls of the name server for new DNS zones'),
            minvalue=0,
            flags={'no_option'},
        ),
        Int('ipadnsversion?',  # available only in installer/upgrade
            label=_('IPA DNS version'),
        ),
        # virtual attributes below are filled from the server roles
        # machinery, not stored in this LDAP entry
        Str(
            'dns_server_server*',
            label=_('IPA DNS servers'),
            doc=_('List of IPA masters configured as DNS servers'),
            flags={'virtual_attribute', 'no_create', 'no_update'}
        ),
        Str(
            'dnssec_key_master_server?',
            label=_('IPA DNSSec key master'),
            doc=_('IPA server configured as DNSSec key master'),
            flags={'virtual_attribute', 'no_create', 'no_update'}
        )
    )
    managed_permissions = {
        'System: Write DNS Configuration': {
            'non_object': True,
            'ipapermright': {'write'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsConfigObject)'],
            'ipapermdefaultattr': {
                'idnsallowsyncptr', 'idnsforwarders', 'idnsforwardpolicy',
                'idnspersistentsearch', 'idnszonerefresh'
            },
            'replaces': [
                '(targetattr = "idnsforwardpolicy || idnsforwarders || idnsallowsyncptr || idnszonerefresh || idnspersistentsearch")(target = "ldap:///cn=dns,$SUFFIX")(version 3.0;acl "permission:Write DNS Configuration";allow (write) groupdn = "ldap:///cn=Write DNS Configuration,cn=permissions,cn=pbac,$SUFFIX";)',
            ],
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
        'System: Read DNS Configuration': {
            'non_object': True,
            'ipapermright': {'read'},
            'ipapermlocation': api.env.basedn,
            'ipapermtarget': DN('cn=dns', api.env.basedn),
            'ipapermtargetfilter': ['(objectclass=idnsConfigObject)'],
            'ipapermdefaultattr': {
                'objectclass',
                'idnsallowsyncptr', 'idnsforwarders', 'idnsforwardpolicy',
                'idnspersistentsearch', 'idnszonerefresh', 'ipadnsversion'
            },
            'default_privileges': {'DNS Administrators', 'DNS Servers'},
        },
    }

    def get_dn(self, *keys, **kwargs):
        # Fail early when the DNS subtree has not been set up at all.
        if not dns_container_exists(self.api.Backend.ldap2):
            raise errors.NotFound(reason=_('DNS is not configured'))
        return DN(api.env.container_dns, api.env.basedn)

    def get_dnsconfig(self, ldap):
        # Fetch the whole global configuration entry.
        entry = ldap.get_entry(self.get_dn(), None)
        return entry

    def postprocess_result(self, result):
        # The config counts as empty when no non-virtual parameter is set;
        # reflect that in the command summary.
        is_config_empty = not any(
            param.name in result['result'] for param in self.params() if
            u'virtual_attribute' not in param.flags
        )
        if is_config_empty:
            result['summary'] = unicode(_('Global DNS configuration is empty'))
@register()
class dnsconfig_mod(LDAPUpdate):
    __doc__ = _('Modify global DNS configuration.')

    def get_options(self):
        """hide ipadnsversion outside of installer/upgrade"""
        for option in super(dnsconfig_mod, self).get_options():
            if option.name == 'ipadnsversion':
                option = option.clone(include=('installer', 'updates'))
            yield option

    def execute(self, *keys, **options):
        # test dnssec forwarders
        forwarders = options.get('idnsforwarders')

        result = super(dnsconfig_mod, self).execute(*keys, **options)
        self.obj.postprocess_result(result)

        # this check makes sense only when resulting forwarders are non-empty
        if result['result'].get('idnsforwarders'):
            fwzone = DNSName('.')
            _add_warning_fw_policy_conflict_aez(result, fwzone, **options)

        if forwarders:
            # forwarders were changed: validate each one and attach
            # non-fatal warnings for DNSSEC/EDNS0/resolution problems
            for forwarder in forwarders:
                try:
                    validate_dnssec_global_forwarder(forwarder)
                except DNSSECSignatureMissingError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerDoesNotSupportDNSSECWarning(
                            server=forwarder, error=e,
                        )
                    )
                except EDNS0UnsupportedError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerDoesNotSupportEDNS0Warning(
                            server=forwarder, error=e,
                        )
                    )
                except UnresolvableRecordError as e:
                    messages.add_message(
                        options['version'],
                        result, messages.DNSServerValidationWarning(
                            server=forwarder, error=e
                        )
                    )

        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Merge in virtual attributes describing which servers run DNS.
        self.api.Object.config.show_servroles_attributes(
            entry_attrs, "DNS server", **options)
        return dn
@register()
class dnsconfig_show(LDAPRetrieve):
    __doc__ = _('Show the current global DNS configuration.')

    def execute(self, *keys, **options):
        result = super(dnsconfig_show, self).execute(*keys, **options)
        # add the "configuration is empty" summary when applicable
        self.obj.postprocess_result(result)
        return result

    def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
        # Merge in virtual attributes describing which servers run DNS.
        self.api.Object.config.show_servroles_attributes(
            entry_attrs, "DNS server", **options)
        return dn
@register()
class dnsforwardzone(DNSZoneBase):
    """
    DNS Forward zone, container for resource records.
    """
    object_name = _('DNS forward zone')
    object_name_plural = _('DNS forward zones')
    object_class = DNSZoneBase.object_class + ['idnsforwardzone']
    label = _('DNS Forward Zones')
    label_singular = _('DNS Forward Zone')
    default_forward_policy = u'first'

    # managed_permissions: permissions were applied in dnszone class, do NOT
    # add them here, they should not be applied twice.

    def _warning_fw_zone_is_not_effective(self, result, *keys, **options):
        # Attach a warning when the forward zone will not be consulted
        # (e.g. shadowed by missing delegation).
        fwzone = keys[-1]
        _add_warning_fw_zone_is_not_effective(self.api, result, fwzone,
                                              options['version'])

    def _warning_if_forwarders_do_not_work(self, result, new_zone,
                                           *keys, **options):
        """Probe the configured forwarders and attach warnings (never
        fail) when they are unreachable, lack EDNS0, or break DNSSEC
        validation.

        :param new_zone: True when the zone was just created (a short
            sleep is inserted so BIND can pick it up from LDAP)
        """
        fwzone = keys[-1]
        forwarders = options.get('idnsforwarders', [])
        any_forwarder_work = False

        for forwarder in forwarders:
            try:
                validate_dnssec_zone_forwarder_step1(forwarder, fwzone)
            except UnresolvableRecordError as e:
                messages.add_message(
                    options['version'],
                    result, messages.DNSServerValidationWarning(
                        server=forwarder, error=e
                    )
                )
            except EDNS0UnsupportedError as e:
                messages.add_message(
                    options['version'],
                    result, messages.DNSServerDoesNotSupportEDNS0Warning(
                        server=forwarder, error=e
                    )
                )
            else:
                any_forwarder_work = True

        if not any_forwarder_work:
            # do not test DNSSEC validation if there is no valid forwarder
            return

        # resolve IP address of any DNS replica
        # FIXME: https://fedorahosted.org/bind-dyndb-ldap/ticket/143
        # we currently have to test all IPA DNS replicas, because DNSSEC
        # validation is configured just in named.conf per replica
        ipa_dns_masters = [normalize_zone(x) for x in
                           self.api.Object.dnsrecord.get_dns_masters()]

        if not ipa_dns_masters:
            # something very bad happened, DNS is installed, but no IPA DNS
            # servers available
            logger.error("No IPA DNS server can be found, but integrated DNS "
                         "is installed")
            return

        # try A first, then AAAA, of the first master
        ipa_dns_ip = None
        for rdtype in (dns.rdatatype.A, dns.rdatatype.AAAA):
            try:
                ans = resolve(ipa_dns_masters[0], rdtype)
            except dns.exception.DNSException:
                continue
            else:
                ipa_dns_ip = str(next(iter(ans.rrset.items)))
                break

        if not ipa_dns_ip:
            logger.error("Cannot resolve %s hostname", ipa_dns_masters[0])
            return

        # sleep a bit, adding new zone to BIND from LDAP may take a while
        if new_zone:
            time.sleep(5)

        # Test if IPA is able to receive replies from forwarders
        try:
            validate_dnssec_zone_forwarder_step2(ipa_dns_ip, fwzone)
        except DNSSECValidationError as e:
            messages.add_message(
                options['version'],
                result, messages.DNSSECValidationFailingWarning(error=e)
            )
        except UnresolvableRecordError as e:
            messages.add_message(
                options['version'],
                result, messages.DNSServerValidationWarning(
                    server=ipa_dns_ip, error=e
                )
            )
@register()
class dnsforwardzone_add(DNSZoneBase_add):
    __doc__ = _('Create new DNS forward zone.')

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        assert isinstance(dn, DN)

        dn = super(dnsforwardzone_add, self).pre_callback(ldap, dn,
            entry_attrs, attrs_list, *keys, **options)

        if 'idnsforwardpolicy' not in entry_attrs:
            entry_attrs['idnsforwardpolicy'] = self.obj.default_forward_policy

        # any policy other than 'none' requires at least one forwarder
        if (not entry_attrs.get('idnsforwarders') and
                entry_attrs['idnsforwardpolicy'] != u'none'):
            raise errors.ValidationError(name=u'idnsforwarders',
                                         error=_('Please specify forwarders.'))

        return dn

    def execute(self, *keys, **options):
        fwzone = keys[-1]
        result = super(dnsforwardzone_add, self).execute(*keys, **options)
        # non-fatal sanity warnings about the newly created zone
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        _add_warning_fw_policy_conflict_aez(result, fwzone, **options)
        if options.get('idnsforwarders'):
            self.obj._warning_if_forwarders_do_not_work(
                result, True, *keys, **options)
        return result
@register()
class dnsforwardzone_del(DNSZoneBase_del):
    # All deletion logic is inherited from DNSZoneBase_del.
    __doc__ = _('Delete DNS forward zone.')

    msg_summary = _('Deleted DNS forward zone "%(value)s"')
@register()
class dnsforwardzone_mod(DNSZoneBase_mod):
    __doc__ = _('Modify DNS forward zone.')

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Ensure the post-modification state is consistent: a policy
        other than 'none' must end up with at least one forwarder."""
        try:
            entry = ldap.get_entry(dn)
        except errors.NotFound:
            raise self.obj.handle_not_found(*keys)

        if not _check_entry_objectclass(entry, self.obj.object_class):
            # entry exists but is not a forward zone
            raise self.obj.handle_not_found(*keys)

        # compute the resulting policy/forwarders: values from this
        # modification take precedence over the stored ones
        policy = self.obj.default_forward_policy
        forwarders = []

        if 'idnsforwarders' in entry_attrs:
            forwarders = entry_attrs['idnsforwarders']
        elif 'idnsforwarders' in entry:
            forwarders = entry['idnsforwarders']

        if 'idnsforwardpolicy' in entry_attrs:
            policy = entry_attrs['idnsforwardpolicy']
        elif 'idnsforwardpolicy' in entry:
            policy = entry['idnsforwardpolicy']

        if not forwarders and policy != u'none':
            raise errors.ValidationError(name=u'idnsforwarders',
                                         error=_('Please specify forwarders.'))

        return dn

    def execute(self, *keys, **options):
        fwzone = keys[-1]
        result = super(dnsforwardzone_mod, self).execute(*keys, **options)
        _add_warning_fw_policy_conflict_aez(result, fwzone, **options)
        if options.get('idnsforwarders'):
            # warn (don't fail) when the new forwarders look broken
            self.obj._warning_if_forwarders_do_not_work(result, False, *keys,
                                                        **options)
        return result
@register()
class dnsforwardzone_find(DNSZoneBase_find):
    # All search logic is inherited from DNSZoneBase_find.
    __doc__ = _('Search for DNS forward zones.')
@register()
class dnsforwardzone_show(DNSZoneBase_show):
    # All retrieval logic is inherited from DNSZoneBase_show.
    __doc__ = _('Display information about a DNS forward zone.')
@register()
class dnsforwardzone_disable(DNSZoneBase_disable):
    # All disable logic is inherited from DNSZoneBase_disable.
    __doc__ = _('Disable DNS Forward Zone.')
    msg_summary = _('Disabled DNS forward zone "%(value)s"')
@register()
class dnsforwardzone_enable(DNSZoneBase_enable):
    __doc__ = _('Enable DNS Forward Zone.')
    msg_summary = _('Enabled DNS forward zone "%(value)s"')

    def execute(self, *keys, **options):
        result = super(dnsforwardzone_enable, self).execute(*keys, **options)
        # a re-enabled zone may still be ineffective (missing delegation)
        self.obj._warning_fw_zone_is_not_effective(result, *keys, **options)
        return result
@register()
class dnsforwardzone_add_permission(DNSZoneBase_add_permission):
    # All logic is inherited from DNSZoneBase_add_permission.
    __doc__ = _('Add a permission for per-forward zone access delegation.')
@register()
class dnsforwardzone_remove_permission(DNSZoneBase_remove_permission):
    # All logic is inherited from DNSZoneBase_remove_permission.
    __doc__ = _('Remove a permission for per-forward zone access delegation.')
@register()
class dns_system_records(Object):
    # Virtual API object: only defines the output schema used by the
    # dns_update_system_records method below; nothing is stored in LDAP.
    takes_params = (
        Str(
            'ipa_records*',
            label=_('IPA DNS records')
        ),
        Str(
            'location_records*',
            label=_('IPA location records')
        )
    )
@register()
class dns_update_system_records(Method):
    __doc__ = _('Update location and IPA server DNS records')

    obj_name = 'dns_system_records'
    attr_name = 'update'

    has_output = (
        output.Entry(
            'result',
        ),
        output.Output(
            'value', bool,
            _('Result of the command'), ['no_display']
        )
    )

    takes_options = (
        Flag(
            'dry_run',
            label=_('Dry run'),
            doc=_('Do not update records only return expected records')
        )
    )

    def execute(self, *args, **options):
        """Regenerate IPA system and location DNS records.

        Returns the affected records as text lists; 'value' is False
        when any update failed or the domain is not managed by IPA.
        """
        def output_to_list(iterable):
            # Flatten (name, node) pairs into a flat list of record strings.
            rec_list = []
            for name, node in iterable:
                rec_list.extend(IPASystemRecords.records_list_from_node(
                    name, node))
            return rec_list

        def output_to_list_with_failed(iterable):
            # Same as output_to_list, but keeps the per-record error text.
            err_rec_list = []
            for name, node, error in iterable:
                err_rec_list.extend([
                    (v, unicode(error)) for v in
                    IPASystemRecords.records_list_from_node(name, node)
                ])
            return err_rec_list

        result = {
            'result': {},
            'value': True,
        }

        system_records = IPASystemRecords(self.api)

        if options.get('dry_run'):
            # only report what would be written, do not touch DNS
            result['result']['ipa_records'] = output_to_list(
                system_records.get_base_records().items())
            result['result']['location_records'] = output_to_list(
                system_records.get_locations_records().items())
        else:
            try:
                (
                    (success_base, failed_base),
                    (success_loc, failed_loc),
                ) = system_records.update_dns_records()
            except IPADomainIsNotManagedByIPAError:
                # cannot update externally-managed domain; report the
                # expected records instead and flag the failure
                result['value'] = False
                self.add_message(
                    messages.DNSUpdateNotIPAManagedZone(
                        zone=self.api.env.domain)
                )
                result['result']['ipa_records'] = output_to_list(
                    system_records.get_base_records().items())
            else:
                if success_base:
                    result['result']['ipa_records'] = output_to_list(
                        success_base)
                if success_loc:
                    result['result']['location_records'] = output_to_list(
                        success_loc)
                # one warning message per record that failed to update
                for failed in (failed_base, failed_loc):
                    for record, error in output_to_list_with_failed(failed):
                        self.add_message(
                            messages.DNSUpdateOfSystemRecordFailed(
                                record=record,
                                error=error
                            )
                        )
                if failed_base or failed_loc:
                    result['value'] = False
        return result
| gpl-3.0 |
CJ8664/servo | tests/wpt/web-platform-tests/tools/py/py/_builtin.py | 259 | 6521 | import sys
try:
    reversed = reversed
except NameError:
    # Pre-2.4 Python lacks the reversed() builtin; provide a fallback.
    def reversed(sequence):
        """reversed(sequence) -> reverse iterator over values of the sequence

        Return a reverse iterator
        """
        if hasattr(sequence, '__reversed__'):
            return sequence.__reversed__()
        if not hasattr(sequence, '__getitem__'):
            raise TypeError("argument to reversed() must be a sequence")
        return reversed_iterator(sequence)

    class reversed_iterator(object):
        # Walks the sequence from the last index down to 0.

        def __init__(self, seq):
            self.seq = seq
            self.remaining = len(seq)

        def __iter__(self):
            return self

        # Python 2 iterator protocol uses next(), not __next__().
        def next(self):
            i = self.remaining
            if i > 0:
                i -= 1
                item = self.seq[i]
                self.remaining = i
                return item
            raise StopIteration

        def __length_hint__(self):
            return self.remaining
try:
    any = any
except NameError:
    # Pre-2.5 Python lacks the any() builtin; provide a fallback.
    def any(iterable):
        for x in iterable:
            if x:
                return True
        return False
try:
    all = all
except NameError:
    # Pre-2.5 Python lacks the all() builtin; provide a fallback.
    def all(iterable):
        for x in iterable:
            if not x:
                return False
        return True
try:
    sorted = sorted
except NameError:
    # Pre-2.4 Python lacks sorted(); emulate it, including key= support,
    # via the decorate-sort-undecorate pattern.
    builtin_cmp = cmp # need to use cmp as keyword arg

    def sorted(iterable, cmp=None, key=None, reverse=0):
        use_cmp = None
        if key is not None:
            # Decorate each element with its key so cmp sees the key first.
            if cmp is None:
                def use_cmp(x, y):
                    return builtin_cmp(x[0], y[0])
            else:
                def use_cmp(x, y):
                    return cmp(x[0], y[0])
            l = [(key(element), element) for element in iterable]
        else:
            if cmp is not None:
                use_cmp = cmp
            l = list(iterable)
        if use_cmp is not None:
            l.sort(use_cmp)
        else:
            l.sort()
        if reverse:
            l.reverse()
        if key is not None:
            # Undecorate: strip the keys back off.
            return [element for (_, element) in l]
        return l
try:
    set, frozenset = set, frozenset
except NameError:
    # Pre-2.4 Python: fall back to the (now removed) sets module.
    from sets import set, frozenset

# pass through
enumerate = enumerate
try:
    BaseException = BaseException
except NameError:
    # Pre-2.5 Python has no BaseException; Exception is the closest root.
    BaseException = Exception

try:
    GeneratorExit = GeneratorExit
except NameError:
    class GeneratorExit(Exception):
        """ This exception is never raised, it is there to make it possible to
        write code compatible with CPython 2.5 even in lower CPython
        versions."""
        pass
    GeneratorExit.__module__ = 'exceptions'

# Exceptions that should propagate rather than be swallowed by generic handlers.
_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
try:
    callable = callable
except NameError:
    # Python 3.0/3.1 removed callable(); approximate it via __call__.
    def callable(obj):
        return hasattr(obj, "__call__")
if sys.version_info >= (3, 0):
    # Python 3: print/exec are functions; alias them via exec so this file
    # still parses under Python 2.
    exec ("print_ = print ; exec_=exec")
    import builtins

    # some backward compatibility helpers
    _basestring = str

    def _totext(obj, encoding=None, errors=None):
        # Decode bytes (with optional error policy) or coerce to str.
        if isinstance(obj, bytes):
            if errors is None:
                obj = obj.decode(encoding)
            else:
                obj = obj.decode(encoding, errors)
        elif not isinstance(obj, str):
            obj = str(obj)
        return obj

    def _isbytes(x):
        return isinstance(x, bytes)

    def _istext(x):
        return isinstance(x, str)

    text = str
    bytes = bytes

    def _getimself(function):
        # Bound-method receiver, if any.
        return getattr(function, '__self__', None)

    def _getfuncdict(function):
        return getattr(function, "__dict__", None)

    def _getcode(function):
        return getattr(function, "__code__", None)

    def execfile(fn, globs=None, locs=None):
        # Re-implementation of Python 2's execfile() builtin.
        if globs is None:
            back = sys._getframe(1)
            globs = back.f_globals
            locs = back.f_locals
            del back
        elif locs is None:
            locs = globs
        fp = open(fn, "r")
        try:
            source = fp.read()
        finally:
            fp.close()
        co = compile(source, fn, "exec", dont_inherit=True)
        exec_(co, globs, locs)

else:
    # Python 2: map the Python 3 names onto their 2.x equivalents.
    import __builtin__ as builtins
    _totext = unicode
    _basestring = basestring
    text = unicode
    bytes = str
    execfile = execfile
    callable = callable

    def _isbytes(x):
        return isinstance(x, str)

    def _istext(x):
        return isinstance(x, unicode)

    def _getimself(function):
        return getattr(function, 'im_self', None)

    def _getfuncdict(function):
        return getattr(function, "__dict__", None)

    def _getcode(function):
        # func_code on older 2.x, __code__ on 2.6+.
        try:
            return getattr(function, "__code__")
        except AttributeError:
            return getattr(function, "func_code", None)

    def print_(*args, **kwargs):
        """ minimal backport of py3k print statement. """
        sep = ' '
        if 'sep' in kwargs:
            sep = kwargs.pop('sep')
        end = '\n'
        if 'end' in kwargs:
            end = kwargs.pop('end')
        file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
        if kwargs:
            # Anything left over is an unsupported keyword.
            args = ", ".join([str(x) for x in kwargs])
            raise TypeError("invalid keyword arguments: %s" % args)
        at_start = True
        for x in args:
            if not at_start:
                file.write(sep)
            file.write(str(x))
            at_start = False
        file.write(end)

    def exec_(obj, globals=None, locals=None):
        """ minimal backport of py3k exec statement. """
        __tracebackhide__ = True
        if globals is None:
            frame = sys._getframe(1)
            globals = frame.f_globals
            if locals is None:
                locals = frame.f_locals
        elif locals is None:
            locals = globals
        exec2(obj, globals, locals)

if sys.version_info >= (3, 0):
    def _reraise(cls, val, tb):
        # Re-raise preserving the original traceback (py3 syntax).
        __tracebackhide__ = True
        assert hasattr(val, '__traceback__')
        raise cls.with_traceback(val, tb)
else:
    # py2 three-argument raise is a syntax error under py3, so hide it in exec.
    exec ("""
def _reraise(cls, val, tb):
    __tracebackhide__ = True
    raise cls, val, tb
def exec2(obj, globals, locals):
    __tracebackhide__ = True
    exec obj in globals, locals
""")
def _tryimport(*names):
    """ return the first successfully imported module. """
    assert names
    excinfo = None
    for modname in names:
        try:
            __import__(modname)
        except ImportError:
            excinfo = sys.exc_info()
            continue
        return sys.modules[modname]
    # Every candidate failed: re-raise the last ImportError.
    _reraise(*excinfo)
| mpl-2.0 |
Lingotek/filesystem-connector | python2/ltk/actions/rm_action.py | 2 | 9792 | from ltk.actions.action import *
class RmAction(Action):
    """Implements the `rm` command: stop tracking documents/folders locally
    and cancel (or, with --remote, delete) them on the remote TMS."""

    def __init__(self, path):
        Action.__init__(self, path)
        # When True the remote copy is deleted; otherwise it is cancelled.
        self.use_delete = False

    def rm_action(self, file_patterns, **kwargs):
        """Remove tracked folders/documents matching file_patterns.

        Recognized kwargs (all optional booleans unless noted):
        directory, force, id, remote, all, local, name.
        """
        try:
            removed_folder = False
            # First pass: any pattern that is a directory is treated as a
            # tracked folder and removed from the folder manager.
            for pattern in file_patterns:
                if os.path.isdir(pattern):
                    # print("checking folder "+self.norm_path(pattern))
                    if self.folder_manager.folder_exists(self.norm_path(pattern)):
                        self.folder_manager.remove_element(self.norm_path(pattern))
                        logger.info("Removed folder "+pattern)
                        removed_folder = True
                    else:
                        logger.warning("Folder "+str(pattern)+" has not been added and so can not be removed")
            # --directory: only folders were requested, stop here.
            if 'directory' in kwargs and kwargs['directory']:
                if not removed_folder:
                    logger.info("No folders to remove at the given path(s)")
                return
            matched_files = None
            if isinstance(file_patterns,str):
                file_patterns = [file_patterns]
            if 'force' in kwargs and kwargs['force']:
                force = True
            else:
                force = False
            if 'id' in kwargs and kwargs['id']:
                useID = True
            else:
                useID = False
            if 'remote' in kwargs and kwargs['remote']:
                self.use_delete = True
            else:
                self.use_delete = False
            # Work out which tracked files the arguments select.
            if 'all' in kwargs and kwargs['all']:
                local = False
                self.folder_manager.clear_all()
                removed_folder = True
                logger.info("Removed all folders.")
                useID = False
                matched_files = self.doc_manager.get_file_names()
            elif 'local' in kwargs and kwargs['local']:
                local = True
                if 'name' in kwargs and kwargs['name']:
                    matched_files = []
                    for pattern in file_patterns:
                        doc = self.doc_manager.get_doc_by_prop("name",pattern)
                        if doc:
                            matched_files.append(doc['file_name'])
                else:
                    # -l without -n and without patterns means "everything".
                    if len(file_patterns) == 0:
                        self.folder_manager.clear_all()
                        removed_folder = True
                        logger.info("Removed all folders.")
                        useID = False
                        matched_files = self.doc_manager.get_file_names()
            elif not useID:
                local = False
                # use current working directory as root for files instead of project root
                if 'name' in kwargs and kwargs['name']:
                    matched_files = []
                    for pattern in file_patterns:
                        doc = self.doc_manager.get_doc_by_prop("name",pattern)
                        if doc:
                            matched_files.append(doc['file_name'])
                else:
                    matched_files = self.get_doc_filenames_in_path(file_patterns)
            else:
                # --id: patterns are document ids, not paths.
                local = False
                matched_files = file_patterns
            if not matched_files or len(matched_files) == 0:
                if useID:
                    raise exceptions.ResourceNotFound("No documents to remove with the specified id")
                elif removed_folder:
                    logger.info("No documents to remove")
                elif local:
                    raise exceptions.ResourceNotFound("Too many agruments, to specify a document to be removed locally use -l in association with -n")
                elif not 'all' in kwargs or not kwargs['all']:
                    raise exceptions.ResourceNotFound("No documents to remove with the specified file path")
                else:
                    raise exceptions.ResourceNotFound("No documents to remove")
            is_directory = False
            for pattern in file_patterns: # If attemping to remove any directory, don't print failure message
                basename = os.path.basename(pattern)
                if not basename or basename == "":
                    is_directory = True
            for file_name in matched_files:
                # title = os.path.basename(os.path.normpath(file_name)).split('.')[0]
                self._rm_document(self.norm_path(file_name).replace(self.path,""), useID, (force or local))
        except Exception as e:
            # Python 3
            # log_error(self.error_file_name, e)
            # End Python 3
            if 'string indices must be integers' in str(e):
                logger.error("Error connecting to Lingotek's TMS")
            else:
                logger.error("Error on remove: "+str(e))

    def _rm_clone(self, file_name):
        """Collect translation-file paths for `clone` download mode."""
        trans_files = []
        entry = self.doc_manager.get_doc_by_prop("file_name", file_name)
        if entry:
            if 'locales' in entry and entry['locales']:
                locales = entry['locales']
                for locale_code in locales:
                    # Resolve the per-locale download root, mirroring the
                    # precedence used on download: explicit locale folder,
                    # then download_dir/<locale>, then <locale>.
                    if locale_code in self.locale_folders:
                        download_root = self.locale_folders[locale_code]
                    elif self.download_dir and len(self.download_dir):
                        download_root = os.path.join((self.download_dir if self.download_dir and self.download_dir != 'null' else ''),locale_code)
                    else:
                        download_root = locale_code
                    download_root = os.path.join(self.path,download_root)
                    # NOTE(review): source_file_name/source_path are computed
                    # but never used below — looks like dead code; confirm.
                    source_file_name = entry['file_name']
                    source_path = os.path.join(self.path,os.path.dirname(source_file_name))
                    trans_files.extend(get_translation_files(file_name, download_root, self.download_option, self.doc_manager))
        return trans_files

    def _rm_document(self, file_name, useID, force):
        """Cancel/delete one document remotely; with force, also locally."""
        try:
            doc = None
            if not useID:
                relative_path = self.norm_path(file_name)
                doc = self.doc_manager.get_doc_by_prop('file_name', relative_path)
                title = os.path.basename(self.norm_path(file_name))
                try:
                    document_id = doc['id']
                except TypeError: # Documents specified by name must be found in the local database to be removed.
                    logger.warning("Document name specified for remove isn't in the local database: {0}".format(relative_path))
                    return
                    # raise exceptions.ResourceNotFound("Document name specified doesn't exist: {0}".format(document_name))
            else:
                document_id = file_name
                doc = self.doc_manager.get_doc_by_prop('id', document_id)
                if doc:
                    file_name = doc['file_name']
            if self.use_delete:
                response = self.api.document_delete(document_id)
            else:
                response = self.api.document_cancel(document_id)
            #print (response)
            if response.status_code != 204 and response.status_code != 202:
                # raise_error(response.json(), "Failed to delete document {0}".format(document_name), True)
                logger.error("Failed to {0} {1} remotely".format('delete' if self.use_delete else 'cancel', file_name))
            else:
                logger.info("{0} has been {1} remotely".format(file_name, 'deleted' if self.use_delete else 'cancelled'))
            if force:
                #delete local translation file(s) for the document being deleted NOTE:this does not seem to be implemented. The translation files are gathered below but nothing is ever done with them
                trans_files = []
                if 'clone' in self.download_option:
                    trans_files = self._rm_clone(file_name)
                elif 'folder' in self.download_option:
                    trans_files = self._rm_folder(file_name)
                elif 'same' in self.download_option:
                    download_path = self.path
                    trans_files = get_translation_files(file_name, download_path, self.download_option, self.doc_manager)
                self.delete_local(file_name, document_id)
            # Always drop the document from the local tracking database.
            self.doc_manager.remove_element(document_id)
        except json.decoder.JSONDecodeError:
            logger.error("JSON error on removing document")
        except KeyboardInterrupt:
            raise_error("", "Canceled removing document")
            return
        except Exception as e:
            log_error(self.error_file_name, e)
            logger.error("Error on removing document "+str(file_name)+": "+str(e))

    def _rm_folder(self, file_name):
        """Collect translation-file paths for `folder` download mode."""
        trans_files = []
        entry = self.doc_manager.get_doc_by_prop("file_name", file_name)
        if entry:
            if 'locales' in entry and entry['locales']:
                locales = entry['locales']
                for locale_code in locales:
                    if locale_code in self.locale_folders:
                        if self.locale_folders[locale_code] == 'null':
                            logger.warning("Download failed: folder not specified for "+locale_code)
                        else:
                            download_path = self.locale_folders[locale_code]
                    else:
                        download_path = self.download_dir
                    download_path = os.path.join(self.path,download_path)
                    trans_files.extend(get_translation_files(file_name, download_path, self.download_option, self.doc_manager))
        return trans_files
| mit |
redhat-openstack/django | django/utils/version.py | 228 | 1785 | from __future__ import unicode_literals
import datetime
import os
import subprocess
def get_version(version=None):
    "Returns a PEP 386-compliant version number from VERSION."
    if version is None:
        from django import VERSION as version
    else:
        assert len(version) == 5
        assert version[3] in ('alpha', 'beta', 'rc', 'final')

    # main = X.Y[.Z] — the micro component is dropped when it is zero.
    significant = version[:2] if version[2] == 0 else version[:3]
    main = '.'.join(str(piece) for piece in significant)

    # sub = .devN for pre-alpha snapshots, or {a|b|c}N for alpha/beta/rc.
    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
        git_changeset = get_git_changeset()
        if git_changeset:
            sub = '.dev%s' % git_changeset
    elif version[3] != 'final':
        sub = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}[version[3]] + str(version[4])

    return str(main + sub)
def get_git_changeset():
    """Returns a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
    This value isn't guaranteed to be unique, but collisions are very unlikely,
    so it's sufficient for generating the development version numbers.
    """
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    proc = subprocess.Popen(
        'git log --pretty=format:%ct --quiet -1 HEAD',
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        shell=True, cwd=repo_dir, universal_newlines=True)
    raw_timestamp = proc.communicate()[0]
    try:
        # An empty/garbage answer (e.g. not a git checkout) fails int().
        changeset = datetime.datetime.utcfromtimestamp(int(raw_timestamp))
    except ValueError:
        return None
    return changeset.strftime('%Y%m%d%H%M%S')
| bsd-3-clause |
trdean/grEME | gr-qtgui/apps/plot_psd_base.py | 11 | 5358 | #!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, blocks
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import os, sys
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
print "Error: Program requires PyQt4 and gr-qtgui."
sys.exit(1)
try:
import scipy
except ImportError:
print "Error: Scipy required (www.scipy.org)."
sys.exit(1)
try:
from gnuradio.qtgui.plot_form import *
from gnuradio.qtgui.plot_base import *
except ImportError:
from plot_form import *
from plot_base import *
class plot_base(gr.top_block):
    """Common flowgraph for the PSD plotting apps: reads samples from a list
    of files and feeds them into a Qt GUI sink.

    NOTE(review): subclasses appear to provide self.dsize, self.src_type,
    self.read_samples and self.gui_snk — confirm against the concrete apps.
    """

    def __init__(self, filelist, fc, samp_rate, psdsize, start,
                 nsamples, max_nsamples, avg=1.0):
        gr.top_block.__init__(self)

        self._filelist = filelist
        self._center_freq = fc
        self._samp_rate = samp_rate
        self._psd_size = psdsize
        self._start = start
        self._max_nsamps = max_nsamples
        self._nsigs = len(self._filelist)
        self._avg = avg
        self._nsamps = nsamples
        self._auto_scale = False
        # Default y-axis bounds/step used by the GUI controls.
        self._y_min = -200
        self._y_max = 400
        self._y_range = 130
        self._y_value = 10
        self._is_setup = False

        self.qapp = QtGui.QApplication(sys.argv)

    def setup(self):
        """Build the flowgraph: one source per file, through a skiphead,
        into the GUI sink; records global data min/max across files."""
        self.skip = blocks.skiphead(self.dsize, self._start)

        n = 0
        self.srcs = list()
        self._data_min = sys.maxint
        self._data_max = -sys.maxint - 1
        for f in self._filelist:
            data,_min,_max = self.read_samples(f, self._start,
                                               self._nsamps, self._psd_size)
            if(_min < self._data_min):
                self._data_min = _min
            if(_max > self._data_max):
                self._data_max = _max

            self.srcs.append(self.src_type(data))

            # Set default labels based on file names
            fname = f.split("/")[-1]
            self.gui_snk.set_line_label(n, "{0}".format(fname))
            n += 1

        # Only the first source goes through skiphead; the rest connect
        # straight to the remaining sink inputs.
        self.connect(self.srcs[0], self.skip)
        self.connect(self.skip, (self.gui_snk, 0))
        for i,s in enumerate(self.srcs[1:]):
            self.connect(s, (self.gui_snk, i+1))

        self.gui_snk.set_update_time(0)
        self.gui_snk.enable_menu(False)

        # Get Python Qt references
        pyQt = self.gui_snk.pyqwidget()
        self.pyWin = sip.wrapinstance(pyQt, QtGui.QWidget)

        self._is_setup = True

    def is_setup(self):
        return self._is_setup

    def set_y_axis(self, y_min, y_max):
        self.gui_snk.set_y_axis(y_min, y_max)
        return y_min, y_max

    def get_gui(self):
        # Only valid after setup(); returns None otherwise.
        if(self.is_setup()):
            return self.pyWin
        else:
            return None

    def reset(self, newstart, newnsamps):
        """Stop the flowgraph, reload all sources from the new offset and
        sample count, and restart."""
        self.stop()
        self.wait()

        self._start = newstart

        self._data_min = sys.maxint
        self._data_max = -sys.maxint - 1
        for s,f in zip(self.srcs, self._filelist):
            data,_min,_max = self.read_samples(f, self._start, newnsamps, self._psd_size)
            if(_min < self._data_min):
                self._data_min = _min
            if(_max > self._data_max):
                self._data_max = _max

            s.set_data(data)

        self.start()
def setup_options(desc):
    """Build the option parser shared by the PSD plot apps and parse argv.

    Prints help and exits when no input file argument is supplied.
    Returns (options, args).
    """
    parser = OptionParser(option_class=eng_option, description=desc,
                          conflict_handler="resolve")
    # (short, long), option type, default, help text -- in display order.
    option_table = [
        (("-N", "--nsamples"), "int", 1000000,
         "Set the number of samples to display [default=prints entire file]"),
        (("-S", "--start"), "int", 0,
         "Starting sample number [default=%default]"),
        (("-L", "--psd-size"), "int", 2048,
         "Set the FFT size of the PSD [default=%default]"),
        (("-f", "--center-frequency"), "eng_float", 0.0,
         "Set the center frequency of the signal [default=%default]"),
        (("-r", "--sample-rate"), "eng_float", 1.0,
         "Set the sample rate of the signal [default=%default]"),
        (("-a", "--average"), "float", 1.0,
         "Set amount of averaging (smaller=more averaging) [default=%default]"),
    ]
    for flags, opt_type, default, help_text in option_table:
        parser.add_option(flags[0], flags[1], type=opt_type,
                          default=default, help=help_text)

    (options, args) = parser.parse_args()
    if(len(args) < 1):
        parser.print_help()
        sys.exit(0)

    return (options, args)
| gpl-3.0 |
40223214/-2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/bisect.py | 1261 | 2595 | """Bisection algorithms."""
def insort_right(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the right of the rightmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    # Binary search: converge lo/hi onto the rightmost insertion point.
    # Only __lt__ is used for comparisons, as the bisect contract requires.
    while lo < hi:
        middle = (lo + hi) // 2
        if x < a[middle]:
            hi = middle
        else:
            lo = middle + 1
    a.insert(lo, x)

insort = insort_right   # backward compatibility
def bisect_right(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e <= x, and all e in
    a[i:] have e > x.  So if x already appears in the list, a.insert(x) will
    insert just after the rightmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    # Classic binary search; only __lt__ is used on the elements.
    while lo < hi:
        middle = (lo + hi) // 2
        if x < a[middle]:
            hi = middle
        else:
            lo = middle + 1
    return lo

bisect = bisect_right   # backward compatibility
def insort_left(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the left of the leftmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    # Binary search: converge lo/hi onto the leftmost insertion point.
    while lo < hi:
        middle = (lo + hi) // 2
        if a[middle] < x:
            lo = middle + 1
        else:
            hi = middle
    a.insert(lo, x)
def bisect_left(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e < x, and all e in
    a[i:] have e >= x.  So if x already appears in the list, a.insert(x) will
    insert just before the leftmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if lo < 0:
        raise ValueError('lo must be non-negative')
    if hi is None:
        hi = len(a)
    # Classic binary search; only __lt__ is used on the elements.
    while lo < hi:
        middle = (lo + hi) // 2
        if a[middle] < x:
            lo = middle + 1
        else:
            hi = middle
    return lo
# Overwrite above definitions with a fast C implementation
try:
from _bisect import *
except ImportError:
pass
| agpl-3.0 |
google-code/betsynetpdf | sumatrapdf/scripts/metadata/metadata.py | 20 | 7301 | import types
def is_valid_signed(bits, val):
    """True if val is an int/long representable as a signed
    two's-complement integer of `bits` bits."""
    if type(val) not in (types.IntType, types.LongType):
        return False
    exp = bits - 1
    lower = -(2 ** exp)
    upper = (2 ** exp) - 1
    return lower <= val <= upper
def is_valid_unsigned(bits, val):
    """True if val is an int/long representable as an unsigned integer
    of `bits` bits, i.e. 0 <= val <= 2**bits - 1."""
    if type(val) not in (types.IntType, types.LongType): return False
    if val < 0: return False
    # The largest representable value is 2**bits - 1; the previous check
    # (val > 2 ** bits) wrongly accepted 2**bits itself.
    if val > 2 ** bits - 1: return False
    return True
def is_valid_string(val):
    """True for None or any byte/unicode string."""
    return val is None or type(val) in (types.StringType, types.UnicodeType)
class Type(object):
    """Base class for all serializable value types; subclasses define
    c_type_class, type_enum and is_valid_val()."""

    def __init__(self, def_val):
        self.c_type_override = None
        self.set_val(def_val)

    def set_val(self, val):
        # NOTE(review): on a failed *first* set_val the assert message reads
        # self.val before it has ever been assigned — confirm intended.
        assert self.is_valid_val(val), "%s is not a valid value of %s" % (str(self.val), str(self))
        self.val = val

    def c_type(self):
        # Per-instance override (e.g. "Foo *") wins over the class default.
        if self.c_type_override is not None:
            return self.c_type_override
        return self.c_type_class

    def get_type_typ_enum(self):
        return self.type_enum

    def is_struct(self):
        return isinstance(self, Struct)

    def is_array(self):
        return isinstance(self, Array)
class Bool(Type):
    c_type_class = "bool"
    type_enum = "TYPE_BOOL"

    def __init__(self, def_val):
        super(Bool, self).__init__(def_val)

    def is_valid_val(self, val):
        # Note: membership uses ==, so integral 1/0 also pass (1 == True).
        return val in (True, False)
class U16(Type):
    # Unsigned 16-bit integer.
    c_type_class = "uint16_t"
    type_enum = "TYPE_U16"

    def is_valid_val(self, val):
        return is_valid_unsigned(16, val)
class I32(Type):
    # Signed 32-bit integer; the only numeric type with a default value (0).
    c_type_class = "int32_t"
    type_enum = "TYPE_I32"

    def __init__(self, def_val=0):
        super(I32, self).__init__(def_val)

    def is_valid_val(self, val):
        return is_valid_signed(32, val)
class U32(Type):
    # Unsigned 32-bit integer.
    c_type_class = "uint32_t"
    type_enum = "TYPE_U32"

    def is_valid_val(self, val):
        return is_valid_unsigned(32, val)
class U64(Type):
    # Unsigned 64-bit integer.
    c_type_class = "uint64_t"
    type_enum = "TYPE_U64"

    def is_valid_val(self, val):
        return is_valid_unsigned(64, val)
# behaves like uint32_t, using unique name to signal intent
class Color(U32):
    type_enum = "TYPE_COLOR"
class String(Type):
    # Narrow (char) string; None is a valid value.
    c_type_class = "const char *"
    type_enum = "TYPE_STR"

    def is_valid_val(self, val):
        return is_valid_string(val)
class WString(Type):
    # Wide (WCHAR) string; None is a valid value.
    c_type_class = "const WCHAR *"
    type_enum = "TYPE_WSTR"

    def is_valid_val(self, val):
        return is_valid_string(val)
class Float(Type):
    c_type_class = "float"
    type_enum = "TYPE_FLOAT"

    def is_valid_val(self, val):
        # Integral values are accepted too; they coerce to float in C.
        return type(val) in (types.IntType, types.LongType, types.FloatType)
# struct is just a base class
# subclasses should have class instance fields which is a list of tuples:
# defining name and type of the struct members:
# fields = [ ("boolField", Bool(True), ("u32Field", U32(32))]
#
# TODO: implement struct inheritance i.e. a subclass should inherit all
# fields from its parent
class Struct(Type):
    c_type_class = ""
    type_enum = "TYPE_STRUCT_PTR"
    fields = []

    def __init__(self, *vals):
        # fields must be a class variable in Struct's subclass.
        # Each instance gets fresh Field copies so values are per-instance.
        self.values = [Field(f.name, f.typ, f.flags) for f in self.fields]
        self.c_type_override = "%s *" % self.name()
        self.offset = None
        # Positional vals override the leading fields' defaults.
        for i in range(len(vals)):
            self.values[i].set_val(vals[i])

    def is_valid_val(self, val):
        return issubclass(val, Struct)

    def name(self):
        return self.__class__.__name__

    def as_str(self):
        # Debug dump: one "name: value" line per field.
        s = str(self) + "\n"
        for v in self.values:
            if isinstance(v, Field):
                s += "%s: %s\n" % (v.name, str(v.val))
        return s

    def __setattr__(self, name, value):
        # special-case self.values, which we refer to
        if name == "values":
            object.__setattr__(self, name, value)
            return
        # Assigning an attribute that matches a field name routes through
        # Field.set_val so the value is validated.
        for field in self.values:
            if field.name == name:
                field.set_val(value)
                return
        object.__setattr__(self, name, value)
class Array(Type):
    c_type_class = ""
    type_enum = "TYPE_ARRAY"

    def __init__(self, typ, values):
        # TODO: we don't support arrays of primitve values, just structs
        assert issubclass(typ, Struct)
        self.typ = typ
        self.values = values
        for v in values:
            assert self.is_valid_val(v)
        # Arrays map to a Vec of struct pointers on the C side.
        self.c_type_override = "Vec<%s*> *" % typ.__name__
        self.offset = None

    def is_valid_val(self, val):
        return isinstance(val, self.typ)

    def name(self):
        try:
            return self.typ.__name__
        except:
            # Dump the offending type before re-raising to aid debugging.
            print(self.typ)
            raise
# those are bit flags
NoStore = 1   # field exists in memory but is never serialized
Compact = 2   # field is serialized in compact (inline) form
class Field(object):
    """A named, typed struct member plus its NoStore/Compact flags."""

    def __init__(self, name, typ_val, flags=0):
        self.name = name
        self.typ = typ_val
        self.flags = flags

        # NoStore and Compact are mutually exclusive.
        if self.is_no_store(): assert not self.is_compact()

        if self.is_compact():
            # Compact storage only works for structs (or arrays of structs)
            # whose own fields are all non-struct.
            to_test = typ_val
            if typ_val.is_array():
                to_test = typ_val.typ
            else:
                assert to_test.is_struct()
            for field in to_test.fields:
                assert not field.is_struct()

        if typ_val.is_struct():
            # TODO: support NULL values for the struct, represented by using
            # class for typ_val
            self.val = typ_val
        elif typ_val.is_array():
            self.val = typ_val
        else:
            # Primitive types store the unwrapped Python value.
            self.val = typ_val.val

    def c_type(self):
        return self.typ.c_type()

    def is_struct(self):
        return self.typ.is_struct()

    def is_signed(self):
        return type(self.typ) == I32

    def is_unsigned(self):
        return type(self.typ) in (Bool, U16, U32, U64, Color)

    def is_bool(self):
        return type(self.typ) == Bool

    def is_color(self):
        return type(self.typ) == Color

    def is_string(self):
        return type(self.typ) in (String, WString)

    def is_float(self):
        return type(self.typ) == Float

    def is_no_store(self):
        return self.flags & NoStore == NoStore

    def is_compact(self):
        return self.flags & Compact == Compact

    def is_array(self):
        return type(self.typ) == Array

    def set_val(self, val):
        # Note: we don't support this for struct or arrays
        assert not (self.is_struct() or self.is_array())
        assert self.typ.is_valid_val(val)
        self.val = val

    def get_typ_enum(self, for_bin=False):
        """Return the C enum expression for this field's type, OR-ing in the
        NoStore/Compact mask bits when applicable."""
        type_enum = self.typ.get_type_typ_enum()
        # binary doesn't have a notion of compact storage
        is_compact = self.is_compact() and not for_bin
        if self.is_no_store() or is_compact:
            s = "(Type)(" + type_enum
            if self.is_no_store():
                s = s + " | TYPE_NO_STORE_MASK"
            if self.is_compact():
                s = s + " | TYPE_STORE_COMPACT_MASK"
            return s + ")"
        return type_enum
| gpl-3.0 |
tkolleh/tkolleh.github.io | node_modules/grunt-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py | 542 | 45270 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This generates makefiles suitable for inclusion into the Android build system
# via an Android.mk file. It is based on make.py, the standard makefile
# generator.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level GypAndroid.mk. This means that all
# variables in .mk-files clobber one another, and furthermore that any
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
import os
import re
import subprocess
generator_default_variables = {
'OS': 'android',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.so',
'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
'LIB_DIR': '$(obj).$(TOOLSET)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
# Boolean to declare that this target does not want its name mangled.
'android_unmangled_name',
# Map of android build system variables to set.
'aosp_build_settings',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
ALL_MODULES_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify.
gyp_all_modules:
"""
header = """\
# This file is generated by gyp; do not edit.
"""
# Map gyp target types to Android module classes.
MODULE_CLASSES = {
'static_library': 'STATIC_LIBRARIES',
'shared_library': 'SHARED_LIBRARIES',
'executable': 'EXECUTABLES',
}
def IsCPPExtension(ext):
    # True when the make generator maps this source extension to a C++ compile.
    return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
def Sourceify(path):
    """Convert a path to its source directory form. The Android backend does not
    support options.generator_output, so this function is a noop."""
    # Kept for interface parity with the make generator, which transforms paths.
    return path
# Map from qualified target to path to output.
# For Android, the target of these maps is a tuple ('static', 'modulename'),
# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
# since we link by module.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class AndroidMkWriter(object):
"""AndroidMkWriter packages up the writing of one target-specific Android.mk.
Its only real entry point is Write(), and is mostly used for namespacing.
"""
def __init__(self, android_top_dir):
self.android_top_dir = android_top_dir
  def Write(self, qualified_target, relative_target, base_path, output_filename,
            spec, configs, part_of_all, write_alias_target, sdk_version):
    """The main entry point: writes a .mk file for a single target.

    Arguments:
      qualified_target: target we're generating
      relative_target: qualified target name relative to the root
      base_path: path relative to source root we're building in, used to resolve
                 target-relative paths
      output_filename: output .mk file name to write
      spec, configs: gyp info
      part_of_all: flag indicating this target is part of 'all'
      write_alias_target: flag indicating whether to create short aliases for
                          this target
      sdk_version: what to emit for LOCAL_SDK_VERSION in output

    Returns:
      The Android module name used for this target (also recorded in the
      global target_outputs/target_link_deps maps for dependent targets).
    """
    gyp.common.EnsureDirExists(output_filename)
    self.fp = open(output_filename, 'w')
    self.fp.write(header)

    self.qualified_target = qualified_target
    self.relative_target = relative_target
    self.path = base_path
    self.target = spec['target_name']
    self.type = spec['type']
    self.toolset = spec['toolset']

    deps, link_deps = self.ComputeDeps(spec)

    # Some of the generation below can add extra output, sources, or
    # link dependencies.  All of the out params of the functions that
    # follow use names like extra_foo.
    extra_outputs = []
    extra_sources = []

    self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
    self.android_module = self.ComputeAndroidModule(spec)
    (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
    self.output = self.output_binary = self.ComputeOutput(spec)

    # Standard header.
    self.WriteLn('include $(CLEAR_VARS)\n')

    # Module class and name.
    self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
    self.WriteLn('LOCAL_MODULE := ' + self.android_module)
    # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
    # The library module classes fail if the stem is set. ComputeOutputParts
    # makes sure that stem == modulename in these cases.
    if self.android_stem != self.android_module:
      self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
    self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
    if self.toolset == 'host':
      self.WriteLn('LOCAL_IS_HOST_MODULE := true')
      self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
    else:
      self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
                   '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
      self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)

    # Grab output directories; needed for Actions and Rules.
    if self.toolset == 'host':
      self.WriteLn('gyp_intermediate_dir := '
                   '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
    else:
      self.WriteLn('gyp_intermediate_dir := '
                   '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
    self.WriteLn('gyp_shared_intermediate_dir := '
                 '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
    self.WriteLn()

    # List files this target depends on so that actions/rules/copies/sources
    # can depend on the list.
    # TODO: doesn't pull in things through transitive link deps; needed?
    target_dependencies = [x[1] for x in deps if x[0] == 'path']
    self.WriteLn('# Make sure our deps are built first.')
    self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
                   local_pathify=True)

    # Actions must come first, since they can generate more OBJs for use below.
    if 'actions' in spec:
      self.WriteActions(spec['actions'], extra_sources, extra_outputs)

    # Rules must be early like actions.
    if 'rules' in spec:
      self.WriteRules(spec['rules'], extra_sources, extra_outputs)

    if 'copies' in spec:
      self.WriteCopies(spec['copies'], extra_outputs)

    # GYP generated outputs.
    self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)

    # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
    # on both our dependency targets and our generated files.
    self.WriteLn('# Make sure our deps and generated files are built first.')
    self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
                 '$(GYP_GENERATED_OUTPUTS)')
    self.WriteLn()

    # Sources.
    if spec.get('sources', []) or extra_sources:
      self.WriteSources(spec, configs, extra_sources)

    self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
                     write_alias_target)

    # Update global list of target outputs, used in dependency tracking.
    target_outputs[qualified_target] = ('path', self.output_binary)

    # Update global list of link dependencies.
    if self.type == 'static_library':
      target_link_deps[qualified_target] = ('static', self.android_module)
    elif self.type == 'shared_library':
      target_link_deps[qualified_target] = ('shared', self.android_module)
    self.fp.close()
    return self.android_module
  def WriteActions(self, actions, extra_sources, extra_outputs):
    """Write Makefile code for any 'actions' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   actions (used to make other pieces dependent on these
                   actions)

    Raises gyp.common.GypError if an input or output filename contains a
    space (outside of a make macro invocation).
    """
    for action in actions:
      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                      action['action_name']))
      self.WriteLn('### Rules for action "%s":' % action['action_name'])
      inputs = action['inputs']
      outputs = action['outputs']

      # Build up a list of outputs.
      # Collect the output dirs we'll need.
      dirs = set()
      for out in outputs:
        if not out.startswith('$'):
          print ('WARNING: Action for target "%s" writes output to local path '
                 '"%s".' % (self.target, out))
        dir = os.path.split(out)[0]
        if dir:
          dirs.add(dir)
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += outputs

      # Prepare the actual command.
      command = gyp.common.EncodePOSIXShellList(action['action'])
      if 'message' in action:
        quiet_cmd = 'Gyp action: %s ($@)' % action['message']
      else:
        quiet_cmd = 'Gyp action: %s ($@)' % name
      # Create the output directories up front, so the action can rely on them.
      if len(dirs) > 0:
        command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

      cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
      command = cd_action + command

      # The makefile rules are all relative to the top dir, but the gyp actions
      # are defined relative to their containing dir.  This replaces the gyp_*
      # variables for the action rule with an absolute version so that the
      # output goes in the right place.
      # Only write the gyp_* rules for the "primary" output (:1);
      # it's superfluous for the "extra outputs", and this avoids accidentally
      # writing duplicate dummy rules for those outputs.
      main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
      self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
      self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
      self.WriteLn('%s: gyp_intermediate_dir := '
                   '$(abspath $(gyp_intermediate_dir))' % main_output)
      self.WriteLn('%s: gyp_shared_intermediate_dir := '
                   '$(abspath $(gyp_shared_intermediate_dir))' % main_output)

      # Android's envsetup.sh adds a number of directories to the path including
      # the built host binary directory. This causes actions/rules invoked by
      # gyp to sometimes use these instead of system versions, e.g. bison.
      # The built host binaries may not be suitable, and can cause errors.
      # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
      # set by envsetup.
      self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
                   % main_output)

      # Don't allow spaces in input/output filenames, but make an exception for
      # filenames which start with '$(' since it's okay for there to be spaces
      # inside of make function/macro invocations.
      for input in inputs:
        if not input.startswith('$(') and ' ' in input:
          raise gyp.common.GypError(
              'Action input filename "%s" in target %s contains a space' %
              (input, self.target))
      for output in outputs:
        if not output.startswith('$(') and ' ' in output:
          raise gyp.common.GypError(
              'Action output filename "%s" in target %s contains a space' %
              (output, self.target))

      self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                   (main_output, ' '.join(map(self.LocalPathify, inputs))))
      self.WriteLn('\t@echo "%s"' % quiet_cmd)
      self.WriteLn('\t$(hide)%s\n' % command)
      for output in outputs[1:]:
        # Make each output depend on the main output, with an empty command
        # to force make to notice that the mtime has changed.
        self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))

      extra_outputs += outputs
      self.WriteLn()

    self.WriteLn()
  def WriteRules(self, rules, extra_sources, extra_outputs):
    """Write Makefile code for any 'rules' from the gyp input.

    extra_sources: a list that will be filled in with newly generated source
                   files, if any
    extra_outputs: a list that will be filled in with any outputs of these
                   rules (used to make other pieces dependent on these rules)
    """
    if len(rules) == 0:
      return

    for rule in rules:
      if len(rule.get('rule_sources', [])) == 0:
        continue
      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                      rule['rule_name']))
      self.WriteLn('\n### Generated for rule "%s":' % name)
      # Dump the raw rule dict as a comment, for debugging the generator.
      self.WriteLn('# "%s":' % rule)
      inputs = rule.get('inputs')
      for rule_source in rule.get('rule_sources', []):
        (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
        (rule_source_root, rule_source_ext) = \
            os.path.splitext(rule_source_basename)
        # Expand %(INPUT_ROOT)s / %(INPUT_DIRNAME)s placeholders per source.
        outputs = [self.ExpandInputRoot(out, rule_source_root,
                                        rule_source_dirname)
                   for out in rule['outputs']]

        dirs = set()
        for out in outputs:
          if not out.startswith('$'):
            print ('WARNING: Rule for target %s writes output to local path %s'
                   % (self.target, out))
          dir = os.path.dirname(out)
          if dir:
            dirs.add(dir)
        extra_outputs += outputs
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources.extend(outputs)

        components = []
        for component in rule['action']:
          component = self.ExpandInputRoot(component, rule_source_root,
                                           rule_source_dirname)
          if '$(RULE_SOURCES)' in component:
            component = component.replace('$(RULE_SOURCES)',
                                          rule_source)
          components.append(component)

        command = gyp.common.EncodePOSIXShellList(components)
        cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
        command = cd_action + command
        # Create output directories before running the rule command.
        if dirs:
          command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command

        # We set up a rule to build the first output, and then set up
        # a rule for each additional output to depend on the first.
        outputs = map(self.LocalPathify, outputs)
        main_output = outputs[0]
        self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
        self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
        self.WriteLn('%s: gyp_intermediate_dir := '
                     '$(abspath $(gyp_intermediate_dir))' % main_output)
        self.WriteLn('%s: gyp_shared_intermediate_dir := '
                     '$(abspath $(gyp_shared_intermediate_dir))' % main_output)

        # See explanation in WriteActions.
        self.WriteLn('%s: export PATH := '
                     '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)

        main_output_deps = self.LocalPathify(rule_source)
        if inputs:
          main_output_deps += ' '
          main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])

        self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
                     (main_output, main_output_deps))
        self.WriteLn('\t%s\n' % command)
        for output in outputs[1:]:
          # Make each output depend on the main output, with an empty command
          # to force make to notice that the mtime has changed.
          self.WriteLn('%s: %s ;' % (output, main_output))
      self.WriteLn()

    self.WriteLn()
  def WriteCopies(self, copies, extra_outputs):
    """Write Makefile code for any 'copies' from the gyp input.

    extra_outputs: a list that will be filled in with any outputs of this action
                   (used to make other pieces dependent on this action)
    """
    self.WriteLn('### Generated for copy rule.')

    variable = make.StringToMakefileVariable(self.relative_target + '_copies')
    outputs = []
    for copy in copies:
      for path in copy['files']:
        # The Android build system does not allow generation of files into the
        # source tree. The destination should start with a variable, which will
        # typically be $(gyp_intermediate_dir) or
        # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
        # because some of the gyp tests depend on this.
        if not copy['destination'].startswith('$'):
          print ('WARNING: Copy rule for target %s writes output to '
                 'local path %s' % (self.target, copy['destination']))

        # LocalPathify() calls normpath, stripping trailing slashes.
        path = Sourceify(self.LocalPathify(path))
        filename = os.path.split(path)[1]
        output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
                                                          filename)))

        # Copy with $(ACP) (Android's cp), preserving mode/timestamps (-rpf).
        self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
                     (output, path))
        self.WriteLn('\t@echo Copying: $@')
        self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
        self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
        self.WriteLn()
        outputs.append(output)
    # Collect all copied files into one make variable for easy dependence.
    self.WriteLn('%s = %s' % (variable,
                              ' '.join(map(make.QuoteSpaces, outputs))))
    extra_outputs.append('$(%s)' % variable)
    self.WriteLn()
  def WriteSourceFlags(self, spec, configs):
    """Write out the flags and include paths used to compile source files for
    the current target.

    Per-configuration values are written into MY_CFLAGS_<config> etc., and the
    active one is selected at build time via $(GYP_CONFIGURATION).

    Args:
      spec, configs: input from gyp.
    """
    for configname, config in sorted(configs.iteritems()):
      extracted_includes = []

      self.WriteLn('\n# Flags passed to both C and C++ files.')
      cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
          config.get('cflags', []) + config.get('cflags_c', []))
      extracted_includes.extend(includes_from_cflags)
      self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)

      self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
                     prefix='-D', quoter=make.EscapeCppDefine)

      self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
      includes = list(config.get('include_dirs', []))
      includes.extend(extracted_includes)
      includes = map(Sourceify, map(self.LocalPathify, includes))
      includes = self.NormalizeIncludePaths(includes)
      self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)

      self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
      self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)

    self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
                 '$(MY_DEFS_$(GYP_CONFIGURATION))')
    # Undefine ANDROID for host modules
    # TODO: the source code should not use macro ANDROID to tell if it's host
    # or target module.
    if self.toolset == 'host':
      self.WriteLn('# Undefine ANDROID for host modules')
      self.WriteLn('LOCAL_CFLAGS += -UANDROID')
    self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
                 '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
    self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
    # Android uses separate flags for assembly file invocations, but gyp expects
    # the same CFLAGS to be applied:
    self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
  def WriteSources(self, spec, configs, extra_sources):
    """Write Makefile code for any 'sources' from the gyp input.

    These are source files necessary to build the current target.
    We need to handle shared_intermediate directory source files as
    a special case by copying them to the intermediate directory and
    treating them as generated sources. Otherwise the Android build
    rules won't pick them up.

    Args:
      spec, configs: input from gyp.
      extra_sources: Sources generated from Actions or Rules.
    """
    sources = filter(make.Compilable, spec.get('sources', []))
    generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
    extra_sources = filter(make.Compilable, extra_sources)

    # Determine and output the C++ extension used by these sources.
    # We simply find the first C++ file and use that extension.
    all_sources = sources + extra_sources
    local_cpp_extension = '.cpp'
    for source in all_sources:
      (root, ext) = os.path.splitext(source)
      if IsCPPExtension(ext):
        local_cpp_extension = ext
        break
    if local_cpp_extension != '.cpp':
      self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)

    # We need to move any non-generated sources that are coming from the
    # shared intermediate directory out of LOCAL_SRC_FILES and put them
    # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
    # that don't match our local_cpp_extension, since Android will only
    # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
    local_files = []
    for source in sources:
      (root, ext) = os.path.splitext(source)
      if '$(gyp_shared_intermediate_dir)' in source:
        extra_sources.append(source)
      elif '$(gyp_intermediate_dir)' in source:
        extra_sources.append(source)
      elif IsCPPExtension(ext) and ext != local_cpp_extension:
        extra_sources.append(source)
      else:
        local_files.append(os.path.normpath(os.path.join(self.path, source)))

    # For any generated source, if it is coming from the shared intermediate
    # directory then we add a Make rule to copy them to the local intermediate
    # directory first. This is because the Android LOCAL_GENERATED_SOURCES
    # must be in the local module intermediate directory for the compile rules
    # to work properly. If the file has the wrong C++ extension, then we add
    # a rule to copy that to intermediates and use the new version.
    final_generated_sources = []
    # If a source file gets copied, we still need to add the original source
    # directory as header search path, for GCC searches headers in the
    # directory that contains the source file by default.
    origin_src_dirs = []
    for source in extra_sources:
      local_file = source
      if not '$(gyp_intermediate_dir)/' in local_file:
        basename = os.path.basename(local_file)
        local_file = '$(gyp_intermediate_dir)/' + basename
      (root, ext) = os.path.splitext(local_file)
      if IsCPPExtension(ext) and ext != local_cpp_extension:
        local_file = root + local_cpp_extension
      if local_file != source:
        # Emit the copy rule only when the file actually moves or is renamed.
        self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
        self.WriteLn('\tmkdir -p $(@D); cp $< $@')
        origin_src_dirs.append(os.path.dirname(source))
      final_generated_sources.append(local_file)

    # We add back in all of the non-compilable stuff to make sure that the
    # make rules have dependencies on them.
    final_generated_sources.extend(generated_not_sources)
    self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')

    origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
    origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
    self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')

    self.WriteList(local_files, 'LOCAL_SRC_FILES')

    # Write out the flags used to compile the source; this must be done last
    # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
    self.WriteSourceFlags(spec, configs)
def ComputeAndroidModule(self, spec):
"""Return the Android module name used for a gyp spec.
We use the complete qualified target name to avoid collisions between
duplicate targets in different directories. We also add a suffix to
distinguish gyp-generated module names.
"""
if int(spec.get('android_unmangled_name', 0)):
assert self.type != 'shared_library' or self.target.startswith('lib')
return self.target
if self.type == 'shared_library':
# For reasons of convention, the Android build system requires that all
# shared library modules are named 'libfoo' when generating -l flags.
prefix = 'lib_'
else:
prefix = ''
if spec['toolset'] == 'host':
suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
else:
suffix = '_gyp'
if self.path:
middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
else:
middle = make.StringToMakefileVariable(self.target)
return ''.join([prefix, middle, suffix])
def ComputeOutputParts(self, spec):
"""Return the 'output basename' of a gyp spec, split into filename + ext.
Android libraries must be named the same thing as their module name,
otherwise the linker can't find them, so product_name and so on must be
ignored if we are building a library, and the "lib" prepending is
not done for Android.
"""
assert self.type != 'loadable_module' # TODO: not supported?
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.a'
elif self.type == 'shared_library':
target = self.ComputeAndroidModule(spec)
target_ext = '.so'
elif self.type == 'none':
target_ext = '.stamp'
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
if self.type != 'static_library' and self.type != 'shared_library':
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
target_stem = target_prefix + target
return (target_stem, target_ext)
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
return ''.join(self.ComputeOutputParts(spec))
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
if self.type == 'executable':
# We install host executables into shared_intermediate_dir so they can be
# run by gyp rules that refer to PRODUCT_DIR.
path = '$(gyp_shared_intermediate_dir)'
elif self.type == 'shared_library':
if self.toolset == 'host':
path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
else:
path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
else:
# Other targets just get built into their intermediate dir.
if self.toolset == 'host':
path = ('$(call intermediates-dir-for,%s,%s,true,,'
'$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
self.android_module))
else:
path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
% (self.android_class, self.android_module))
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory.
Args:
include_paths: A list of unprocessed include paths.
Returns:
A list of normalized include paths.
"""
normalized = []
for path in include_paths:
if path[0] == '/':
path = gyp.common.RelativePath(path, self.android_top_dir)
normalized.append(path)
return normalized
def ExtractIncludesFromCFlags(self, cflags):
"""Extract includes "-I..." out from cflags
Args:
cflags: A list of compiler flags, which may be mixed with "-I.."
Returns:
A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
"""
clean_cflags = []
include_paths = []
for flag in cflags:
if flag.startswith('-I'):
include_paths.append(flag[2:])
else:
clean_cflags.append(flag)
return (clean_cflags, include_paths)
def FilterLibraries(self, libraries):
"""Filter the 'libraries' key to separate things that shouldn't be ldflags.
Library entries that look like filenames should be converted to android
module names instead of being passed to the linker as flags.
Args:
libraries: the value of spec.get('libraries')
Returns:
A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
"""
static_lib_modules = []
dynamic_lib_modules = []
ldflags = []
for libs in libraries:
# Libs can have multiple words.
for lib in libs.split():
# Filter the system libraries, which are added by default by the Android
# build system.
if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
lib.endswith('libgcc.a')):
continue
match = re.search(r'([^/]+)\.a$', lib)
if match:
static_lib_modules.append(match.group(1))
continue
match = re.search(r'([^/]+)\.so$', lib)
if match:
dynamic_lib_modules.append(match.group(1))
continue
if lib.startswith('-l'):
ldflags.append(lib)
return (static_lib_modules, dynamic_lib_modules, ldflags)
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
  def WriteTargetFlags(self, spec, configs, link_deps):
    """Write Makefile code to specify the link flags and library dependencies.

    spec, configs: input from gyp.
    link_deps: link dependency list; see ComputeDeps()
    """
    # Libraries (i.e. -lfoo)
    # These must be included even for static libraries as some of them provide
    # implicit include paths through the build system.
    libraries = gyp.common.uniquer(spec.get('libraries', []))
    static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)

    if self.type != 'static_library':
      for configname, config in sorted(configs.iteritems()):
        ldflags = list(config.get('ldflags', []))
        self.WriteLn('')
        self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
      self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
      self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
                   '$(LOCAL_GYP_LIBS)')

    # Link dependencies (i.e. other gyp targets this target depends on)
    # These need not be included for static libraries as within the gyp build
    # we do not use the implicit include path mechanism.
    if self.type != 'static_library':
      static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
      shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
    else:
      static_link_deps = []
      shared_link_deps = []

    # Only write the lists if they are non-empty.
    if static_libs or static_link_deps:
      self.WriteLn('')
      self.WriteList(static_libs + static_link_deps,
                     'LOCAL_STATIC_LIBRARIES')
      self.WriteLn('# Enable grouping to fix circular references')
      self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
    if dynamic_libs or shared_link_deps:
      self.WriteLn('')
      self.WriteList(dynamic_libs + shared_link_deps,
                     'LOCAL_SHARED_LIBRARIES')
  def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
                  write_alias_target):
    """Write Makefile code to produce the final target of the gyp spec.

    spec, configs: input from gyp.
    deps, link_deps: dependency lists; see ComputeDeps()
    part_of_all: flag indicating this target is part of 'all'
    write_alias_target: flag indicating whether to create short aliases for this
                        target
    """
    self.WriteLn('### Rules for final target.')

    if self.type != 'none':
      self.WriteTargetFlags(spec, configs, link_deps)

    settings = spec.get('aosp_build_settings', {})
    if settings:
      self.WriteLn('### Set directly by aosp_build_settings.')
      for k, v in settings.iteritems():
        if isinstance(v, list):
          self.WriteList(v, k)
        else:
          self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
      self.WriteLn('')

    # Add to the set of targets which represent the gyp 'all' target. We use the
    # name 'gyp_all_modules' as the Android build system doesn't allow the use
    # of the Make target 'all' and because 'all_modules' is the equivalent of
    # the Make target 'all' on Android.
    if part_of_all and write_alias_target:
      self.WriteLn('# Add target alias to "gyp_all_modules" target.')
      self.WriteLn('.PHONY: gyp_all_modules')
      self.WriteLn('gyp_all_modules: %s' % self.android_module)
      self.WriteLn('')

    # Add an alias from the gyp target name to the Android module name. This
    # simplifies manual builds of the target, and is required by the test
    # framework.
    if self.target != self.android_module and write_alias_target:
      self.WriteLn('# Alias gyp target name.')
      self.WriteLn('.PHONY: %s' % self.target)
      self.WriteLn('%s: %s' % (self.target, self.android_module))
      self.WriteLn('')

    # Add the command to trigger build of the target type depending
    # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
    # NOTE: This has to come last!
    modifier = ''
    if self.toolset == 'host':
      modifier = 'HOST_'
    if self.type == 'static_library':
      self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
    elif self.type == 'shared_library':
      self.WriteLn('LOCAL_PRELINK_MODULE := false')
      self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
    elif self.type == 'executable':
      # Executables are for build and test purposes only, so they're installed
      # to a directory that doesn't get included in the system image.
      self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
      self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
    else:
      # 'none' (and other) targets: emit a hand-rolled stamp rule instead of
      # including one of the standard BUILD_* makefiles.
      self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
      self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
      if self.toolset == 'target':
        self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
      else:
        self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
      self.WriteLn()
      self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
      self.WriteLn()
      self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
      self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
      self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
      self.WriteLn('\t$(hide) touch $@')
      self.WriteLn()
      self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
def WriteList(self, value_list, variable=None, prefix='',
quoter=make.QuoteIfNecessary, local_pathify=False):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ''
if value_list:
value_list = [quoter(prefix + l) for l in value_list]
if local_pathify:
value_list = [self.LocalPathify(l) for l in value_list]
values = ' \\\n\t' + ' \\\n\t'.join(value_list)
self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteLn(self, text=''):
self.fp.write(text + '\n')
def LocalPathify(self, path):
"""Convert a subdirectory-relative path into a normalized path which starts
with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
Absolute paths, or paths that contain variables, are just normalized."""
if '$(' in path or os.path.isabs(path):
# path is not a file in the project tree in this case, but calling
# normpath is still important for trimming trailing slashes.
return os.path.normpath(path)
local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
local_path = os.path.normpath(local_path)
# Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
# - i.e. that the resulting path is still inside the project tree. The
# path may legitimately have ended up containing just $(LOCAL_PATH), though,
# so we don't look for a slash.
assert local_path.startswith('$(LOCAL_PATH)'), (
'Path %s attempts to escape from gyp path %s !)' % (path, self.path))
return local_path
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return os.path.normpath(path)
def PerformBuild(data, configurations, params):
  """Invoke the Android build system on the generated GypAndroid.mk.

  Runs 'make gyp_all_modules' from $ANDROID_BUILD_TOP with the generated
  makefile passed as ONE_SHOT_MAKEFILE.  Raises subprocess.CalledProcessError
  if the build fails.
  """
  # The android backend only supports the default configuration.
  options = params['options']
  makefile = os.path.abspath(os.path.join(options.toplevel_dir,
                                          'GypAndroid.mk'))
  env = dict(os.environ)
  env['ONE_SHOT_MAKEFILE'] = makefile
  arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
  print 'Building: %s' % arguments
  subprocess.check_call(arguments, env=env)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write the master GypAndroid.mk plus one
  per-target .mk fragment, and include each fragment from the master file.
  """
  options = params['options']
  generator_flags = params.get('generator_flags', {})
  builddir_name = generator_flags.get('output_dir', 'out')  # NOTE(review): unused here
  limit_to_target_all = generator_flags.get('limit_to_target_all', False)
  write_alias_targets = generator_flags.get('write_alias_targets', True)
  sdk_version = generator_flags.get('aosp_sdk_version', 19)
  android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
  assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'

  def CalculateMakefilePath(build_file, base_name):
    """Determine where to write a Makefile for a given gyp file."""
    # Paths in gyp files are relative to the .gyp file, but we want
    # paths relative to the source root for the master makefile. Grab
    # the path of the .gyp file as the base to relativize against.
    # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.depth)
    # We write the file in the base_path directory.
    output_file = os.path.join(options.depth, base_path, base_name)
    assert not options.generator_output, (
        'The Android backend does not support options.generator_output.')
    base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                        options.toplevel_dir)
    return base_path, output_file

  # TODO: search for the first non-'Default' target. This can go
  # away when we add verification that all targets have the
  # necessary configurations.
  default_configuration = None
  # NOTE(review): toolsets and srcdir below are computed but unused in this
  # function; confirm before removing.
  toolsets = set([target_dicts[target]['toolset'] for target in target_list])
  for target in target_list:
    spec = target_dicts[target]
    if spec['default_configuration'] != 'Default':
      default_configuration = spec['default_configuration']
      break
  if not default_configuration:
    default_configuration = 'Default'

  srcdir = '.'
  makefile_name = 'GypAndroid' + options.suffix + '.mk'
  makefile_path = os.path.join(options.toplevel_dir, makefile_name)
  assert not options.generator_output, (
      'The Android backend does not support options.generator_output.')
  gyp.common.EnsureDirExists(makefile_path)
  root_makefile = open(makefile_path, 'w')
  root_makefile.write(header)

  # We set LOCAL_PATH just once, here, to the top of the project tree. This
  # allows all the other paths we use to be relative to the Android.mk file,
  # as the Android build system expects.
  root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')

  # Find the list of targets that derive from the gyp file(s) being built.
  needed_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      needed_targets.add(target)

  build_files = set()
  include_list = set()
  android_modules = {}
  for qualified_target in target_list:
    build_file, target, toolset = gyp.common.ParseQualifiedTarget(
        qualified_target)
    relative_build_file = gyp.common.RelativePath(build_file,
                                                  options.toplevel_dir)
    build_files.add(relative_build_file)
    included_files = data[build_file]['included_files']
    for included_file in included_files:
      # The included_files entries are relative to the dir of the build file
      # that included them, so we have to undo that and then make them relative
      # to the root dir.
      relative_include_file = gyp.common.RelativePath(
          gyp.common.UnrelativePath(included_file, build_file),
          options.toplevel_dir)
      abs_include_file = os.path.abspath(relative_include_file)
      # If the include file is from the ~/.gyp dir, we should use absolute path
      # so that relocating the src dir doesn't break the path.
      if (params['home_dot_gyp'] and
          abs_include_file.startswith(params['home_dot_gyp'])):
        build_files.add(abs_include_file)
      else:
        build_files.add(relative_include_file)

    base_path, output_file = CalculateMakefilePath(build_file,
        target + '.' + toolset + options.suffix + '.mk')

    spec = target_dicts[qualified_target]
    configs = spec['configurations']

    part_of_all = qualified_target in needed_targets
    if limit_to_target_all and not part_of_all:
      # Skip targets not reachable from the requested gyp files.
      continue

    relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
                                                 toolset)
    writer = AndroidMkWriter(android_top_dir)
    android_module = writer.Write(qualified_target, relative_target, base_path,
                                  output_file, spec, configs,
                                  part_of_all=part_of_all,
                                  write_alias_target=write_alias_targets,
                                  sdk_version=sdk_version)
    if android_module in android_modules:
      # Duplicate LOCAL_MODULE names would silently collide in the Android
      # build, so abort generation instead.
      print ('ERROR: Android module names must be unique. The following '
             'targets both generate Android module name %s.\n %s\n %s' %
             (android_module, android_modules[android_module],
              qualified_target))
      return
    android_modules[android_module] = qualified_target

    # Our root_makefile lives at the source root. Compute the relative path
    # from there to the output_file for including.
    mkfile_rel_path = gyp.common.RelativePath(output_file,
                                              os.path.dirname(makefile_path))
    include_list.add(mkfile_rel_path)

  root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
  root_makefile.write('GYP_VAR_PREFIX ?=\n')
  root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
  root_makefile.write('GYP_HOST_MULTILIB ?=\n')

  # Write out the sorted list of includes.
  root_makefile.write('\n')
  for include_file in sorted(include_list):
    root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
  root_makefile.write('\n')

  if write_alias_targets:
    root_makefile.write(ALL_MODULES_FOOTER)

  root_makefile.close()
| mit |
Sing-Li/go-buildpack | builds/runtimes/python-2.7.6/lib/python2.7/test/test_capi.py | 50 | 4656 | # Run the _testcapi module tests (tests for the Python/C API): by defn,
# these are all functions _testcapi exports whose name begins with 'test_'.
from __future__ import with_statement
import sys
import time
import random
import unittest
from test import test_support
try:
import thread
import threading
except ImportError:
thread = None
threading = None
import _testcapi
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestPendingCalls(unittest.TestCase):
    """Exercise Py_AddPendingCall via _testcapi._pending_threadfunc."""

    def pendingcalls_submit(self, l, n):
        """Submit n pending-call callbacks; each appends None to l when run."""
        def callback():
            #this function can be interrupted by thread switching so let's
            #use an atomic operation
            l.append(None)

        for i in range(n):
            time.sleep(random.random()*0.02) #0.01 secs on average
            #try submitting callback until successful.
            #rely on regular interrupt to flush queue if we are
            #unsuccessful.
            while True:
                if _testcapi._pending_threadfunc(callback):
                    break;

    def pendingcalls_wait(self, l, n, context = None):
        """Busy-wait on the main thread until all n callbacks have run."""
        #now, stick around until l[0] has grown to 10
        count = 0;
        while len(l) != n:
            #this busy loop is where we expect to be interrupted to
            #run our callbacks. Note that callbacks are only run on the
            #main thread
            if False and test_support.verbose:
                print "(%i)"%(len(l),),
            for i in xrange(1000):
                a = i*i
            if context and not context.event.is_set():
                # Submitting threads are still running; don't count towards
                # the timeout yet.
                continue
            count += 1
            self.assertTrue(count < 10000,
                "timeout waiting for %i callbacks, got %i"%(n, len(l)))
        if False and test_support.verbose:
            print "(%i)"%(len(l),)

    def test_pendingcalls_threaded(self):
        #do every callback on a separate thread
        n = 32 #total callbacks
        threads = []

        class foo(object):pass
        context = foo()
        context.l = []
        context.n = 2 #submits per thread
        context.nThreads = n // context.n
        context.nFinished = 0
        context.lock = threading.Lock()
        context.event = threading.Event()

        for i in range(context.nThreads):
            t = threading.Thread(target=self.pendingcalls_thread, args = (context,))
            t.start()
            threads.append(t)

        self.pendingcalls_wait(context.l, n, context)

        for t in threads:
            t.join()

    def pendingcalls_thread(self, context):
        """Worker body: submit this thread's share, then signal when all done."""
        try:
            self.pendingcalls_submit(context.l, context.n)
        finally:
            with context.lock:
                context.nFinished += 1
                nFinished = context.nFinished
                if False and test_support.verbose:
                    print "finished threads: ", nFinished
            if nFinished == context.nThreads:
                context.event.set()

    def test_pendingcalls_non_threaded(self):
        #again, just using the main thread, likely they will all be dispatched at
        #once. It is ok to ask for too many, because we loop until we find a slot.
        #the loop can be interrupted to dispatch.
        #there are only 32 dispatch slots, so we go for twice that!
        l = []
        n = 64
        self.pendingcalls_submit(l, n)
        self.pendingcalls_wait(l, n)
@unittest.skipUnless(threading and thread, 'Threading required for this test.')
class TestThreadState(unittest.TestCase):
    """Thread-state checks driven via _testcapi._test_thread_state."""

    @test_support.reap_threads
    def test_thread_state(self):
        # some extra thread-state tests driven via _testcapi
        def target():
            idents = []

            def callback():
                idents.append(thread.get_ident())

            _testcapi._test_thread_state(callback)
            # NOTE(review): presumably keeps the callback object alive while
            # the C side still references it -- confirm against _testcapi.
            a = b = callback
            time.sleep(1)
            # Check our main thread is in the list exactly 3 times.
            self.assertEqual(idents.count(thread.get_ident()), 3,
                             "Couldn't find main thread correctly in the list")

        # Run the check both on the current thread and on a fresh thread.
        target()
        t = threading.Thread(target=target)
        t.start()
        t.join()
def test_main():
    """Run every C-level test_* function exported by _testcapi, then the
    Python-level unittest classes defined above."""
    for name in dir(_testcapi):
        if name.startswith('test_'):
            test = getattr(_testcapi, name)
            if test_support.verbose:
                print "internal", name
            try:
                test()
            except _testcapi.error:
                raise test_support.TestFailed, sys.exc_info()[1]

    test_support.run_unittest(TestPendingCalls, TestThreadState)

if __name__ == "__main__":
    test_main()
| mit |
azureplus/hue | desktop/core/ext-py/Django-1.6.10/django/db/backends/mysql/creation.py | 114 | 3158 | from django.db.backends.creation import BaseDatabaseCreation
class DatabaseCreation(BaseDatabaseCreation):
    """MySQL-specific test-database creation and schema SQL generation."""

    # This dictionary maps Field objects to their associated MySQL column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    data_types = {
        'AutoField': 'integer AUTO_INCREMENT',
        'BinaryField': 'longblob',
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'double precision',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer UNSIGNED',
        'PositiveSmallIntegerField': 'smallint UNSIGNED',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'longtext',
        'TimeField': 'time',
    }

    def sql_table_creation_suffix(self):
        # Append CHARACTER SET / COLLATE clauses for the test database when
        # configured in the connection settings.
        suffix = []
        if self.connection.settings_dict['TEST_CHARSET']:
            suffix.append('CHARACTER SET %s' % self.connection.settings_dict['TEST_CHARSET'])
        if self.connection.settings_dict['TEST_COLLATION']:
            suffix.append('COLLATE %s' % self.connection.settings_dict['TEST_COLLATION'])
        return ' '.join(suffix)

    def sql_for_inline_foreign_key_references(self, model, field, known_models, style):
        "All inline references are pending under MySQL"
        return [], True

    def sql_destroy_indexes_for_fields(self, model, fields, style):
        # Build a DROP INDEX statement for the composite index over ``fields``.
        if len(fields) == 1 and fields[0].db_tablespace:
            tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
        elif model._meta.db_tablespace:
            tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
        else:
            tablespace_sql = ""
        if tablespace_sql:
            tablespace_sql = " " + tablespace_sql

        # NOTE(review): tablespace_sql and field_names are computed but never
        # used in the returned statement -- apparent leftovers from the
        # matching sql_indexes_for_fields implementation; confirm before
        # removing.
        field_names = []
        qn = self.connection.ops.quote_name
        for f in fields:
            field_names.append(style.SQL_FIELD(qn(f.column)))

        index_name = "%s_%s" % (model._meta.db_table, self._digest([f.name for f in fields]))

        from ..util import truncate_name

        return [
            style.SQL_KEYWORD("DROP INDEX") + " " +
            style.SQL_TABLE(qn(truncate_name(index_name, self.connection.ops.max_name_length()))) + " " +
            style.SQL_KEYWORD("ON") + " " +
            style.SQL_TABLE(qn(model._meta.db_table)) + ";",
        ]
| apache-2.0 |
Edraak/circleci-edx-platform | common/djangoapps/util/milestones_helpers.py | 1 | 14429 | # pylint: disable=invalid-name
"""
Utility library for working with the edx-milestones app
"""
from django.conf import settings
from django.utils.translation import ugettext as _
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import modulestore
# Supported milestone namespace types, keyed by symbolic name.
NAMESPACE_CHOICES = {'ENTRANCE_EXAM': 'entrance_exams'}


def get_namespace_choices():
    """Return the mapping of supported milestone namespace choices."""
    return NAMESPACE_CHOICES
def is_entrance_exams_enabled():
    """
    Checks to see if the Entrance Exams feature is enabled

    Use this operation instead of checking the feature flag all over the place
    """
    # Defaults to False when the flag is absent from settings.
    return settings.FEATURES.get('ENTRANCE_EXAMS', False)
def is_prerequisite_courses_enabled():
    """
    Returns boolean indicating prerequisite courses enabled system wide or not.

    Both ENABLE_PREREQUISITE_COURSES and MILESTONES_APP must be on, since
    prerequisites are implemented on top of the milestones app.
    """
    return settings.FEATURES.get('ENABLE_PREREQUISITE_COURSES', False) \
        and settings.FEATURES.get('MILESTONES_APP', False)
def add_prerequisite_course(course_key, prerequisite_course_key):
    """
    Create a milestone linking two courses: the course at `course_key`
    *requires* the new milestone, and the course at `prerequisite_course_key`
    *fulfills* it.

    No-op (returns None) when the prerequisite-courses feature is disabled.
    """
    if not is_prerequisite_courses_enabled():
        return None
    # Imported lazily so the milestones app is only needed when enabled.
    from milestones import api as milestones_api
    milestone_name = _('Course {course_id} requires {prerequisite_course_id}').format(
        course_id=unicode(course_key),
        prerequisite_course_id=unicode(prerequisite_course_key)
    )
    milestone = milestones_api.add_milestone({
        'name': milestone_name,
        'namespace': unicode(prerequisite_course_key),
        'description': _('System defined milestone'),
    })
    # add requirement course milestone
    milestones_api.add_course_milestone(course_key, 'requires', milestone)
    # add fulfillment course milestone
    milestones_api.add_course_milestone(prerequisite_course_key, 'fulfills', milestone)
def remove_prerequisite_course(course_key, milestone):
    """
    Remove the given pre-requisite `milestone` from the course referred to
    by `course_key`.

    No-op (returns None) when the prerequisite-courses feature is disabled.
    """
    if not is_prerequisite_courses_enabled():
        return None
    from milestones import api as milestones_api
    milestones_api.remove_course_milestone(
        course_key,
        milestone,
    )
def set_prerequisite_courses(course_key, prerequisite_course_keys):
    """
    It would remove any existing requirement milestones for the given `course_key`
    and create new milestones for each pre-requisite course in `prerequisite_course_keys`.

    To only remove course milestones pass `course_key` and empty list or
    None as `prerequisite_course_keys` .
    """
    if not is_prerequisite_courses_enabled():
        return None
    from milestones import api as milestones_api
    #remove any existing requirement milestones with this pre-requisite course as requirement
    course_milestones = milestones_api.get_course_milestones(course_key=course_key, relationship="requires")
    if course_milestones:
        for milestone in course_milestones:
            remove_prerequisite_course(course_key, milestone)

    # add milestones if pre-requisite course is selected
    if prerequisite_course_keys:
        for prerequisite_course_key_string in prerequisite_course_keys:
            prerequisite_course_key = CourseKey.from_string(prerequisite_course_key_string)
            add_prerequisite_course(course_key, prerequisite_course_key)
def get_pre_requisite_courses_not_completed(user, enrolled_courses):  # pylint: disable=invalid-name
    """
    Makes a dict mapping courses to their unfulfilled milestones using the
    fulfillment API of the milestones app.

    Arguments:
        user (User): the user for whom we are checking prerequisites.
        enrolled_courses (CourseKey): a list of keys for the courses to be
            checked. The given user must be enrolled in all of these courses.

    Returns:
        dict[CourseKey: dict[
            'courses': list[dict['key': CourseKey, 'display': str]]
        ]]
        If a course has no incomplete prerequisites, it will be excluded from the
        dictionary.
    """
    if not is_prerequisite_courses_enabled():
        return {}

    from milestones import api as milestones_api
    pre_requisite_courses = {}
    for course_key in enrolled_courses:
        required_courses = []
        fulfillment_paths = milestones_api.get_course_milestones_fulfillment_paths(course_key, {'id': user.id})
        for __, milestone_value in fulfillment_paths.items():
            # Only the 'courses' entries of each fulfillment path matter here;
            # other keys (e.g. content) are ignored.
            for key, value in milestone_value.items():
                if key == 'courses' and value:
                    for required_course in value:
                        required_course_key = CourseKey.from_string(required_course)
                        required_course_display = CourseOverview.get_from_id(required_course_key).display_name
                        required_courses.append({
                            'key': required_course_key,
                            'display': required_course_display
                        })

        # If there are required courses, add them to the result dict.
        if required_courses:
            pre_requisite_courses[course_key] = {'courses': required_courses}

    return pre_requisite_courses
def get_prerequisite_courses_display(course_descriptor):
    """
    It would retrieve pre-requisite courses, make display strings
    and return list of dictionary with course key as 'key' field
    and course display name as `display` field.

    Returns an empty list when the feature is disabled or the course has no
    pre-requisites.
    """
    pre_requisite_courses = []
    if is_prerequisite_courses_enabled() and course_descriptor.pre_requisite_courses:
        for course_id in course_descriptor.pre_requisite_courses:
            course_key = CourseKey.from_string(course_id)
            required_course_descriptor = modulestore().get_course(course_key)
            prc = {
                'key': course_key,
                'display': get_course_display_string(required_course_descriptor)
            }
            pre_requisite_courses.append(prc)
    return pre_requisite_courses
def get_course_display_string(descriptor):
    """
    Return a display string ("<org> <number>") for a course or
    course overview.

    Arguments:
        descriptor (CourseDescriptor|CourseOverview): a course or course overview.
    """
    parts = (
        descriptor.display_org_with_default,
        descriptor.display_number_with_default,
    )
    return ' '.join(parts)
def fulfill_course_milestone(course_key, user):
    """
    Marks the course specified by the given course_key as complete for the given user.
    If any other courses require this course as a prerequisite, their milestones will be appropriately updated.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    course_milestones = milestones_api.get_course_milestones(course_key=course_key, relationship="fulfills")
    for milestone in course_milestones:
        # Every milestone this course fulfills is now achieved by the user.
        milestones_api.add_user_milestone({'id': user.id}, milestone)
def remove_course_milestones(course_key, user, relationship):
    """
    Remove all user milestones for the course specified by course_key,
    limited to milestones linked with the given `relationship`
    (e.g. "requires" or "fulfills").
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    course_milestones = milestones_api.get_course_milestones(course_key=course_key, relationship=relationship)
    for milestone in course_milestones:
        milestones_api.remove_user_milestone({'id': user.id}, milestone)
def get_required_content(course, user):
    """
    Queries milestones subsystem to see if the specified course is gated on one or more milestones,
    and if those milestones can be fulfilled via completion of a particular course content module

    Returns a list of content identifiers (empty when the feature is off or
    nothing is gated).
    """
    required_content = []
    if settings.FEATURES.get('MILESTONES_APP', False):
        from milestones.exceptions import InvalidMilestoneRelationshipTypeException
        # Get all of the outstanding milestones for this course, for this user
        try:
            milestone_paths = get_course_milestones_fulfillment_paths(
                unicode(course.id),
                serialize_user(user)
            )
        except InvalidMilestoneRelationshipTypeException:
            return required_content

        # For each outstanding milestone, see if this content is one of its fulfillment paths
        for path_key in milestone_paths:
            milestone_path = milestone_paths[path_key]
            if milestone_path.get('content') and len(milestone_path['content']):
                for content in milestone_path['content']:
                    required_content.append(content)
    return required_content
def milestones_achieved_by_user(user, namespace):
    """
    Fetch the list of milestones in `namespace` that `user` has completed.

    Returns None when the MILESTONES_APP feature is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.get_user_milestones({'id': user.id}, namespace)
def is_valid_course_key(key):
    """
    Return True when ``key`` parses as (or already is) a CourseKey,
    False otherwise.
    """
    try:
        candidate = CourseKey.from_string(key)
    except InvalidKeyError:
        # Not parseable as a course id -- check the raw value itself, which
        # may already be a CourseKey instance.
        candidate = key
    return isinstance(candidate, CourseKey)
def seed_milestone_relationship_types():
    """
    Helper method to pre-populate MRTs so the tests can run

    Creates the two relationship types ('requires' and 'fulfills') used by
    this module. No-op when the MILESTONES_APP feature is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones.models import MilestoneRelationshipType
    MilestoneRelationshipType.objects.create(name='requires')
    MilestoneRelationshipType.objects.create(name='fulfills')
def generate_milestone_namespace(namespace, course_key=None):
    """
    Returns a specifically-formatted namespace string for the specified type

    NOTE(review): implicitly returns None for unrecognised namespaces, and
    when course_key is omitted the result embeds the string 'None' -- confirm
    callers always pass a course_key for the entrance-exam namespace.
    """
    if namespace in NAMESPACE_CHOICES.values():
        if namespace == 'entrance_exams':
            return '{}.{}'.format(unicode(course_key), NAMESPACE_CHOICES['ENTRANCE_EXAM'])
def serialize_user(user):
    """
    Return the minimal dict representation of ``user`` expected by the
    milestones API (just the primary key).
    """
    return {'id': user.id}
def add_milestone(milestone_data):
    """
    Create a milestone (thin wrapper over ``milestones_api.add_milestone``).

    Returns None without side effects when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.add_milestone(milestone_data)
def get_milestones(namespace):
    """
    Fetch all milestones in ``namespace`` via the milestones app.

    Returns an empty list when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return []
    from milestones import api as milestones_api
    return milestones_api.get_milestones(namespace)
def get_milestone_relationship_types():
    """
    Fetch the known milestone relationship types from the milestones app.

    Returns an empty dict when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return {}
    from milestones import api as milestones_api
    return milestones_api.get_milestone_relationship_types()
def add_course_milestone(course_id, relationship, milestone):
    """
    Link ``milestone`` to the course via the given relationship
    (e.g. 'requires' or 'fulfills').

    Returns None without side effects when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.add_course_milestone(course_id, relationship, milestone)
def get_course_milestones(course_id):
    """
    Fetch all milestones attached to the given course.

    Returns an empty list when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return []
    from milestones import api as milestones_api
    return milestones_api.get_course_milestones(course_id)
def add_course_content_milestone(course_id, content_id, relationship, milestone):
    """
    Link ``milestone`` to a specific content module of the course.

    Returns None without side effects when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.add_course_content_milestone(course_id, content_id, relationship, milestone)
def get_course_content_milestones(course_id, content_id, relationship):
    """
    Fetch milestones attached to a specific content module, filtered by
    relationship type.

    Returns an empty list when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return []
    from milestones import api as milestones_api
    return milestones_api.get_course_content_milestones(course_id, content_id, relationship)
def remove_course_content_user_milestones(course_key, content_key, user, relationship):
    """
    Removes the specified User-Milestone link from the system for the specified course content module.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return []
    from milestones import api as milestones_api
    course_content_milestones = milestones_api.get_course_content_milestones(course_key, content_key, relationship)
    for milestone in course_content_milestones:
        milestones_api.remove_user_milestone({'id': user.id}, milestone)
def remove_content_references(content_id):
    """
    Drop all milestone references to the given content identifier.

    Returns None without side effects when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.remove_content_references(content_id)
def any_unfulfilled_milestones(course_id, user_id):
    """ Returns a boolean if user has any unfulfilled milestones """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        # Nothing can be gated when the app is off.
        return False
    return bool(
        get_course_milestones_fulfillment_paths(course_id, {"id": user_id})
    )
def get_course_milestones_fulfillment_paths(course_id, user_id):
    """
    Fetch the outstanding milestone fulfillment paths for a user in a course.

    ``user_id`` is the serialized-user dict (see ``serialize_user``).
    Returns None when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.get_course_milestones_fulfillment_paths(
        course_id,
        user_id
    )
def add_user_milestone(user, milestone):
    """
    Mark ``milestone`` as achieved by ``user`` (serialized-user dict).

    Returns None without side effects when MILESTONES_APP is disabled.
    """
    if not settings.FEATURES.get('MILESTONES_APP', False):
        return None
    from milestones import api as milestones_api
    return milestones_api.add_user_milestone(user, milestone)
| agpl-3.0 |
ahu-odoo/odoo | addons/hr_timesheet_invoice/report/__init__.py | 433 | 1136 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_analytic_profit
import report_analytic
import hr_timesheet_invoice_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
zoidbergwill/lint-review | tests/tools/test_pep8.py | 2 | 2254 | from lintreview.review import Problems
from lintreview.review import Comment
from lintreview.tools.pep8 import Pep8
from unittest import TestCase
from nose.tools import eq_
class TestPep8(TestCase):
    """Tests for the Pep8 lint tool adapter."""

    # Fixture files: [0] is clean, [1] contains six known pep8 violations.
    fixtures = [
        'tests/fixtures/pep8/no_errors.py',
        'tests/fixtures/pep8/has_errors.py',
    ]

    def setUp(self):
        self.problems = Problems()
        self.tool = Pep8(self.problems)

    def test_match_file(self):
        # Only python files should be linted, regardless of directory depth.
        self.assertFalse(self.tool.match_file('test.php'))
        self.assertFalse(self.tool.match_file('test.js'))
        self.assertFalse(self.tool.match_file('dir/name/test.js'))
        self.assertTrue(self.tool.match_file('test.py'))
        self.assertTrue(self.tool.match_file('dir/name/test.py'))

    def test_process_files__one_file_pass(self):
        self.tool.process_files([self.fixtures[0]])
        eq_([], self.problems.all(self.fixtures[0]))

    def test_process_files__one_file_fail(self):
        self.tool.process_files([self.fixtures[1]])
        problems = self.problems.all(self.fixtures[1])
        eq_(6, len(problems))

        fname = self.fixtures[1]
        # Spot-check the first and last expected problems.
        expected = Comment(fname, 2, 2, 'E401 multiple imports on one line')
        eq_(expected, problems[0])

        expected = Comment(fname, 11, 11, "W603 '<>' is deprecated, use '!='")
        eq_(expected, problems[5])

    def test_process_files_two_files(self):
        self.tool.process_files(self.fixtures)

        eq_([], self.problems.all(self.fixtures[0]))

        problems = self.problems.all(self.fixtures[1])
        eq_(6, len(problems))

        expected = Comment(self.fixtures[1], 2, 2, 'E401 multiple imports on one line')
        eq_(expected, problems[0])

        expected = Comment(self.fixtures[1], 11, 11, "W603 '<>' is deprecated, use '!='")
        eq_(expected, problems[5])

    def test_config_options_and_process_file(self):
        # Ignored codes should be excluded from the reported problems.
        options = {
            'ignore': 'E2,W603'
        }
        self.tool = Pep8(self.problems, options)
        self.tool.process_files([self.fixtures[1]])
        problems = self.problems.all(self.fixtures[1])
        eq_(4, len(problems))
        for p in problems:
            self.assertFalse('E2' in p.body)
            self.assertFalse('W603' in p.body)
| mit |
bcornwellmott/erpnext | erpnext/accounts/report/trial_balance_for_party/trial_balance_for_party.py | 34 | 5770 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint
from erpnext.accounts.report.trial_balance.trial_balance import validate_filters
def execute(filters=None):
    """Report entry point: validate filters, then return (columns, data)."""
    validate_filters(filters)

    show_party_name = is_party_name_visible(filters)
    columns = get_columns(filters, show_party_name)
    data = get_data(filters, show_party_name)

    return columns, data
def get_data(filters, show_party_name):
    """Build one report row per party (opening / period / closing balances)
    plus a trailing totals row."""
    party_name_field = "customer_name" if filters.get("party_type") == "Customer" else "supplier_name"
    party_filters = {"name": filters.get("party")} if filters.get("party") else {}
    parties = frappe.get_all(filters.get("party_type"), fields=["name", party_name_field],
        filters=party_filters, order_by="name")
    company_currency = frappe.db.get_value("Company", filters.company, "default_currency")
    opening_balances = get_opening_balances(filters)
    balances_within_period = get_balances_within_period(filters)

    data = []
    # total_debit, total_credit = 0, 0
    total_row = frappe._dict({
        "opening_debit": 0,
        "opening_credit": 0,
        "debit": 0,
        "credit": 0,
        "closing_debit": 0,
        "closing_credit": 0
    })
    for party in parties:
        row = {"party": party.name}
        if show_party_name:
            row["party_name"] = party.get(party_name_field)

        # opening
        opening_debit, opening_credit = opening_balances.get(party.name, [0, 0])
        row.update({
            "opening_debit": opening_debit,
            "opening_credit": opening_credit
        })

        # within period
        debit, credit = balances_within_period.get(party.name, [0, 0])
        row.update({
            "debit": debit,
            "credit": credit
        })

        # closing: opening plus period movement, collapsed to one side.
        closing_debit, closing_credit = toggle_debit_credit(opening_debit + debit, opening_credit + credit)
        row.update({
            "closing_debit": closing_debit,
            "closing_credit": closing_credit
        })

        # totals
        for col in total_row:
            total_row[col] += row.get(col)

        row.update({
            "currency": company_currency
        })

        # Skip all-zero rows unless the user asked to see them.
        has_value = False
        if (opening_debit or opening_credit or debit or credit or closing_debit or closing_credit):
            has_value = True

        if cint(filters.show_zero_values) or has_value:
            data.append(row)

    # Add total row
    total_row.update({
        "party": "'" + _("Totals") + "'",
        "currency": company_currency
    })
    data.append(total_row)

    return data
def get_opening_balances(filters):
    """Return {party: [opening_debit, opening_credit]} from GL entries dated
    before the report period (or flagged as opening entries)."""
    gle = frappe.db.sql("""
        select party, sum(debit) as opening_debit, sum(credit) as opening_credit
        from `tabGL Entry`
        where company=%(company)s
            and ifnull(party_type, '') = %(party_type)s and ifnull(party, '') != ''
            and (posting_date < %(from_date)s or ifnull(is_opening, 'No') = 'Yes')
        group by party""", {
            "company": filters.company,
            "from_date": filters.from_date,
            "party_type": filters.party_type
        }, as_dict=True)

    opening = frappe._dict()
    for d in gle:
        # Net each party's opening so only one side is non-zero.
        opening_debit, opening_credit = toggle_debit_credit(d.opening_debit, d.opening_credit)
        opening.setdefault(d.party, [opening_debit, opening_credit])

    return opening
def get_balances_within_period(filters):
    """Return {party: [debit, credit]} totals from GL entries posted inside
    the report period (opening entries excluded)."""
    gle = frappe.db.sql("""
        select party, sum(debit) as debit, sum(credit) as credit
        from `tabGL Entry`
        where company=%(company)s
            and ifnull(party_type, '') = %(party_type)s and ifnull(party, '') != ''
            and posting_date >= %(from_date)s and posting_date <= %(to_date)s
            and ifnull(is_opening, 'No') = 'No'
        group by party""", {
            "company": filters.company,
            "from_date": filters.from_date,
            "to_date": filters.to_date,
            "party_type": filters.party_type
        }, as_dict=True)

    balances_within_period = frappe._dict()
    for d in gle:
        balances_within_period.setdefault(d.party, [d.debit, d.credit])

    return balances_within_period
def toggle_debit_credit(debit, credit):
    """Collapse a debit/credit pair so that only the net side is non-zero.

    Returns a ``(debit, credit)`` tuple: the larger side reduced by the
    smaller, the other side zero.
    """
    net = flt(debit) - flt(credit)
    if net > 0:
        return net, 0.0
    return 0.0, flt(credit) - flt(debit)
def get_columns(filters, show_party_name):
    """Return the report column definitions; inserts a party-name column
    after the party link when show_party_name is truthy."""
    columns = [
        {
            "fieldname": "party",
            "label": _(filters.party_type),
            "fieldtype": "Link",
            "options": filters.party_type,
            "width": 200
        },
        {
            "fieldname": "opening_debit",
            "label": _("Opening (Dr)"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            "fieldname": "opening_credit",
            "label": _("Opening (Cr)"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            "fieldname": "debit",
            "label": _("Debit"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            "fieldname": "credit",
            "label": _("Credit"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            "fieldname": "closing_debit",
            "label": _("Closing (Dr)"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            "fieldname": "closing_credit",
            "label": _("Closing (Cr)"),
            "fieldtype": "Currency",
            "options": "currency",
            "width": 120
        },
        {
            # Hidden helper column so currency formatting works per-row.
            "fieldname": "currency",
            "label": _("Currency"),
            "fieldtype": "Link",
            "options": "Currency",
            "hidden": 1
        }
    ]

    if show_party_name:
        columns.insert(1, {
            "fieldname": "party_name",
            "label": _(filters.party_type) + " Name",
            "fieldtype": "Data",
            "width": 200
        })

    return columns
def is_party_name_visible(filters):
	"""Return True when a separate party-name column should be shown.

	The extra column is only useful when party masters are named by a
	naming series (the document name is then a code rather than the
	actual name).  ``Customer`` consults Selling Settings; any other
	party type is treated as Supplier and consults Buying Settings.

	Note: the original return line carried stray pasted text
	("| gpl-3.0 |") which made the function a syntax error; removed.
	"""
	show_party_name = False
	if filters.get("party_type") == "Customer":
		party_naming_by = frappe.db.get_single_value("Selling Settings", "cust_master_name")
	else:
		party_naming_by = frappe.db.get_single_value("Buying Settings", "supp_master_name")
	if party_naming_by == "Naming Series":
		show_party_name = True
	return show_party_name
nooperpudd/pulsar | pulsar/apps/data/pulsards/startds.py | 5 | 1813 | import asyncio
from pulsar import when_monitor_start, get_application, task, send
from pulsar.apps.data import create_store
from pulsar.apps.ds import PulsarDS
def start_pulsar_ds(arbiter, host, workers=0):
    """Start (or look up) the ``pulsards`` application in the arbiter.

    Serialised with an arbiter-wide lock so concurrent callers cannot
    race to create the data store twice.  Returns the application's
    config object.
    """
    # Lazily create a single lock, stored on the arbiter so it is shared
    # by every caller of this function.
    lock = getattr(arbiter, 'lock', None)
    if lock is None:
        arbiter.lock = lock = asyncio.Lock()
    yield from lock.acquire()
    try:
        app = yield from get_application('pulsards')
        if not app:
            # No running pulsards application yet: start one bound to ``host``.
            app = PulsarDS(bind=host, workers=workers, load_config=False)
            cfg = yield from app(arbiter)
        else:
            cfg = app.cfg
        return cfg
    finally:
        lock.release()
@task
def start_store(app, url, workers=0, **kw):
    '''Equivalent to :func:`.create_store` for most cases excepts when the
    ``url`` is for a pulsar store not yet started.
    In this case, a :class:`.PulsarDS` is started.
    '''
    store = create_store(url, **kw)
    if store.name == 'pulsar':
        client = store.client()
        try:
            yield from client.ping()
        except ConnectionRefusedError:
            # Nothing listening on the configured address.  Only attempt
            # to auto-start a data store when the address is local.
            host = localhost(store._host)
            if not host:
                raise
            cfg = yield from send('arbiter', 'run', start_pulsar_ds,
                                  host, workers)
            # Rebuild the store against the address that was actually bound.
            store._host = cfg.addresses[0]
            dns = store._buildurl()
            store = create_store(dns, **kw)
    # Record the (possibly updated) store url in the application config.
    app.cfg.set('data_store', store.dns)
def localhost(host):
    """Map *host* to a ``'host:port'`` string when it is a local address.

    Tuple addresses bound to the loopback interface (``'127.0.0.1'``) or
    to all interfaces (``''``) are rendered as ``'host:port'``; any other
    tuple yields ``None`` so callers can tell the address is not local.
    Non-tuple values (for example unix socket paths) are returned
    unchanged.
    """
    if isinstance(host, tuple):
        if host[0] not in ('127.0.0.1', ''):
            return None
        return ':'.join(str(part) for part in host)
    return host
def _start_store(monitor):
    # Monitor-start hook: make sure the configured data store is running
    # for every application except the data store itself.
    app = monitor.app
    if not isinstance(app, PulsarDS) and app.cfg.data_store:
        start_store(app, app.cfg.data_store)
# Register the hook so it runs whenever a monitor starts.
when_monitor_start.append(_start_store)
| bsd-3-clause |
adelina-t/nova | nova/tests/unit/cells/test_cells_filters.py | 30 | 7223 | # Copyright (c) 2012-2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for cells scheduler filters.
"""
from nova.cells import filters
from nova import context
from nova.db.sqlalchemy import models
from nova import test
from nova.tests.unit.cells import fakes
class FiltersTestCase(test.NoDBTestCase):
    """Verify that the expected filter plugins are discoverable."""

    def test_all_filters(self):
        names = [klass.__name__ for klass in filters.all_filters()]
        self.assertIn("TargetCellFilter", names)
class _FilterTestClass(test.NoDBTestCase):
    """Base class for testing individual filter plugins."""
    # Dotted path of the filter class under test; subclasses must set it.
    filter_cls_name = None
    def setUp(self):
        super(_FilterTestClass, self).setUp()
        # Build the fake cells environment and load the filter under test.
        fakes.init(self)
        self.msg_runner = fakes.get_message_runner('api-cell')
        self.scheduler = self.msg_runner.scheduler
        self.my_cell_state = self.msg_runner.state_manager.get_my_state()
        self.filter_handler = filters.CellFilterHandler()
        filter_classes = self.filter_handler.get_matching_classes(
            [self.filter_cls_name])
        self.filters = [cls() for cls in filter_classes]
        # Admin context by default; individual tests may build their own.
        self.context = context.RequestContext('fake', 'fake',
                                              is_admin=True)
    def _filter_cells(self, cells, filter_properties):
        # Run ``cells`` through the loaded filter(s) and return the result.
        return self.filter_handler.get_filtered_objects(self.filters,
                                                        cells,
                                                        filter_properties)
class ImagePropertiesFilter(_FilterTestClass):
    """Tests for the image-properties cell filter."""
    filter_cls_name = \
        'nova.cells.filters.image_properties.ImagePropertiesFilter'
    def setUp(self):
        super(ImagePropertiesFilter, self).setUp()
        # Three cells with empty capabilities and an empty request spec.
        self.cell1 = models.Cell()
        self.cell2 = models.Cell()
        self.cell3 = models.Cell()
        self.cells = [self.cell1, self.cell2, self.cell3]
        for cell in self.cells:
            cell.capabilities = {}
        self.filter_props = {'context': self.context, 'request_spec': {}}
    def test_missing_image_properties(self):
        # No image properties at all: every cell passes.
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_missing_hypervisor_version_requires(self):
        # Image present but no version requirement: every cell passes.
        self.filter_props['request_spec'] = {'image': {'properties': {}}}
        for cell in self.cells:
            cell.capabilities = {"prominent_hypervisor_version": set([u"6.2"])}
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_missing_hypervisor_version_in_cells(self):
        # Cells that do not advertise a version are not filtered out.
        image = {'properties': {'hypervisor_version_requires': '>6.2.1'}}
        self.filter_props['request_spec'] = {'image': image}
        self.cell1.capabilities = {"prominent_hypervisor_version": set([])}
        self.assertEqual(self.cells,
                         self._filter_cells(self.cells, self.filter_props))
    def test_cells_matching_hypervisor_version(self):
        # Only cells whose version satisfies the constraint survive.
        image = {'properties': {'hypervisor_version_requires': '>6.0, <=6.3'}}
        self.filter_props['request_spec'] = {'image': image}
        self.cell1.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.2"])}
        self.cell2.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.3"])}
        self.cell3.capabilities = {"prominent_hypervisor_version":
                                   set([u"6.0"])}
        self.assertEqual([self.cell1, self.cell2],
                         self._filter_cells(self.cells, self.filter_props))
        # assert again to verify filter doesn't mutate state
        # LP bug #1325705
        self.assertEqual([self.cell1, self.cell2],
                         self._filter_cells(self.cells, self.filter_props))
class TestTargetCellFilter(_FilterTestClass):
    """Tests for the target_cell scheduler-hint filter."""
    filter_cls_name = 'nova.cells.filters.target_cell.TargetCellFilter'
    def test_missing_scheduler_hints(self):
        cells = [1, 2, 3]
        # No filtering
        filter_props = {'context': self.context}
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_no_target_cell_hint(self):
        cells = [1, 2, 3]
        filter_props = {'scheduler_hints': {},
                        'context': self.context}
        # No filtering
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_target_cell_specified_me(self):
        # Hint targets the current cell: scheduling stays here.
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'fake!cell!path'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': self.context}
        # Only myself in the list.
        self.assertEqual([self.my_cell_state],
                         self._filter_cells(cells, filter_props))
    def test_target_cell_specified_me_but_not_admin(self):
        # Non-admin contexts must not be able to steer scheduling.
        ctxt = context.RequestContext('fake', 'fake')
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'fake!cell!path'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': ctxt}
        # No filtering, because not an admin.
        self.assertEqual(cells, self._filter_cells(cells, filter_props))
    def test_target_cell_specified_not_me(self):
        info = {}
        def _fake_build_instances(ctxt, cell, sched_kwargs):
            # Capture the re-dispatch arguments for verification below.
            info['ctxt'] = ctxt
            info['cell'] = cell
            info['sched_kwargs'] = sched_kwargs
        self.stubs.Set(self.msg_runner, 'build_instances',
                       _fake_build_instances)
        cells = [1, 2, 3]
        target_cell = 'fake!cell!path'
        current_cell = 'not!the!same'
        filter_props = {'scheduler_hints': {'target_cell': target_cell},
                        'routing_path': current_cell,
                        'scheduler': self.scheduler,
                        'context': self.context,
                        'host_sched_kwargs': 'meow'}
        # None is returned to bypass further scheduling.
        self.assertIsNone(self._filter_cells(cells, filter_props))
        # The filter should have re-scheduled to the child cell itself.
        expected_info = {'ctxt': self.context,
                         'cell': 'fake!cell!path',
                         'sched_kwargs': 'meow'}
        self.assertEqual(expected_info, info)
| apache-2.0 |
alexmandujano/django | django/contrib/gis/db/backends/base.py | 112 | 11144 | """
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
    """
    Base class for the spatial operations of a database backend.  Each
    spatial backend instantiates a subclass of this, advertising the
    features it supports via the class attributes below.
    """
    # Mappings from lookup/function names to backend-specific SQL;
    # populated by each concrete backend.
    distance_functions = {}
    geometry_functions = {}
    geometry_operators = {}
    geography_operators = {}
    geography_functions = {}
    gis_terms = set()
    truncate_params = {}
    # Quick booleans for the type of this spatial backend, and
    # an attribute for the spatial database version tuple (if applicable)
    postgis = False
    spatialite = False
    mysql = False
    oracle = False
    spatial_version = None
    # How the geometry column should be selected.
    select = None
    # Does the spatial database have a geometry or geography type?
    geography = False
    geometry = False
    # Capability flags: concrete backends flip these to True for the
    # spatial functions they implement.
    area = False
    centroid = False
    difference = False
    distance = False
    distance_sphere = False
    distance_spheroid = False
    envelope = False
    force_rhr = False
    mem_size = False
    bounding_circle = False
    num_geom = False
    num_points = False
    perimeter = False
    perimeter3d = False
    point_on_surface = False
    polygonize = False
    reverse = False
    scale = False
    snap_to_grid = False
    sym_difference = False
    transform = False
    translate = False
    union = False
    # Aggregates
    collect = False
    extent = False
    extent3d = False
    make_line = False
    unionagg = False
    # Serialization
    geohash = False
    geojson = False
    gml = False
    kml = False
    svg = False
    # Constructors
    from_text = False
    from_wkb = False
    # Default conversion functions for aggregates; will be overridden if implemented
    # for the spatial backend.
    def convert_extent(self, box):
        raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
    def convert_extent3d(self, box):
        raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
    def convert_geom(self, geom_val, geom_field):
        raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
    # For quoting column values, rather than columns.
    def geo_quote_name(self, name):
        return "'%s'" % name
    # GeometryField operations
    def geo_db_type(self, f):
        """
        Returns the database column type for the geometry field on
        the spatial backend.
        """
        raise NotImplementedError
    def get_distance(self, f, value, lookup_type):
        """
        Returns the distance parameters for the given geometry field,
        lookup value, and lookup type.
        """
        raise NotImplementedError('Distance operations not available on this spatial backend.')
    def get_geom_placeholder(self, f, value):
        """
        Returns the placeholder for the given geometry field with the given
        value. Depending on the spatial backend, the placeholder may contain a
        stored procedure call to the transformation function of the spatial
        backend.
        """
        raise NotImplementedError
    def get_expression_column(self, evaluator):
        """
        Helper method to return the quoted column string from the evaluator
        for its expression.
        """
        for expr, col_tup in evaluator.cols:
            if expr is evaluator.expression:
                return '%s.%s' % tuple(map(self.quote_name, col_tup))
        raise Exception("Could not find the column for the expression.")
    # Spatial SQL Construction
    def spatial_aggregate_sql(self, agg):
        raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
    def spatial_lookup_sql(self, lvalue, lookup_type, value, field):
        raise NotImplementedError
    # Routines for getting the OGC-compliant models.
    def geometry_columns(self):
        raise NotImplementedError
    def spatial_ref_sys(self):
        raise NotImplementedError
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
    """
    The SpatialRefSysMixin is a class used by the database-dependent
    SpatialRefSys objects to reduce redundant code.
    """
    # For pulling out the spheroid from the spatial reference string. This
    # regular expression is used only if the user does not have GDAL installed.
    # TODO: Flattening not used in all ellipsoids, could also be a minor axis,
    # or 'b' parameter.
    spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
    # For pulling out the units on platforms w/o GDAL installed.
    # TODO: Figure out how to pull out angular units of projected coordinate system and
    # fix for LOCAL_CS types. GDAL should be highly recommended for performing
    # distance queries.
    units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
    @property
    def srs(self):
        """
        Returns a GDAL SpatialReference object, if GDAL is installed.
        """
        if gdal.HAS_GDAL:
            # TODO: Is caching really necessary here? Is complexity worth it?
            if hasattr(self, '_srs'):
                # Returning a clone of the cached SpatialReference object.
                return self._srs.clone()
            else:
                # Attempting to cache a SpatialReference object.
                # Trying to get from WKT first.
                try:
                    self._srs = gdal.SpatialReference(self.wkt)
                    return self.srs
                except Exception as msg:
                    pass
                # Fall back to the PROJ.4 representation.
                try:
                    self._srs = gdal.SpatialReference(self.proj4text)
                    return self.srs
                except Exception as msg:
                    pass
                # NOTE(review): relies on Python 2 scoping -- ``msg`` from the
                # except clause above is still bound here.  Under Python 3 the
                # except target is deleted at block exit, so this line would
                # raise NameError instead; confirm before porting.
                raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
        else:
            raise Exception('GDAL is not installed.')
    @property
    def ellipsoid(self):
        """
        Returns a tuple of the ellipsoid parameters:
        (semimajor axis, semiminor axis, and inverse flattening).
        """
        if gdal.HAS_GDAL:
            return self.srs.ellipsoid
        else:
            # NOTE(review): the regex fallback yields only a 2-tuple of
            # (major, flattening) -- no semiminor axis -- unlike the GDAL
            # path; get_spheroid() below handles both lengths.
            m = self.spheroid_regex.match(self.wkt)
            if m: return (float(m.group('major')), float(m.group('flattening')))
            else: return None
    @property
    def name(self):
        "Returns the projection name."
        return self.srs.name
    @property
    def spheroid(self):
        "Returns the spheroid name for this spatial reference."
        return self.srs['spheroid']
    @property
    def datum(self):
        "Returns the datum for this spatial reference."
        return self.srs['datum']
    @property
    def projected(self):
        "Is this Spatial Reference projected?"
        if gdal.HAS_GDAL:
            return self.srs.projected
        else:
            return self.wkt.startswith('PROJCS')
    @property
    def local(self):
        "Is this Spatial Reference local?"
        if gdal.HAS_GDAL:
            return self.srs.local
        else:
            return self.wkt.startswith('LOCAL_CS')
    @property
    def geographic(self):
        "Is this Spatial Reference geographic?"
        if gdal.HAS_GDAL:
            return self.srs.geographic
        else:
            return self.wkt.startswith('GEOGCS')
    @property
    def linear_name(self):
        "Returns the linear units name."
        if gdal.HAS_GDAL:
            return self.srs.linear_name
        elif self.geographic:
            return None
        else:
            m = self.units_regex.match(self.wkt)
            return m.group('unit_name')
    @property
    def linear_units(self):
        "Returns the linear units."
        if gdal.HAS_GDAL:
            return self.srs.linear_units
        elif self.geographic:
            return None
        else:
            m = self.units_regex.match(self.wkt)
            return m.group('unit')
    @property
    def angular_name(self):
        "Returns the name of the angular units."
        if gdal.HAS_GDAL:
            return self.srs.angular_name
        elif self.projected:
            return None
        else:
            m = self.units_regex.match(self.wkt)
            return m.group('unit_name')
    @property
    def angular_units(self):
        "Returns the angular units."
        if gdal.HAS_GDAL:
            return self.srs.angular_units
        elif self.projected:
            return None
        else:
            m = self.units_regex.match(self.wkt)
            return m.group('unit')
    @property
    def units(self):
        "Returns a tuple of the units and the name."
        if self.projected or self.local:
            return (self.linear_units, self.linear_name)
        elif self.geographic:
            return (self.angular_units, self.angular_name)
        else:
            return (None, None)
    @classmethod
    def get_units(cls, wkt):
        """
        Class method used by GeometryField on initialization to
        retrieve the units on the given WKT, without having to use
        any of the database fields.
        """
        if gdal.HAS_GDAL:
            return gdal.SpatialReference(wkt).units
        else:
            m = cls.units_regex.match(wkt)
            return m.group('unit'), m.group('unit_name')
    @classmethod
    def get_spheroid(cls, wkt, string=True):
        """
        Class method used by GeometryField on initialization to
        retrieve the `SPHEROID[..]` parameters from the given WKT.
        """
        if gdal.HAS_GDAL:
            srs = gdal.SpatialReference(wkt)
            sphere_params = srs.ellipsoid
            sphere_name = srs['spheroid']
        else:
            m = cls.spheroid_regex.match(wkt)
            if m:
                sphere_params = (float(m.group('major')), float(m.group('flattening')))
                sphere_name = m.group('name')
            else:
                return None
        if not string:
            return sphere_name, sphere_params
        else:
            # `string` parameter used to place in format acceptable by PostGIS
            if len(sphere_params) == 3:
                radius, flattening = sphere_params[0], sphere_params[2]
            else:
                radius, flattening = sphere_params
            return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
    def __str__(self):
        """
        Returns the string representation. If GDAL is installed,
        it will be 'pretty' OGC WKT.
        """
        # NOTE(review): bare except deliberately falls back to raw WKT when
        # the srs property raises (e.g. GDAL missing or unparsable WKT).
        try:
            return six.text_type(self.srs)
        except:
            return six.text_type(self.wkt)
| bsd-3-clause |
halvertoluke/edx-platform | lms/djangoapps/branding/tests/test_models.py | 134 | 1938 | """
Tests for the Video Branding configuration.
"""
from django.test import TestCase
from django.core.exceptions import ValidationError
from nose.plugins.attrib import attr
from branding.models import BrandingInfoConfig
@attr('shard_1')
class BrandingInfoConfigTest(TestCase):
    """
    Test the BrandingInfoConfig model.
    """
    def setUp(self):
        super(BrandingInfoConfigTest, self).setUp()
        # JSON blob stored in the configuration field; reused by all tests.
        self.configuration_string = """{
            "CN": {
                "url": "http://www.xuetangx.com",
                "logo_src": "http://www.xuetangx.com/static/images/logo.png",
                "logo_tag": "Video hosted by XuetangX.com"
            }
        }"""
        self.config = BrandingInfoConfig(configuration=self.configuration_string)
    def test_create(self):
        """
        Tests creation of configuration.
        """
        self.config.save()
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(self.config.configuration, self.configuration_string)
    def test_clean_bad_json(self):
        """
        Tests if bad Json string was given.
        """
        # Truncated JSON must be rejected by model validation.
        self.config = BrandingInfoConfig(configuration='{"bad":"test"')
        self.assertRaises(ValidationError, self.config.clean)
    def test_get(self):
        """
        Tests get configuration from saved string.
        """
        self.config.enabled = True
        self.config.save()
        expected_config = {
            "CN": {
                "url": "http://www.xuetangx.com",
                "logo_src": "http://www.xuetangx.com/static/images/logo.png",
                "logo_tag": "Video hosted by XuetangX.com"
            }
        }
        self.assertEquals(self.config.get_config(), expected_config)
    def test_get_not_enabled(self):
        """
        Tests get configuration that is not enabled.
        """
        # A disabled configuration must yield an empty dict.
        self.config.enabled = False
        self.config.save()
        self.assertEquals(self.config.get_config(), {})
| agpl-3.0 |
ypu/tp-qemu | qemu/tests/timerdevice_boot.py | 3 | 8045 | import logging
import re
import time
from autotest.client.shared import error
from autotest.client import utils
from virttest import data_dir, storage, utils_disk, utils_test, env_process
from virttest import funcatexit
@error.context_aware
def run(test, params, env):
    """
    Timer device boot guest:
    1) Sync the host system time with ntp server
    2) Add some load on host (Optional)
    3) Boot the guest with specific clock source
    4) Check the clock source currently used on guest
    5) Do some file operation on guest (Optional)
    6) Check the system time on guest and host (Optional)
    7) Check the hardware time on guest and host (Optional)
    8) Sleep period of time before reboot (Optional)
    9) Reboot guest (Optional)
    10) Check the system time on guest and host (Optional)
    11) Check the hardware time on guest and host (Optional)
    :param test: QEMU test object.
    :param params: Dictionary with test parameters.
    :param env: Dictionary with the test environment.
    """
    def verify_guest_clock_source(session, expected):
        # Fail the test unless the guest's active clocksource matches.
        error.context("Check the current clocksource in guest", logging.info)
        cmd = "cat /sys/devices/system/clocksource/"
        cmd += "clocksource0/current_clocksource"
        if not expected in session.cmd(cmd):
            raise error.TestFail(
                "Guest didn't use '%s' clocksource" % expected)
    error.context("Sync the host system time with ntp server", logging.info)
    utils.system("ntpdate clock.redhat.com")
    timerdevice_host_load_cmd = params.get("timerdevice_host_load_cmd")
    if timerdevice_host_load_cmd:
        error.context("Add some load on host", logging.info)
        utils.system(timerdevice_host_load_cmd)
        # Ensure the load is torn down when the test exits.
        host_load_stop_cmd = params["timerdevice_host_load_stop_cmd"]
        funcatexit.register(env, params["type"], utils.system,
                            host_load_stop_cmd)
    error.context("Boot a guest with kvm-clock", logging.info)
    vm = env.get_vm(params["main_vm"])
    vm.verify_alive()
    timeout = int(params.get("login_timeout", 360))
    session = vm.wait_for_login(timeout=timeout)
    timerdevice_clksource = params.get("timerdevice_clksource")
    if timerdevice_clksource:
        try:
            verify_guest_clock_source(session, timerdevice_clksource)
        except Exception:
            # Guest is not using the requested clocksource: rewrite the
            # kernel command line in the guest image and reboot with
            # kvm-clock disabled on the virtual CPU.
            clksrc = timerdevice_clksource
            error.context("Shutdown guest")
            vm.destroy()
            env.unregister_vm(vm.name)
            error.context("Update guest kernel cli to '%s'" % clksrc,
                          logging.info)
            image_filename = storage.get_image_filename(params,
                                                        data_dir.get_data_dir())
            grub_file = params.get("grub_file", "/boot/grub2/grub.cfg")
            kernel_cfg_pattern = params.get("kernel_cfg_pos_reg",
                                            r".*vmlinuz-\d+.*")
            disk_obj = utils_disk.GuestFSModiDisk(image_filename)
            kernel_cfg_original = disk_obj.read_file(grub_file)
            try:
                logging.warn("Update the first kernel entry to"
                             " '%s' only" % clksrc)
                kernel_cfg = re.findall(kernel_cfg_pattern,
                                        kernel_cfg_original)[0]
            # Python 2 except syntax (this module predates Python 3 support).
            except IndexError, detail:
                raise error.TestError("Couldn't find the kernel config, regex"
                                      " pattern is '%s', detail: '%s'" %
                                      (kernel_cfg_pattern, detail))
            if "clocksource=" in kernel_cfg:
                kernel_cfg_new = re.sub("clocksource=.*?\s",
                                        "clocksource=%s" % clksrc, kernel_cfg)
            else:
                kernel_cfg_new = "%s %s" % (kernel_cfg,
                                            "clocksource=%s" % clksrc)
            disk_obj.replace_image_file_content(grub_file, kernel_cfg,
                                                kernel_cfg_new)
            error.context("Boot the guest", logging.info)
            vm_name = params["main_vm"]
            cpu_model_flags = params.get("cpu_model_flags")
            # Disable kvm-clock so the requested clocksource is chosen.
            params["cpu_model_flags"] = cpu_model_flags + ",-kvmclock"
            env_process.preprocess_vm(test, params, env, vm_name)
            vm = env.get_vm(vm_name)
            vm.verify_alive()
            session = vm.wait_for_login(timeout=timeout)
            error.context("Check the current clocksource in guest",
                          logging.info)
            verify_guest_clock_source(session, clksrc)
        error.context("Kill all ntp related processes")
        session.cmd("pkill ntp; true")
    if params.get("timerdevice_file_operation") == "yes":
        error.context("Do some file operation on guest", logging.info)
        session.cmd("dd if=/dev/zero of=/tmp/timer-test-file bs=1M count=100")
        return
    # Command to run to get the current time
    time_command = params["time_command"]
    # Filter which should match a string to be passed to time.strptime()
    time_filter_re = params["time_filter_re"]
    # Time format for time.strptime()
    time_format = params["time_format"]
    timerdevice_drift_threshold = params.get("timerdevice_drift_threshold", 3)
    error.context("Check the system time on guest and host", logging.info)
    (host_time, guest_time) = utils_test.get_time(session, time_command,
                                                  time_filter_re, time_format)
    drift = abs(float(host_time) - float(guest_time))
    if drift > timerdevice_drift_threshold:
        raise error.TestFail("The guest's system time is different with"
                             " host's. Host time: '%s', guest time:"
                             " '%s'" % (host_time, guest_time))
    get_hw_time_cmd = params.get("get_hw_time_cmd")
    if get_hw_time_cmd:
        error.context(
            "Check the hardware time on guest and host", logging.info)
        host_time = utils.system_output(get_hw_time_cmd)
        guest_time = session.cmd(get_hw_time_cmd)
        drift = abs(float(host_time) - float(guest_time))
        if drift > timerdevice_drift_threshold:
            raise error.TestFail("The guest's hardware time is different with"
                                 " host's. Host time: '%s', guest time:"
                                 " '%s'" % (host_time, guest_time))
    if params.get("timerdevice_reboot_test") == "yes":
        sleep_time = params.get("timerdevice_sleep_time")
        if sleep_time:
            error.context("Sleep '%s' secs before reboot" % sleep_time,
                          logging.info)
            sleep_time = int(sleep_time)
            time.sleep(sleep_time)
        session = vm.reboot()
        error.context("Check the system time on guest and host", logging.info)
        (host_time, guest_time) = utils_test.get_time(session, time_command,
                                                      time_filter_re, time_format)
        drift = abs(float(host_time) - float(guest_time))
        if drift > timerdevice_drift_threshold:
            raise error.TestFail("The guest's system time is different with"
                                 " host's. Host time: '%s', guest time:"
                                 " '%s'" % (host_time, guest_time))
        get_hw_time_cmd = params.get("get_hw_time_cmd")
        if get_hw_time_cmd:
            error.context(
                "Check the hardware time on guest and host", logging.info)
            host_time = utils.system_output(get_hw_time_cmd)
            guest_time = session.cmd(get_hw_time_cmd)
            drift = abs(float(host_time) - float(guest_time))
            if drift > timerdevice_drift_threshold:
                raise error.TestFail("The guest's hardware time is different with"
                                     " host's. Host time: '%s', guest time:"
                                     " '%s'" % (host_time, guest_time))
| gpl-2.0 |
ayesandarmoe/microblog_flask_tutorial | flask/lib/python2.7/codecs.py | 33 | 35555 | """ codecs -- Python Codec Registry, API and helpers.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import __builtin__, sys
### Registry and builtin stateless codec functions
try:
from _codecs import *
except ImportError, why:
raise SystemError('Failed to load the builtin codecs: %s' % why)
# Public API of the codecs module.
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
           "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
           "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
           "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
           "CodecInfo", "Codec", "IncrementalEncoder", "IncrementalDecoder",
           "StreamReader", "StreamWriter",
           "StreamReaderWriter", "StreamRecoder",
           "getencoder", "getdecoder", "getincrementalencoder",
           "getincrementaldecoder", "getreader", "getwriter",
           "encode", "decode", "iterencode", "iterdecode",
           "strict_errors", "ignore_errors", "replace_errors",
           "xmlcharrefreplace_errors", "backslashreplace_errors",
           "register_error", "lookup_error"]
### Constants
#
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
# and its possible byte string values
# for UTF8/UTF16/UTF32 output and little/big endian machines
#
# UTF-8
BOM_UTF8 = '\xef\xbb\xbf'
# UTF-16, little endian
BOM_LE = BOM_UTF16_LE = '\xff\xfe'
# UTF-16, big endian
BOM_BE = BOM_UTF16_BE = '\xfe\xff'
# UTF-32, little endian
BOM_UTF32_LE = '\xff\xfe\x00\x00'
# UTF-32, big endian
BOM_UTF32_BE = '\x00\x00\xfe\xff'
# Pick the multi-byte BOMs matching the platform's native byte order.
if sys.byteorder == 'little':
    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_LE
    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_LE
else:
    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_BE
    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_BE
# Old broken names (don't use in new code); kept only for backwards
# compatibility with very old callers.
BOM32_LE = BOM_UTF16_LE
BOM32_BE = BOM_UTF16_BE
BOM64_LE = BOM_UTF32_LE
BOM64_BE = BOM_UTF32_BE
### Codec base classes (defining the API)
class CodecInfo(tuple):
    """Codec details returned by the codec registry.

    Behaves as the legacy 4-tuple (encode, decode, streamreader,
    streamwriter) while also exposing every codec component, plus the
    codec name, as attributes.
    """

    def __new__(cls, encode, decode, streamreader=None, streamwriter=None,
            incrementalencoder=None, incrementaldecoder=None, name=None):
        info = tuple.__new__(cls, (encode, decode, streamreader, streamwriter))
        info.name = name
        info.encode = encode
        info.decode = decode
        info.incrementalencoder = incrementalencoder
        info.incrementaldecoder = incrementaldecoder
        info.streamwriter = streamwriter
        info.streamreader = streamreader
        return info

    def __repr__(self):
        return "<%s.%s object for encoding %s at 0x%x>" % (
            self.__class__.__module__, self.__class__.__name__,
            self.name, id(self))
class Codec:
    """Interface definition for stateless encoders/decoders.

    The .encode()/.decode() methods accept an ``errors`` argument that
    selects one of the predefined error handling schemes:

        'strict' - raise a ValueError error (or a subclass)
        'ignore' - ignore the character and continue with the next
        'replace' - replace with a suitable replacement character;
            Python will use the official U+FFFD REPLACEMENT CHARACTER
            for the builtin Unicode codecs on decoding and '?' on
            encoding.
        'xmlcharrefreplace' - Replace with the appropriate XML
            character reference (only for encoding).
        'backslashreplace' - Replace with backslashed escape sequences
            (only for encoding).

    The set of allowed values can be extended via register_error.
    """
    def encode(self, input, errors='strict'):
        """Encode the object ``input`` and return a tuple
        (output object, length consumed).

        The method may not store state in the Codec instance; use
        StreamCodec for codecs which have to keep state in order to
        make encoding/decoding efficient.  The encoder must be able to
        handle zero length input and return an empty object of the
        output object type in this situation.
        """
        raise NotImplementedError

    def decode(self, input, errors='strict'):
        """Decode the object ``input`` and return a tuple
        (output object, length consumed).

        ``input`` must be an object which provides the bf_getreadbuf
        buffer slot (Python strings, buffer objects and memory mapped
        files are examples).  The decoder must be able to handle zero
        length input and return an empty object of the output object
        type in this situation.
        """
        raise NotImplementedError
class IncrementalEncoder(object):
    """
    Encodes an input in multiple steps: pieces of input are fed to
    encode() one at a time, and the encoder keeps its state between
    those calls.
    """
    def __init__(self, errors='strict'):
        """
        Create an IncrementalEncoder instance.

        ``errors`` selects the error handling scheme; see the module
        docstring for the predefined values.
        """
        self.errors = errors
        self.buffer = ""

    def encode(self, input, final=False):
        """Encode ``input`` and return the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Reset the encoder to its initial state."""

    def getstate(self):
        """Return the current state of the encoder."""
        return 0

    def setstate(self, state):
        """
        Restore encoder state; ``state`` must have been produced by
        getstate().
        """
class BufferedIncrementalEncoder(IncrementalEncoder):
"""
This subclass of IncrementalEncoder can be used as the baseclass for an
incremental encoder if the encoder must keep some of the output in a
buffer between calls to encode().
"""
def __init__(self, errors='strict'):
IncrementalEncoder.__init__(self, errors)
self.buffer = "" # unencoded input that is kept between calls to encode()
def _buffer_encode(self, input, errors, final):
# Overwrite this method in subclasses: It must encode input
# and return an (output, length consumed) tuple
raise NotImplementedError
def encode(self, input, final=False):
# encode input (taking the buffer into account)
data = self.buffer + input
(result, consumed) = self._buffer_encode(data, self.errors, final)
# keep unencoded input until the next call
self.buffer = data[consumed:]
return result
def reset(self):
IncrementalEncoder.reset(self)
self.buffer = ""
def getstate(self):
return self.buffer or 0
def setstate(self, state):
self.buffer = state or ""
class IncrementalDecoder(object):
    """
    Decodes an input in multiple steps: pieces of input are fed to
    decode() one at a time, and the decoder keeps its state between
    those calls.
    """
    def __init__(self, errors='strict'):
        """
        Create an IncrementalDecoder instance.

        ``errors`` selects the error handling scheme; see the module
        docstring for the predefined values.
        """
        self.errors = errors

    def decode(self, input, final=False):
        """Decode ``input`` and return the resulting object."""
        raise NotImplementedError

    def reset(self):
        """Reset the decoder to its initial state."""

    def getstate(self):
        """
        Return the current decoder state as a
        (buffered_input, additional_state_info) tuple.

        buffered_input is a bytes object with input passed to decode()
        that has not been converted yet; additional_state_info is a
        non-negative integer describing the decoder state WITHOUT the
        contents of buffered_input.  In the initial state and after
        reset(), getstate() must return (b"", 0).
        """
        return (b"", 0)

    def setstate(self, state):
        """
        Restore decoder state; ``state`` must have been produced by
        getstate().  setstate((b"", 0)) must behave like reset().
        """
class BufferedIncrementalDecoder(IncrementalDecoder):
    """
    Base class for incremental decoders that must cope with incomplete
    byte sequences: undecoded trailing input is buffered between calls.
    """

    def __init__(self, errors='strict'):
        IncrementalDecoder.__init__(self, errors)
        # Undecoded input carried over between calls to decode().
        self.buffer = ""

    def _buffer_decode(self, input, errors, final):
        """Decode *input* and return an (output, length_consumed) tuple.

        Subclasses must override this.
        """
        raise NotImplementedError

    def decode(self, input, final=False):
        """Decode *input*, carrying undecoded tail data between calls."""
        # Prepend whatever was left over from the previous call.
        pending = self.buffer + input
        output, consumed = self._buffer_decode(pending, self.errors, final)
        # Keep the unconsumed remainder for the next call.
        self.buffer = pending[consumed:]
        return output

    def reset(self):
        IncrementalDecoder.reset(self)
        self.buffer = ""

    def getstate(self):
        # The additional state info is always 0 for buffered decoders.
        return (self.buffer, 0)

    def setstate(self, state):
        # Only the buffered input matters; the extra state info is ignored.
        self.buffer = state[0]
#
# The StreamWriter and StreamReader class provide generic working
# interfaces which can be used to implement new encoding submodules
# very easily. See encodings/utf_8.py for an example on how this is
# done.
#
class StreamWriter(Codec):

    def __init__(self, stream, errors='strict'):
        """ Creates a StreamWriter instance.

            stream must be a file-like object open for writing
            (binary) data.

            The StreamWriter may use different error handling schemes,
            selected via the errors keyword argument.  The predefined
            values are:

             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace' - replace with a suitable replacement character
             'xmlcharrefreplace' - replace with the appropriate XML
                                   character reference
             'backslashreplace' - replace with backslashed escape
                                  sequences (only for encoding)

            The set of allowed values can be extended via register_error.
        """
        self.stream = stream
        self.errors = errors

    def write(self, object):
        """ Writes the object's contents encoded to self.stream.
        """
        encoded, _consumed = self.encode(object, self.errors)
        self.stream.write(encoded)

    def writelines(self, list):
        """ Writes the concatenated list of strings to the stream
            using .write().
        """
        self.write(''.join(list))

    def reset(self):
        """ Flushes and resets the codec buffers used for keeping state.

            Calling this method should ensure that the data on the output
            is put into a clean state that allows appending of new fresh
            data without having to rescan the whole stream to recover
            state.
        """
        pass

    def seek(self, offset, whence=0):
        self.stream.seek(offset, whence)
        # Rewinding to the very beginning also discards codec state.
        if whence == 0 and offset == 0:
            self.reset()

    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReader(Codec):
    """Generic buffering stream reader for codecs.

    Reads raw bytes from an underlying stream and decodes them via the
    (subclass-provided) decode() method, keeping undecoded bytes and
    already-decoded characters buffered between calls.
    """

    def __init__(self, stream, errors='strict'):
        """ Creates a StreamReader instance.

            stream must be a file-like object open for reading
            (binary) data.

            The StreamReader may use different error handling
            schemes by providing the errors keyword argument. These
            parameters are predefined:

             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace'- replace with a suitable replacement character;

            The set of allowed parameter values can be extended via
            register_error.
        """
        self.stream = stream
        self.errors = errors
        # Raw bytes read from the stream but not yet decoded.
        self.bytebuffer = ""
        # For str->str decoding this will stay a str
        # For str->unicode decoding the first read will promote it to unicode
        self.charbuffer = ""
        # Cache of pre-split lines (list) produced by readline(); mutually
        # exclusive with charbuffer being meaningful.
        self.linebuffer = None

    def decode(self, input, errors='strict'):
        # Subclasses must implement: decode input and return a
        # (decoded_object, bytes_consumed) tuple.
        raise NotImplementedError

    def read(self, size=-1, chars=-1, firstline=False):
        """ Decodes data from the stream self.stream and returns the
            resulting object.

            chars indicates the number of characters to read from the
            stream. read() will never return more than chars
            characters, but it might return less, if there are not enough
            characters available.

            size indicates the approximate maximum number of bytes to
            read from the stream for decoding purposes. The decoder
            can modify this setting as appropriate. The default value
            -1 indicates to read and decode as much as possible. size
            is intended to prevent having to decode huge files in one
            step.

            If firstline is true, and a UnicodeDecodeError happens
            after the first line terminator in the input only the first line
            will be returned, the rest of the input will be kept until the
            next call to read().

            The method should use a greedy read strategy meaning that
            it should read as much data as is allowed within the
            definition of the encoding and the given size, e.g. if
            optional encoding endings or state markers are available
            on the stream, these should be read too.
        """
        # If we have lines cached, first merge them back into characters
        if self.linebuffer:
            self.charbuffer = "".join(self.linebuffer)
            self.linebuffer = None
        # read until we get the required number of characters (if available)
        while True:
            # can the request be satisfied from the character buffer?
            if chars >= 0:
                if len(self.charbuffer) >= chars:
                    break
            elif size >= 0:
                if len(self.charbuffer) >= size:
                    break
            # we need more data
            if size < 0:
                newdata = self.stream.read()
            else:
                newdata = self.stream.read(size)
            # decode bytes (those remaining from the last call included)
            data = self.bytebuffer + newdata
            try:
                newchars, decodedbytes = self.decode(data, self.errors)
            except UnicodeDecodeError, exc:
                # Python-2-only except syntax; exc.start is the offset of
                # the first undecodable byte.
                if firstline:
                    # Decode up to the error; if that yields at least one
                    # complete line, serve it and defer the bad bytes.
                    newchars, decodedbytes = self.decode(data[:exc.start], self.errors)
                    lines = newchars.splitlines(True)
                    if len(lines)<=1:
                        raise
                else:
                    raise
            # keep undecoded bytes until the next call
            self.bytebuffer = data[decodedbytes:]
            # put new characters in the character buffer
            self.charbuffer += newchars
            # there was no data available
            if not newdata:
                break
        if chars < 0:
            # Return everything we've got
            result = self.charbuffer
            self.charbuffer = ""
        else:
            # Return the first chars characters
            result = self.charbuffer[:chars]
            self.charbuffer = self.charbuffer[chars:]
        return result

    def readline(self, size=None, keepends=True):
        """ Read one line from the input stream and return the
            decoded data.

            size, if given, is passed as size argument to the
            read() method.
        """
        # If we have lines cached from an earlier read, return
        # them unconditionally
        if self.linebuffer:
            line = self.linebuffer[0]
            del self.linebuffer[0]
            if len(self.linebuffer) == 1:
                # revert to charbuffer mode; we might need more data
                # next time
                self.charbuffer = self.linebuffer[0]
                self.linebuffer = None
            if not keepends:
                line = line.splitlines(False)[0]
            return line

        # Start with a small read and grow it (doubling below) until a
        # full line is seen or the stream is exhausted.
        readsize = size or 72
        line = ""
        # If size is given, we call read() only once
        while True:
            data = self.read(readsize, firstline=True)
            if data:
                # If we're at a "\r" read one extra character (which might
                # be a "\n") to get a proper line ending. If the stream is
                # temporarily exhausted we return the wrong line ending.
                if data.endswith("\r"):
                    data += self.read(size=1, chars=1)
            line += data
            lines = line.splitlines(True)
            if lines:
                if len(lines) > 1:
                    # More than one line result; the first line is a full line
                    # to return
                    line = lines[0]
                    del lines[0]
                    if len(lines) > 1:
                        # cache the remaining lines
                        lines[-1] += self.charbuffer
                        self.linebuffer = lines
                        self.charbuffer = None
                    else:
                        # only one remaining line, put it back into charbuffer
                        self.charbuffer = lines[0] + self.charbuffer
                    if not keepends:
                        line = line.splitlines(False)[0]
                    break
                line0withend = lines[0]
                line0withoutend = lines[0].splitlines(False)[0]
                if line0withend != line0withoutend: # We really have a line end
                    # Put the rest back together and keep it until the next call
                    self.charbuffer = "".join(lines[1:]) + self.charbuffer
                    if keepends:
                        line = line0withend
                    else:
                        line = line0withoutend
                    break
            # we didn't get anything or this was our only try
            if not data or size is not None:
                if line and not keepends:
                    line = line.splitlines(False)[0]
                break
            if readsize<8000:
                readsize *= 2
        return line

    def readlines(self, sizehint=None, keepends=True):
        """ Read all lines available on the input stream
            and return them as list of lines.

            Line breaks are implemented using the codec's decoder
            method and are included in the list entries.

            sizehint, if given, is ignored since there is no efficient
            way to finding the true end-of-line.
        """
        data = self.read()
        return data.splitlines(keepends)

    def reset(self):
        """ Resets the codec buffers used for keeping state.

            Note that no stream repositioning should take place.
            This method is primarily intended to be able to recover
            from decoding errors.
        """
        self.bytebuffer = ""
        self.charbuffer = u""
        self.linebuffer = None

    def seek(self, offset, whence=0):
        """ Set the input stream's current position.

            Resets the codec buffers used for keeping state.
        """
        self.stream.seek(offset, whence)
        self.reset()

    def next(self):
        """ Return the next decoded line from the input stream."""
        line = self.readline()
        if line:
            return line
        raise StopIteration

    def __iter__(self):
        return self

    def __getattr__(self, name,
                    getattr=getattr):
        """ Inherit all other methods from the underlying stream.
        """
        return getattr(self.stream, name)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.stream.close()
###
class StreamReaderWriter:
""" StreamReaderWriter instances allow wrapping streams which
work in both read and write modes.
The design is such that one can use the factory functions
returned by the codec.lookup() function to construct the
instance.
"""
# Optional attributes set by the file wrappers below
encoding = 'unknown'
def __init__(self, stream, Reader, Writer, errors='strict'):
""" Creates a StreamReaderWriter instance.
stream must be a Stream-like object.
Reader, Writer must be factory functions or classes
providing the StreamReader, StreamWriter interface resp.
Error handling is done in the same way as defined for the
StreamWriter/Readers.
"""
self.stream = stream
self.reader = Reader(stream, errors)
self.writer = Writer(stream, errors)
self.errors = errors
def read(self, size=-1):
return self.reader.read(size)
def readline(self, size=None):
return self.reader.readline(size)
def readlines(self, sizehint=None):
return self.reader.readlines(sizehint)
def next(self):
""" Return the next decoded line from the input stream."""
return self.reader.next()
def __iter__(self):
return self
def write(self, data):
return self.writer.write(data)
def writelines(self, list):
return self.writer.writelines(list)
def reset(self):
self.reader.reset()
self.writer.reset()
def seek(self, offset, whence=0):
self.stream.seek(offset, whence)
self.reader.reset()
if whence == 0 and offset == 0:
self.writer.reset()
def __getattr__(self, name,
getattr=getattr):
""" Inherit all other methods from the underlying stream.
"""
return getattr(self.stream, name)
# these are needed to make "with codecs.open(...)" work properly
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.stream.close()
###
class StreamRecoder:
""" StreamRecoder instances provide a frontend - backend
view of encoding data.
They use the complete set of APIs returned by the
codecs.lookup() function to implement their task.
Data written to the stream is first decoded into an
intermediate format (which is dependent on the given codec
combination) and then written to the stream using an instance
of the provided Writer class.
In the other direction, data is read from the stream using a
Reader instance and then return encoded data to the caller.
"""
# Optional attributes set by the file wrappers below
data_encoding = 'unknown'
file_encoding = 'unknown'
def __init__(self, stream, encode, decode, Reader, Writer,
errors='strict'):
""" Creates a StreamRecoder instance which implements a two-way
conversion: encode and decode work on the frontend (the
input to .read() and output of .write()) while
Reader and Writer work on the backend (reading and
writing to the stream).
You can use these objects to do transparent direct
recodings from e.g. latin-1 to utf-8 and back.
stream must be a file-like object.
encode, decode must adhere to the Codec interface, Reader,
Writer must be factory functions or classes providing the
StreamReader, StreamWriter interface resp.
encode and decode are needed for the frontend translation,
Reader and Writer for the backend translation. Unicode is
used as intermediate encoding.
Error handling is done in the same way as defined for the
StreamWriter/Readers.
"""
self.stream = stream
self.encode = encode
self.decode = decode
self.reader = Reader(stream, errors)
self.writer = Writer(stream, errors)
self.errors = errors
def read(self, size=-1):
data = self.reader.read(size)
data, bytesencoded = self.encode(data, self.errors)
return data
def readline(self, size=None):
if size is None:
data = self.reader.readline()
else:
data = self.reader.readline(size)
data, bytesencoded = self.encode(data, self.errors)
return data
def readlines(self, sizehint=None):
data = self.reader.read()
data, bytesencoded = self.encode(data, self.errors)
return data.splitlines(1)
def next(self):
""" Return the next decoded line from the input stream."""
data = self.reader.next()
data, bytesencoded = self.encode(data, self.errors)
return data
def __iter__(self):
return self
def write(self, data):
data, bytesdecoded = self.decode(data, self.errors)
return self.writer.write(data)
def writelines(self, list):
data = ''.join(list)
data, bytesdecoded = self.decode(data, self.errors)
return self.writer.write(data)
def reset(self):
self.reader.reset()
self.writer.reset()
def __getattr__(self, name,
getattr=getattr):
""" Inherit all other methods from the underlying stream.
"""
return getattr(self.stream, name)
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.stream.close()
### Shortcuts
def open(filename, mode='rb', encoding=None, errors='strict', buffering=1):
    """ Open an encoded file using the given mode and return a wrapped
        version providing transparent encoding/decoding.

        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.

        Files are always opened in binary mode, even if no binary mode
        was specified. This is done to avoid data loss due to encodings
        using 8-bit values. The default file mode is 'rb' meaning to
        open the file in binary read mode.

        encoding specifies the encoding which is to be used for the
        file.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        buffering has the same meaning as for the builtin open() API.
        It defaults to line buffered.

        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.
    """
    if encoding is not None:
        if 'U' in mode:
            # No automatic conversion of '\n' is done on reading and
            # writing, so the universal-newline flag is dropped.
            mode = mode.strip().replace('U', '')
            if mode[:1] not in set('rwa'):
                mode = 'r' + mode
        if 'b' not in mode:
            # Force opening of the file in binary mode
            mode = mode + 'b'
    stream = __builtin__.open(filename, mode, buffering)
    if encoding is None:
        return stream
    codec_info = lookup(encoding)
    wrapped = StreamReaderWriter(stream, codec_info.streamreader,
                                 codec_info.streamwriter, errors)
    # Add attributes to simplify introspection
    wrapped.encoding = encoding
    return wrapped
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
    """ Return a wrapped version of file which provides transparent
        encoding translation.

        Strings written to the wrapped file are interpreted according
        to the given data_encoding and then written to the original
        file as string using file_encoding. The intermediate encoding
        will usually be Unicode but depends on the specified codecs.

        Strings are read from the file using file_encoding and then
        passed back to the caller as string using data_encoding.

        If file_encoding is not given, it defaults to data_encoding.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        The returned wrapped file object provides two extra attributes
        .data_encoding and .file_encoding which reflect the given
        parameters of the same name. The attributes can be used for
        introspection by Python programs.
    """
    if file_encoding is None:
        file_encoding = data_encoding
    frontend = lookup(data_encoding)
    backend = lookup(file_encoding)
    recoder = StreamRecoder(file, frontend.encode, frontend.decode,
                            backend.streamreader, backend.streamwriter,
                            errors)
    # Add attributes to simplify introspection
    recoder.data_encoding = data_encoding
    recoder.file_encoding = file_encoding
    return recoder
### Helpers for codec lookup
def getencoder(encoding):
    """ Look up the codec registered for the given encoding and return
        its encoder function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.encode
def getdecoder(encoding):
    """ Look up the codec registered for the given encoding and return
        its decoder function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.decode
def getincrementalencoder(encoding):
    """ Look up the codec registered for the given encoding and return
        its IncrementalEncoder class or factory function.

        Raises a LookupError in case the encoding cannot be found or
        the codec doesn't provide an incremental encoder.
    """
    factory = lookup(encoding).incrementalencoder
    if factory is None:
        raise LookupError(encoding)
    return factory
def getincrementaldecoder(encoding):
    """ Look up the codec registered for the given encoding and return
        its IncrementalDecoder class or factory function.

        Raises a LookupError in case the encoding cannot be found or
        the codec doesn't provide an incremental decoder.
    """
    factory = lookup(encoding).incrementaldecoder
    if factory is None:
        raise LookupError(encoding)
    return factory
def getreader(encoding):
    """ Look up the codec registered for the given encoding and return
        its StreamReader class or factory function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamreader
def getwriter(encoding):
    """ Look up the codec registered for the given encoding and return
        its StreamWriter class or factory function.

        Raises a LookupError in case the encoding cannot be found.
    """
    codec_info = lookup(encoding)
    return codec_info.streamwriter
def iterencode(iterator, encoding, errors='strict', **kwargs):
    """
    Encoding iterator.

    Encodes the strings yielded by *iterator* using an
    IncrementalEncoder and yields the non-empty encoded chunks.

    errors and kwargs are passed through to the IncrementalEncoder
    constructor.
    """
    enc = getincrementalencoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        piece = enc.encode(chunk)
        if piece:
            yield piece
    # Flush: signal end-of-input so any buffered data is emitted.
    tail = enc.encode("", True)
    if tail:
        yield tail
def iterdecode(iterator, encoding, errors='strict', **kwargs):
    """
    Decoding iterator.

    Decodes the strings yielded by *iterator* using an
    IncrementalDecoder and yields the non-empty decoded chunks.

    errors and kwargs are passed through to the IncrementalDecoder
    constructor.
    """
    dec = getincrementaldecoder(encoding)(errors, **kwargs)
    for chunk in iterator:
        piece = dec.decode(chunk)
        if piece:
            yield piece
    # Flush: signal end-of-input so any buffered bytes are emitted.
    tail = dec.decode("", True)
    if tail:
        yield tail
### Helpers for charmap-based codecs
def make_identity_dict(rng):
    """ make_identity_dict(rng) -> dict

        Return a dictionary where elements of the rng sequence are
        mapped to themselves.
    """
    # A generator expression replaces the manual accumulation loop
    # (dict((i, i) ...) is used instead of a dict comprehension to stay
    # compatible with the oldest Python versions this module targets).
    return dict((i, i) for i in rng)
def make_encoding_map(decoding_map):
    """ Creates an encoding map from a decoding map.

        If a target mapping in the decoding map occurs multiple times,
        then that target is mapped to None (undefined mapping), causing
        an exception when encountered by the charmap codec during
        translation.

        One example where this happens is cp875.py which decodes
        multiple character to \\u001a.
    """
    encoding_map = {}
    for key, value in decoding_map.items():
        # A target seen more than once becomes an undefined mapping.
        encoding_map[value] = None if value in encoding_map else key
    return encoding_map
### error handlers

try:
    # Pre-fetch the standard error handler callbacks so they are
    # available as module attributes (e.g. codecs.strict_errors).
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
except LookupError:
    # In --disable-unicode builds, these error handlers are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None
# Tell modulefinder that using codecs probably needs the encodings
# package
_false = 0
if _false:
    # Never executed at runtime; the static import statement merely makes
    # bundling tools that scan the bytecode pull in the encodings package.
    import encodings
### Tests

if __name__ == '__main__':

    # Demo when run as a script:
    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')

    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
| gpl-2.0 |
2013Commons/HUE-SHARK | desktop/core/ext-py/Django-1.2.3/tests/regressiontests/forms/localflavor/za.py | 33 | 1256 | tests = r"""
# ZAIDField #################################################################
ZAIDField validates that the date is a valid birthdate and that the value
has a valid checksum. It allows spaces and dashes, and returns a plain
string of digits.
>>> from django.contrib.localflavor.za.forms import ZAIDField
>>> f = ZAIDField()
>>> f.clean('0002290001003')
'0002290001003'
>>> f.clean('000229 0001 003')
'0002290001003'
>>> f.clean('0102290001001')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid South African ID number']
>>> f.clean('811208')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid South African ID number']
>>> f.clean('0002290001004')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid South African ID number']
# ZAPostCodeField ###########################################################
>>> from django.contrib.localflavor.za.forms import ZAPostCodeField
>>> f = ZAPostCodeField()
>>> f.clean('abcd')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid South African postal code']
>>> f.clean('0000')
u'0000'
>>> f.clean(' 7530')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid South African postal code']
"""
| apache-2.0 |
muralidcis/git-cola | cola/hotkeys.py | 1 | 2975 | from PyQt4.QtGui import QKeySequence
from PyQt4.QtCore import Qt
# Hotkey chord definitions (Qt key codes OR-ed with modifier flags).
# Names ending in _SECONDARY/_TERTIARY are alternative bindings for the
# same action; tuple-valued constants group several equivalent chords.

# A-G
STAGE_MODIFIED = Qt.AltModifier | Qt.Key_A
WORD_LEFT = Qt.Key_B
BRANCH = Qt.ControlModifier | Qt.Key_B
CHECKOUT = Qt.AltModifier | Qt.Key_B
CHERRY_PICK = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_C
DIFFSTAT = Qt.AltModifier | Qt.Key_D
DIFF = Qt.ControlModifier | Qt.Key_D
DIFF_SECONDARY = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_D
EDIT = Qt.ControlModifier | Qt.Key_E
EDIT_SECONDARY = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_E
EXPORT = Qt.AltModifier | Qt.Key_E
FIT = Qt.Key_F
FETCH = Qt.ControlModifier | Qt.Key_F
FILTER = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_F
GREP = Qt.ControlModifier | Qt.Key_G
# H-P
MOVE_LEFT = Qt.Key_H
HISTORY = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_H
SIGNOFF = Qt.ControlModifier | Qt.Key_I
MOVE_DOWN = Qt.Key_J
MOVE_DOWN_SECONDARY = Qt.AltModifier | Qt.Key_J
MOVE_DOWN_TERTIARY = Qt.ShiftModifier | Qt.Key_J
MOVE_UP = Qt.Key_K
MOVE_UP_SECONDARY = Qt.AltModifier | Qt.Key_K
MOVE_UP_TERTIARY = Qt.ShiftModifier | Qt.Key_K
MOVE_RIGHT = Qt.Key_L
FOCUS = Qt.ControlModifier | Qt.Key_L
AMEND = Qt.ControlModifier | Qt.Key_M
MERGE = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_M
PUSH = Qt.ControlModifier | Qt.Key_P
PULL = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_P
# Q-Z
QUIT = Qt.ControlModifier | Qt.Key_Q
REFRESH = Qt.ControlModifier | Qt.Key_R
REFRESH_SECONDARY = Qt.Key_F5
REFRESH_HOTKEYS = (REFRESH, REFRESH_SECONDARY)
STAGE_DIFF = Qt.Key_S
STAGE_SELECTION = Qt.ControlModifier | Qt.Key_S
STASH = Qt.AltModifier | Qt.ShiftModifier | Qt.Key_S
FINDER = Qt.ControlModifier | Qt.Key_T
FINDER_SECONDARY = Qt.Key_T
TERMINAL = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_T
STAGE_UNTRACKED = Qt.AltModifier | Qt.Key_U
REVERT = Qt.ControlModifier | Qt.Key_U
WORD_RIGHT = Qt.Key_W
UNDO = Qt.ControlModifier | Qt.Key_Z
# Numbers
START_OF_LINE = Qt.Key_0
# Special keys
BACKSPACE = Qt.Key_Backspace
# NOTE(review): TRASH and DELETE_FILE_SECONDARY map the same chord
# (Ctrl+Backspace) — confirm this is intended.
TRASH = Qt.ControlModifier | Qt.Key_Backspace
DELETE_FILE = Qt.ControlModifier | Qt.ShiftModifier | Qt.Key_Backspace
DELETE_FILE_SECONDARY = Qt.ControlModifier | Qt.Key_Backspace
PREFERENCES = Qt.ControlModifier | Qt.Key_Comma
END_OF_LINE = Qt.Key_Dollar
DOWN = Qt.Key_Down
ENTER = Qt.Key_Enter
# ZOOM_OUT/REMOVE_ITEM and ADD_ITEM/ZOOM_IN intentionally share keys;
# they are used in different widgets.
ZOOM_OUT = Qt.Key_Minus
REMOVE_ITEM = Qt.Key_Minus
ADD_ITEM = Qt.Key_Plus
ZOOM_IN = Qt.Key_Plus
ZOOM_IN_SECONDARY = Qt.Key_Equal
QUESTION = Qt.Key_Question
RETURN = Qt.Key_Return
# Keypad Enter and the main Return key both accept.
ACCEPT = (ENTER, RETURN)
COMMIT = Qt.ControlModifier | Qt.Key_Return
PRIMARY_ACTION = Qt.Key_Space
SECONDARY_ACTION = Qt.ShiftModifier | Qt.Key_Space
LEAVE = Qt.ShiftModifier | Qt.Key_Tab
UP = Qt.Key_Up
# Rebase: each action accepts a digit or a mnemonic letter.
REBASE_PICK = (Qt.Key_1, Qt.Key_P)
REBASE_REWORD = (Qt.Key_2, Qt.Key_R)
REBASE_EDIT = (Qt.Key_3, Qt.Key_E)
REBASE_FIXUP = (Qt.Key_4, Qt.Key_F)
REBASE_SQUASH = (Qt.Key_5, Qt.Key_S)
# Key Sequences (platform-aware standard shortcuts)
COPY = QKeySequence.Copy
CLOSE = QKeySequence.Close
CUT = QKeySequence.Cut
DELETE = QKeySequence.Delete
NEW = QKeySequence.New
OPEN = QKeySequence.Open
| gpl-2.0 |
soumith/fbthrift | thrift/test/py/constants_test.py | 9 | 2021 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
class TestPythonConstants(unittest.TestCase):
def testStrings(self):
from .constants import constants
self.assertEquals(constants.apostrophe, "'")
self.assertEquals(constants.tripleApostrophe, "'''")
self.assertEquals(constants.quotationMark, '"')
self.assertEquals(constants.quote, 'this is a "quote"')
self.assertEquals(constants.backslash, "\\")
self.assertEquals(constants.escaped_a, "a")
def testDict(self):
from .constants import constants
self.assertEquals(constants.escapeChars['apostrophe'], "'")
self.assertEquals(constants.escapeChars['quotationMark'], '"')
self.assertEquals(constants.escapeChars['backslash'], "\\")
self.assertEquals(constants.escapeChars['escaped_a'], "a")
self.assertEquals(constants.char2ascii["'"], 39)
self.assertEquals(constants.char2ascii['"'], 34)
self.assertEquals(constants.char2ascii["\\"], 92)
self.assertEquals(constants.char2ascii["a"], 97)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
MSM8916-Samsung/android_kernel_samsung_a7lte | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | 3596 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0   # plain perf sample with no extra raw-buffer decoding
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3       # IBS sample (no dedicated class defined below yet)
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
    """Factory: choose the event class from the raw buffer size.

    144 bytes -> basic PEBS record, 176 bytes -> PEBS with load-latency
    words, anything else -> generic perf event.
    """
    size = len(raw_buf)
    if size == 144:
        return PebsEvent(name, comm, dso, symbol, raw_buf)
    if size == 176:
        return PebsNHM(name, comm, dso, symbol, raw_buf)
    return PerfEvent(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
    """Base class for perf event samples.

    Keeps the raw sample fields and counts how many events have been
    created via the event_num class attribute.
    """
    # Total number of PerfEvent instances created so far.
    event_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
        self.name = name
        self.comm = comm
        self.dso = dso
        self.symbol = symbol
        self.raw_buf = raw_buf
        self.ev_type = ev_type
        PerfEvent.event_num += 1

    def show(self):
        """Print a one-line summary of the sample."""
        # print() with a single parenthesized argument behaves the same
        # under Python 2 and 3; the original used the Python-2-only
        # print statement.
        print("PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" %
              (self.name, self.symbol, self.comm, self.dso))
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
    """Basic Intel PEBS event: the raw buffer starts with the EFLAGS,
    linear IP and general purpose register values captured when the
    event fired.
    """
    # Total number of PebsEvent instances created so far.
    pebs_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
        # The first 80 bytes are ten unsigned 64-bit values (native
        # byte order): flags, ip and eight general purpose registers.
        (flags, ip, ax, bx, cx, dx,
         si, di, bp, sp) = struct.unpack('QQQQQQQQQQ', raw_buf[0:80])
        self.flags = flags
        self.ip = ip
        self.ax = ax
        self.bx = bx
        self.cx = cx
        self.dx = dx
        self.si = si
        self.di = di
        self.bp = bp
        self.sp = sp
        PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsEvent.pebs_num += 1
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
    """PEBS event with the four extra load-latency words (status, DLA,
    DSE, latency) that follow the basic PEBS record in the raw buffer.
    """
    # Total number of PebsNHM instances created so far.
    pebs_nhm_num = 0

    def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
        # Bytes 144..176 hold four unsigned 64-bit values appended
        # after the 144-byte basic PEBS data.
        status, dla, dse, lat = struct.unpack('QQQQ', raw_buf[144:176])
        self.status = status
        self.dla = dla
        self.dse = dse
        self.lat = lat
        PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
        PebsNHM.pebs_nhm_num += 1
| gpl-2.0 |
vmax-feihu/hue | desktop/core/ext-py/Django-1.6.10/django/contrib/gis/geos/tests/test_io.py | 105 | 4181 | from __future__ import unicode_literals
import binascii
import unittest
from django.contrib.gis import memoryview
from django.utils.unittest import skipUnless
from ..import HAS_GEOS
if HAS_GEOS:
from .. import GEOSGeometry, WKTReader, WKTWriter, WKBReader, WKBWriter, geos_version_info
@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSIOTest(unittest.TestCase):
    """Round-trip tests for the GEOS WKT/WKB reader and writer wrappers."""

    def test01_wktreader(self):
        # Creating a WKTReader instance
        wkt_r = WKTReader()
        wkt = 'POINT (5 23)'

        # read() should return a GEOSGeometry
        ref = GEOSGeometry(wkt)
        g1 = wkt_r.read(wkt.encode())
        g2 = wkt_r.read(wkt)

        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        # Should only accept six.string_types objects.
        self.assertRaises(TypeError, wkt_r.read, 1)
        self.assertRaises(TypeError, wkt_r.read, memoryview(b'foo'))

    def test02_wktwriter(self):
        # Creating a WKTWriter instance, testing its ptr property.
        wkt_w = WKTWriter()
        self.assertRaises(TypeError, wkt_w._set_ptr, WKTReader.ptr_type())

        ref = GEOSGeometry('POINT (5 23)')
        ref_wkt = 'POINT (5.0000000000000000 23.0000000000000000)'
        self.assertEqual(ref_wkt, wkt_w.write(ref).decode())

    def test03_wkbreader(self):
        # Creating a WKBReader instance
        wkb_r = WKBReader()

        # Big-endian WKB hex for the reference point.
        hex = b'000000000140140000000000004037000000000000'
        wkb = memoryview(binascii.a2b_hex(hex))
        ref = GEOSGeometry(hex)

        # read() should return a GEOSGeometry on either a hex string or
        # a WKB buffer.
        g1 = wkb_r.read(wkb)
        g2 = wkb_r.read(hex)
        for geom in (g1, g2):
            self.assertEqual(ref, geom)

        bad_input = (1, 5.23, None, False)
        for bad_wkb in bad_input:
            self.assertRaises(TypeError, wkb_r.read, bad_wkb)

    def test04_wkbwriter(self):
        wkb_w = WKBWriter()

        # Representations of 'POINT (5 23)' in hex -- one normal and
        # the other with the byte order changed.
        g = GEOSGeometry('POINT (5 23)')
        hex1 = b'010100000000000000000014400000000000003740'
        wkb1 = memoryview(binascii.a2b_hex(hex1))
        hex2 = b'000000000140140000000000004037000000000000'
        wkb2 = memoryview(binascii.a2b_hex(hex2))

        self.assertEqual(hex1, wkb_w.write_hex(g))
        self.assertEqual(wkb1, wkb_w.write(g))

        # Ensuring bad byteorders are not accepted.
        for bad_byteorder in (-1, 2, 523, 'foo', None):
            # Equivalent of `wkb_w.byteorder = bad_byteorder`
            self.assertRaises(ValueError, wkb_w._set_byteorder, bad_byteorder)

        # Setting the byteorder to 0 (for Big Endian)
        wkb_w.byteorder = 0
        self.assertEqual(hex2, wkb_w.write_hex(g))
        self.assertEqual(wkb2, wkb_w.write(g))

        # Back to Little Endian
        wkb_w.byteorder = 1

        # Now, trying out the 3D and SRID flags.
        g = GEOSGeometry('POINT (5 23 17)')
        g.srid = 4326

        hex3d = b'0101000080000000000000144000000000000037400000000000003140'
        wkb3d = memoryview(binascii.a2b_hex(hex3d))
        hex3d_srid = b'01010000A0E6100000000000000000144000000000000037400000000000003140'
        wkb3d_srid = memoryview(binascii.a2b_hex(hex3d_srid))

        # Ensuring bad output dimensions are not accepted
        for bad_outdim in (-1, 0, 1, 4, 423, 'foo', None):
            # Equivalent of `wkb_w.outdim = bad_outdim`
            self.assertRaises(ValueError, wkb_w._set_outdim, bad_outdim)

        # These tests will fail on 3.0.0 because of a bug that was fixed in 3.1:
        # http://trac.osgeo.org/geos/ticket/216
        if not geos_version_info()['version'].startswith('3.0.'):
            # Now setting the output dimensions to be 3
            wkb_w.outdim = 3

            self.assertEqual(hex3d, wkb_w.write_hex(g))
            self.assertEqual(wkb3d, wkb_w.write(g))

            # Telling the WKBWriter to include the srid in the representation.
            wkb_w.srid = True
            self.assertEqual(hex3d_srid, wkb_w.write_hex(g))
            self.assertEqual(wkb3d_srid, wkb_w.write(g))
| apache-2.0 |
persandstrom/home-assistant | homeassistant/components/deconz/config_flow.py | 3 | 6271 | """Config flow to configure deCONZ component."""
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT
from homeassistant.helpers import aiohttp_client
from homeassistant.util.json import load_json
from .const import (
CONF_ALLOW_DECONZ_GROUPS, CONF_ALLOW_CLIP_SENSOR, CONFIG_FILE, DOMAIN)
CONF_BRIDGEID = 'bridgeid'
@callback
def configured_hosts(hass):
    """Return a set of the configured hosts."""
    entries = hass.config_entries.async_entries(DOMAIN)
    return {entry.data[CONF_HOST] for entry in entries}
@config_entries.HANDLERS.register(DOMAIN)
class DeconzFlowHandler(config_entries.ConfigFlow):
    """Handle a deCONZ config flow.

    Entry points: user-initiated (``async_step_user``), discovery
    (``async_step_discovery``) and YAML import (``async_step_import``).
    All paths converge on ``link`` (API-key acquisition) and then
    ``options`` before the config entry is created.
    """

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH

    def __init__(self):
        """Initialize the deCONZ config flow."""
        self.bridges = []        # bridges found during discovery
        self.deconz_config = {}  # config accumulated across the steps

    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user."""
        return await self.async_step_init(user_input)

    async def async_step_init(self, user_input=None):
        """Handle a deCONZ config flow start.

        Only allows one instance to be set up.
        If only one bridge is found go to link step.
        If more than one bridge is found let user choose bridge to link.
        """
        from pydeconz.utils import async_discovery

        if configured_hosts(self.hass):
            return self.async_abort(reason='one_instance_only')

        if user_input is not None:
            # The user picked a host from the form; find its bridge record.
            for bridge in self.bridges:
                if bridge[CONF_HOST] == user_input[CONF_HOST]:
                    self.deconz_config = bridge
                    return await self.async_step_link()

        session = aiohttp_client.async_get_clientsession(self.hass)
        self.bridges = await async_discovery(session)

        if len(self.bridges) == 1:
            self.deconz_config = self.bridges[0]
            return await self.async_step_link()
        if len(self.bridges) > 1:
            hosts = []
            for bridge in self.bridges:
                hosts.append(bridge[CONF_HOST])
            return self.async_show_form(
                step_id='init',
                data_schema=vol.Schema({
                    vol.Required(CONF_HOST): vol.In(hosts)
                })
            )
        return self.async_abort(
            reason='no_bridges'
        )

    async def async_step_link(self, user_input=None):
        """Attempt to link with the deCONZ bridge."""
        from pydeconz.utils import async_get_api_key
        errors = {}

        if user_input is not None:
            if configured_hosts(self.hass):
                return self.async_abort(reason='one_instance_only')
            session = aiohttp_client.async_get_clientsession(self.hass)
            api_key = await async_get_api_key(session, **self.deconz_config)
            if api_key:
                self.deconz_config[CONF_API_KEY] = api_key
                return await self.async_step_options()
            # Bridge not unlocked (or unreachable) -- ask the user again.
            errors['base'] = 'no_key'

        return self.async_show_form(
            step_id='link',
            errors=errors,
        )

    async def async_step_options(self, user_input=None):
        """Extra options for deCONZ.

        CONF_CLIP_SENSOR -- Allow user to choose if they want clip sensors.
        CONF_DECONZ_GROUPS -- Allow user to choose if they want deCONZ groups.
        """
        from pydeconz.utils import async_get_bridgeid

        if user_input is not None:
            self.deconz_config[CONF_ALLOW_CLIP_SENSOR] = \
                user_input[CONF_ALLOW_CLIP_SENSOR]
            self.deconz_config[CONF_ALLOW_DECONZ_GROUPS] = \
                user_input[CONF_ALLOW_DECONZ_GROUPS]

            # Discovery/import may already have supplied the bridge id.
            if CONF_BRIDGEID not in self.deconz_config:
                session = aiohttp_client.async_get_clientsession(self.hass)
                self.deconz_config[CONF_BRIDGEID] = await async_get_bridgeid(
                    session, **self.deconz_config)

            return self.async_create_entry(
                title='deCONZ-' + self.deconz_config[CONF_BRIDGEID],
                data=self.deconz_config
            )

        return self.async_show_form(
            step_id='options',
            data_schema=vol.Schema({
                vol.Optional(CONF_ALLOW_CLIP_SENSOR): bool,
                vol.Optional(CONF_ALLOW_DECONZ_GROUPS): bool,
            }),
        )

    async def async_step_discovery(self, discovery_info):
        """Prepare configuration for a discovered deCONZ bridge.

        This flow is triggered by the discovery component.
        """
        deconz_config = {}
        deconz_config[CONF_HOST] = discovery_info.get(CONF_HOST)
        deconz_config[CONF_PORT] = discovery_info.get(CONF_PORT)
        deconz_config[CONF_BRIDGEID] = discovery_info.get('serial')

        config_file = await self.hass.async_add_job(
            load_json, self.hass.config.path(CONFIG_FILE))

        # Use .get() for the host lookup: a config file that exists but has
        # no host entry must not abort the flow with a KeyError.
        if config_file and \
           config_file.get(CONF_HOST) == deconz_config[CONF_HOST] and \
           CONF_API_KEY in config_file:
            deconz_config[CONF_API_KEY] = config_file[CONF_API_KEY]

        return await self.async_step_import(deconz_config)

    async def async_step_import(self, import_config):
        """Import a deCONZ bridge as a config entry.

        This flow is triggered by `async_setup` for configured bridges.
        This flow is also triggered by `async_step_discovery`.

        This will execute for any bridge that does not have a
        config entry yet (based on host).

        If an API key is provided, we will create an entry.
        Otherwise we will delegate to `link` step which
        will ask user to link the bridge.
        """
        if configured_hosts(self.hass):
            return self.async_abort(reason='one_instance_only')

        self.deconz_config = import_config
        if CONF_API_KEY not in import_config:
            return await self.async_step_link()

        # Imported bridges get permissive defaults for the option flags.
        user_input = {CONF_ALLOW_CLIP_SENSOR: True,
                      CONF_ALLOW_DECONZ_GROUPS: True}
        return await self.async_step_options(user_input=user_input)
| apache-2.0 |
jkonecny12/blivet | blivet/devices/lib.py | 6 | 5915 | # devices/lib.py
#
# Copyright (C) 2009-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Lehman <dlehman@redhat.com>
#
import os
from .. import errors
from .. import udev
from ..size import Size
LINUX_SECTOR_SIZE = Size(512)
def get_device_majors(path="/proc/devices"):
    """ Build a mapping of device major numbers to driver names.

        :keyword path: file to parse, in /proc/devices format; the
                       parameter (default "/proc/devices") exists so the
                       parser can be exercised on a fixture file
        :type path: str
        :returns: mapping of major number (int) to driver name
        :rtype: dict

        Section headers ("Character devices:", "Block devices:") and blank
        lines are skipped because they fail either the two-field split or
        the int() conversion.
    """
    majors = {}
    # Context manager closes the file; the original leaked the handle.
    with open(path) as devices_file:
        for line in devices_file:
            try:
                (major, device) = line.split()
            except ValueError:
                continue

            try:
                majors[int(major)] = device
            except ValueError:
                continue
    return majors
device_majors = get_device_majors()
def devicePathToName(devicePath):
    """ Return a name based on the given path to a device node.

        :param devicePath: the path to a device node
        :type devicePath: str
        :returns: the name
        :rtype: str
    """
    if not devicePath:
        return None

    # Strip a leading "/dev/", then unwrap the device-mapper and md
    # namespaces (in that order, each at most once).
    name = devicePath[5:] if devicePath.startswith("/dev/") else devicePath

    for prefix in ("mapper/", "md/"):
        if name.startswith(prefix):
            name = name[len(prefix):]

    # Anything still absolute is reduced to its final path component.
    return os.path.basename(name) if name.startswith("/") else name
def deviceNameToDiskByPath(deviceName=None):
    """ Return a /dev/disk/by-path/ symlink path for the given device name.

        :param deviceName: the device name
        :type deviceName: str
        :returns: the full path to a /dev/disk/by-path/ symlink, or "" when
                  no name was given
        :rtype: str
        :raises: :class:`~.errors.DeviceNotFoundError` if the device is
                 unknown to udev or has no by-path link
    """
    if not deviceName:
        return ""

    # Scan the udev database for the first device with a matching name.
    for dev in udev.get_devices():
        if udev.device_get_name(dev) != deviceName:
            continue
        by_path = udev.device_get_by_path(dev)
        if by_path:
            return by_path
        break

    raise errors.DeviceNotFoundError(deviceName)
class ParentList(object):
    """ A list with auditing and side-effects for additions and removals.

        An ordered collection with guaranteed-unique members. Optional
        callbacks run before every addition or removal, giving the owner a
        chance to veto the change (by raising) or to perform bookkeeping.
        Only a subset of the :class:`list` protocol is exposed, so every
        mutation is forced through the callbacks:

        .. code::

            ml.append(x)
            ml.remove(x)
            iter(ml)
            len(ml)
            x in ml
            x = ml[i]   # not ml[i] = x
    """

    def __init__(self, items=None, appendfunc=None, removefunc=None):
        """
            :keyword items: initial contents
            :type items: any iterable
            :keyword appendfunc: a function to call before adding an item
            :type appendfunc: callable
            :keyword removefunc: a function to call before removing an item
            :type removefunc: callable

            Each callback receives only the item in question (not the
            list), runs immediately before the mutation, and should raise
            to prevent it. The callbacks must not themselves modify the
            :class:`~.ParentList`; this is tailored for use inside
            :class:`~.Device`.
        """
        self.items = list(items) if items else list()
        # Default callbacks are accept-everything no-ops.
        self.appendfunc = appendfunc or (lambda i: True)
        self.removefunc = removefunc or (lambda i: True)

    def __iter__(self):
        return iter(self.items)

    def __contains__(self, y):
        return y in self.items

    def __getitem__(self, i):
        return self.items[i]

    def __len__(self):
        return len(self.items)

    def append(self, y):
        """ Add an item to the list after running a callback. """
        if y in self.items:
            raise ValueError("item is already in the list")

        # Callback may raise to veto the addition.
        self.appendfunc(y)
        self.items.append(y)

    def remove(self, y):
        """ Remove an item from the list after running a callback. """
        if y not in self.items:
            raise ValueError("item is not in the list")

        # Callback may raise to veto the removal.
        self.removefunc(y)
        self.items.remove(y)

    def replace(self, x, y):
        """ Replace the first instance of x with y, bypassing callbacks.

            .. note::

                It is usually a bad idea to bypass the callbacks. This is
                intended for specific circumstances like toggling encryption
                of container member devices in the devicefactory classes.
        """
        if x not in self.items:
            raise ValueError("item to be replaced is not in the list")

        self.items[self.items.index(x)] = y
| lgpl-2.1 |
Maccimo/intellij-community | python/helpers/pydev/_pydev_bundle/_pydev_completer.py | 10 | 6762 | import sys
if sys.version_info[0] >= 3:
import builtins as __builtin__ # Py3
else:
import __builtin__
try:
import java.lang #@UnusedImport
from _pydev_bundle import _pydev_jy_imports_tipper
_pydev_imports_tipper = _pydev_jy_imports_tipper
except ImportError:
IS_JYTHON = False
from _pydev_bundle import _pydev_imports_tipper
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import dict_iter_items
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
#=======================================================================================================================
# _StartsWithFilter
#=======================================================================================================================
class _StartsWithFilter:
    '''
    Case-insensitive "does this name start with the prefix?" predicate.

    Implemented as a callable class rather than a closure because Jython 2.1
    lambdas cannot reference an enclosing scope.
    '''

    def __init__(self, start_with):
        # Normalize once so each call only lowers the candidate name.
        self.start_with = start_with.lower()

    def __call__(self, name):
        prefix = self.start_with
        return name.lower()[:len(prefix)] == prefix
#=======================================================================================================================
# Completer
#
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
#=======================================================================================================================
class Completer:
    def __init__(self, namespace=None, global_namespace=None):
        """Create a new completer for the command line.

        Completer([namespace,global_namespace]) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given. This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """
        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

    def complete(self, text):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None. The completion should begin with 'text'.
        """
        if self.use_main_ns:
            # In pydev this option should never be used
            raise RuntimeError('Namespace must be provided!')

        # Dotted expressions are resolved via attribute lookup; bare names
        # are matched against the namespaces and builtins.
        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.
        """

        def get_item(obj, attr):
            return obj[attr]

        a = {}

        # Builtins are offered too, except the True/False/None singletons.
        filtered_builtin = {}
        for (key, val) in dict_iter_items(__builtin__.__dict__):
            # do not use dict comprehension for Py2.6 compatibility
            if not ((val is True) or (val is False) or (val is None)):
                filtered_builtin[key] = val

        # Later dicts win on key collisions: namespace overrides builtins,
        # global_namespace overrides both.
        for dict_with_comps in [filtered_builtin, self.namespace, self.global_namespace]:  # @UndefinedVariable
            a.update(dict_with_comps)

        # NOTE: 'filter' shadows the builtin; kept for API/back-compat.
        filter = _StartsWithFilter(text)

        return dir2(a, a.keys(), get_item, filter)

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions. (For class instances, class members are are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)  # @UndefinedVariable

        if not m:
            return []

        expr, attr = m.group(1, 3)
        # Evaluate the object expression; try the local namespace first,
        # then the global one. Bare excepts are deliberate: any evaluation
        # failure simply yields no completions.
        try:
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        filter = _StartsWithFilter(attr)

        words = dir2(obj, filter=filter)

        return words
#=======================================================================================================================
# generate_completions_as_xml
#=======================================================================================================================
def generate_completions_as_xml(frame, act_tok):
    """Return completion proposals for act_tok in frame, serialized as XML.

    Each proposal is emitted as a <comp/> element whose p0..p3 attributes
    carry the (name, description, parameters, type) tuple, XML-escaped.
    """
    if frame is None:
        return '<xml></xml>'

    # Merge globals and locals into one dict (locals take precedence)
    # instead of handing frame.f_globals to eval directly -- works around
    # generator-expression name resolution, see
    # https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # and http://mail.python.org/pipermail/python-list/2009-January/526522.html
    updated_globals = {}
    updated_globals.update(frame.f_globals)
    updated_globals.update(frame.f_locals)

    completions = Completer(updated_globals, None).complete(act_tok)

    valid_xml = pydevd_xml.make_valid_xml_value
    quote = pydevd_xml.quote

    chunks = ["<xml>"]
    for comp in completions:
        chunks.append('<comp p0="%s" p1="%s" p2="%s" p3="%s"/>' % (
            valid_xml(quote(comp[0], '/>_= \t')),
            valid_xml(quote(comp[1], '/>_= \t')),
            valid_xml(quote(comp[2], '/>_= \t')),
            valid_xml(quote(comp[3], '/>_= \t')),
        ))
    chunks.append("</xml>")

    return ''.join(chunks)
| apache-2.0 |
cloudify-incubator/cloudify-utilities-plugin | cloudify_ssh_key/operations.py | 1 | 9384 | ######
# Copyright (c) 2016-2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import get_desired_value
import sys
import os
import tempfile
import shutil
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend
from cloudify.decorators import operation
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import exception_to_error_cause
from cloudify import ctx, manager
from cloudify_rest_client.exceptions import CloudifyClientError
from cloudify_terminal import operation_cleanup
ALGORITHM = 'RSA'  # the only key algorithm this plugin supports

# runtime names
# Keys under which create() records its results in the node instance's
# runtime properties; delete() reads them back to undo the work.
SECRETS_KEY_NAME = 'secret_key_name'
PUBLIC_KEY_PATH = 'public_key_path'
PRIVATE_KEY_PATH = 'private_key_path'
PUBLIC_KEY_EXPORT = 'public_key_export'
PRIVATE_KEY_EXPORT = 'private_key_export'
SECRETS_KEY_OWNER = 'secrets_key_owner'
@operation(resumable=True)
def create(**_):
    """Generate an RSA key pair and store it per the node configuration.

    Depending on configuration, the key pair is stored in the manager's
    secret store, written to filesystem paths, and/or exported into the
    instance's runtime properties. Refuses to run if runtime properties
    from an earlier create are still present (delete must run first).
    """
    for key in [SECRETS_KEY_NAME, PUBLIC_KEY_PATH, PRIVATE_KEY_PATH,
                PUBLIC_KEY_EXPORT, PRIVATE_KEY_EXPORT, SECRETS_KEY_OWNER]:
        if key in ctx.instance.runtime_properties:
            ctx.logger.error("You should run delete before run create")
            return

    # Resolve configuration with the documented precedence (inputs,
    # runtime properties, node properties).
    config = get_desired_value(
        'resource_config', _,
        ctx.instance.runtime_properties,
        ctx.node.properties)

    private_key_path = config.get('private_key_path')
    public_key_path = config.get('public_key_path')
    openssh_format = config.get('openssh_format', True)
    algorithm = config.get('algorithm')
    bits = config.get('bits')
    use_secret_store = config.get(
        'use_secret_store') or ctx.node.properties.get('use_secret_store')
    # Default secret name is unique per deployment/instance.
    key_name = config.get('key_name') or '{0}-{1}'.format(ctx.deployment.id,
                                                          ctx.instance.id)
    store_private_key_material = _.get('store_private_key_material', False)
    store_public_key_material = _.get('store_public_key_material', True)
    use_secrets_if_exist = config.get(
        'use_secrets_if_exist') or ctx.node.properties.get(
        'use_secrets_if_exist')

    # Unsupported legacy options are logged, not fatal.
    if config.get('comment'):
        ctx.logger.error('Property "comment" not implemented.')
    if config.get('passphrase'):
        ctx.logger.error('Property "passphrase" not implemented.')
    if config.get('unvalidated'):
        ctx.logger.error('Property "unvalidated" not implemented.')

    # openssh_format is of type boolean
    if not openssh_format:
        raise NonRecoverableError('Only OpenSSH format is supported')

    if algorithm != ALGORITHM:
        raise NonRecoverableError('Only RSA algorithm is supported')

    if not use_secret_store and use_secrets_if_exist:
        raise NonRecoverableError(
            'Cant enable "use_secrets_if_exist" property without '
            'enable "use_secret_store" property')

    # Generate the pair and serialize to PEM (private) / OpenSSH (public).
    key_object = rsa.generate_private_key(
        backend=default_backend(),
        public_exponent=65537,
        key_size=bits
    )
    private_key_export = key_object.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption()).decode('utf-8')
    public_key_export = key_object.public_key().public_bytes(
        encoding=serialization.Encoding.OpenSSH,
        format=serialization.PublicFormat.OpenSSH
    ).decode('utf-8')

    if use_secret_store:
        private_name = '{0}_private'.format(key_name)
        public_name = '{0}_public'.format(key_name)
        # Reuse pre-existing secrets when allowed and both halves exist;
        # SECRETS_KEY_OWNER records whether delete() may remove them later.
        if use_secrets_if_exist and _check_if_secret_exist(
                private_name) and _check_if_secret_exist(public_name):
            ctx.instance.runtime_properties[SECRETS_KEY_OWNER] = False
            private_key_export = _get_secret(private_name).value
            public_key_export = _get_secret(public_name).value
        # if the user want to use existing secrets but one of them is missing
        elif use_secrets_if_exist and (
                _check_if_secret_exist(public_name) ^ _check_if_secret_exist(
                    private_name)):
            raise NonRecoverableError('Cant use existing secrets: {0}, {1} '
                                      'because only one of them exists in '
                                      'your manager'.format(public_name,
                                                            private_name))
        else:
            _create_secret(private_name, private_key_export)
            _create_secret(public_name, public_key_export)
            ctx.instance.runtime_properties[SECRETS_KEY_OWNER] = True
        ctx.instance.runtime_properties[SECRETS_KEY_NAME] = key_name

    # The private key must end up somewhere: file, secret store, or
    # runtime properties.
    if (
        not private_key_path and
        not use_secret_store and
        not store_private_key_material
    ):
        raise NonRecoverableError(
            'Must provide private_key_path when use_secret_store is false')

    if private_key_path:
        _write_key_file(private_key_path,
                        private_key_export,
                        _private_key_permissions=True)
        ctx.instance.runtime_properties[PRIVATE_KEY_PATH] = private_key_path
    if public_key_path:
        _write_key_file(public_key_path, public_key_export)
        ctx.instance.runtime_properties[PUBLIC_KEY_PATH] = public_key_path

    if store_public_key_material:
        ctx.instance.runtime_properties[PUBLIC_KEY_EXPORT] = \
            public_key_export
    if store_private_key_material:
        ctx.instance.runtime_properties[PRIVATE_KEY_EXPORT] = \
            private_key_export
@operation_cleanup
def delete(**_):
    """Undo create(): remove owned secrets and any written key files."""
    # remove keys only if created on previous step
    # (SECRETS_KEY_OWNER is False when create() reused existing secrets).
    key_name = ctx.instance.runtime_properties.get(SECRETS_KEY_NAME)
    if key_name and ctx.instance.runtime_properties.get(SECRETS_KEY_OWNER):
        private_name = '{0}_private'.format(key_name)
        if _get_secret(private_name):
            _delete_secret(private_name)
        public_name = '{0}_public'.format(key_name)
        if _get_secret(public_name):
            _delete_secret(public_name)
        del ctx.instance.runtime_properties[SECRETS_KEY_NAME]
        del ctx.instance.runtime_properties[SECRETS_KEY_OWNER]
    else:
        ctx.logger.info(
            "Skipping delete secrets task because you are using a secret that"
            " was not created in this deployment.")

    # remove stored to filesystem keys
    private_key_path = ctx.instance.runtime_properties.get(PRIVATE_KEY_PATH)
    public_key_path = ctx.instance.runtime_properties.get(PUBLIC_KEY_PATH)
    if private_key_path:
        _remove_path(private_key_path)
        del ctx.instance.runtime_properties[PRIVATE_KEY_PATH]
    if public_key_path:
        _remove_path(public_key_path)
        del ctx.instance.runtime_properties[PUBLIC_KEY_PATH]
def _create_secret(key, value):
    """Create a manager secret; wrap REST failures as NonRecoverableError."""
    try:
        manager.get_rest_client().secrets.create(key, value)
    except CloudifyClientError as e:
        raise NonRecoverableError(str(e))
def _get_secret(key):
    """Fetch a manager secret; wrap REST failures as NonRecoverableError."""
    try:
        return manager.get_rest_client().secrets.get(key)
    except CloudifyClientError as e:
        raise NonRecoverableError(str(e))
def _check_if_secret_exist(key):
    """Return True when a secret named ``key`` exists on the manager."""
    try:
        # _get_secret raises NonRecoverableError when the secret is missing.
        return _get_secret(key).key == key
    except NonRecoverableError:
        return False
def _delete_secret(key):
    """Delete a manager secret; wrap REST failures as NonRecoverableError."""
    try:
        manager.get_rest_client().secrets.delete(key)
    except CloudifyClientError as e:
        raise NonRecoverableError(str(e))
def _write_key_file(_key_file_path,
                    _key_file_material,
                    _private_key_permissions=False):
    """Atomically write key material to ``_key_file_path``.

    The material is written to a temporary file which is then moved into
    place (creating parent directories as needed), so readers never see a
    partially written key.

    :param _key_file_path: destination path; ``~`` is expanded
    :param _key_file_material: text content of the key
    :param _private_key_permissions: when True, chmod the result to 0o600
    :raises NonRecoverableError: if the file cannot be moved into place
    """
    expanded_key_path = os.path.expanduser(_key_file_path)

    # The with-block closes the file; the original's explicit close() call
    # inside the context manager was redundant and has been dropped.
    with tempfile.NamedTemporaryFile('wb', delete=False) as temporary_file:
        temporary_file.write(_key_file_material.encode("utf-8"))

    try:
        directory = os.path.dirname(expanded_key_path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        shutil.move(temporary_file.name, expanded_key_path)
    except Exception:
        _, last_ex, last_tb = sys.exc_info()
        raise NonRecoverableError(
            "Failed moving private key", causes=[
                exception_to_error_cause(last_ex, last_tb)])
    finally:
        # Clean up the temp file if the move did not consume it.
        if os.path.exists(temporary_file.name):
            os.remove(temporary_file.name)

    if _private_key_permissions:
        # Reuse the already-expanded path instead of expanding again.
        os.chmod(expanded_key_path, 0o600)
def _remove_path(key_path):
    """Delete ``key_path`` (``~`` expanded) if it exists; no-op otherwise."""
    try:
        expanded = os.path.expanduser(key_path)
        if os.path.exists(expanded):
            os.remove(expanded)
    except OSError as e:
        raise NonRecoverableError(str(e))
| apache-2.0 |
percipient/datadogpy | tests/unit/api/helper.py | 3 | 3033 | # python
import unittest
# datadog
from datadog import initialize, api
from datadog.api.base import CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource,\
GetableAPIResource, ListableAPIResource, ActionAPIResource
from datadog.util.compat import iteritems, json
# 3p
import requests
from mock import patch, Mock
API_KEY = "apikey"
APP_KEY = "applicationkey"
API_HOST = "host"
HOST_NAME = "agent.hostname"
FAKE_PROXY = {
"https": "http://user:pass@10.10.1.10:3128/",
}
class MockReponse(requests.Response):
    # NOTE(review): the class name carries a historical "Reponse" typo;
    # renaming would break existing imports — confirm before fixing.
    content = None

    def raise_for_status(self):
        # Tests never want HTTP error statuses to raise.
        pass
# A few API Resources
# Minimal concrete subclasses of each API resource mixin, each bound to a
# distinct fake endpoint, so the mixins can be exercised in isolation.
class MyCreatable(CreateableAPIResource):
    _class_url = '/creatables'


class MyUpdatable(UpdatableAPIResource):
    _class_url = '/updatables'


class MyGetable(GetableAPIResource):
    _class_url = '/getables'


class MyListable(ListableAPIResource):
    _class_url = '/listables'


class MyDeletable(DeletableAPIResource):
    _class_url = '/deletables'
class MyActionable(ActionAPIResource):
    _class_url = '/actionables'

    # Public wrappers exposing the protected trigger helpers for testing.
    @classmethod
    def trigger_class_action(cls, method, name, id=None, **params):
        super(MyActionable, cls)._trigger_class_action(method, name, id, **params)

    @classmethod
    def trigger_action(cls, method, name, id=None, **params):
        super(MyActionable, cls)._trigger_action(method, name, id, **params)
# Test classes
class DatadogAPITestCase(unittest.TestCase):
    """Base test case that replaces requests.Session with a mock."""

    def setUp(self):
        # Mock patch requests
        self.request_patcher = patch('requests.Session')
        request_class_mock = self.request_patcher.start()
        self.request_mock = request_class_mock.return_value
        self.request_mock.request = Mock(return_value=MockReponse())

    def get_request_data(self):
        """Return the JSON-decoded payload of the most recent request."""
        _, kwargs = self.request_mock.request.call_args
        return json.loads(kwargs['data'])

    def request_called_with(self, method, url, data=None, params=None):
        # Assert on the most recent mocked HTTP call: method, URL and,
        # when given, the JSON body and query parameters.
        (req_method, req_url), others = self.request_mock.request.call_args
        assert method == req_method, req_method
        assert url == req_url, req_url

        if data:
            assert 'data' in others
            assert json.dumps(data) == others['data'], others['data']

        if params:
            assert 'params' in others
            for (k, v) in iteritems(params):
                assert k in others['params'], others['params']
                assert v == others['params'][k]

    def tearDown(self):
        self.request_patcher.stop()
class DatadogAPINoInitialization(DatadogAPITestCase):
    """Variant that resets module-level API settings after each test."""

    def tearDown(self):
        super(DatadogAPINoInitialization, self).tearDown()
        # Restore default values so state does not leak between tests.
        api._api_key = None
        api._application_key = None
        api._api_host = None
        api._host_name = None
class DatadogAPIWithInitialization(DatadogAPITestCase):
    """Variant that initializes the client with the fake credentials."""

    def setUp(self):
        super(DatadogAPIWithInitialization, self).setUp()
        initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST)
| bsd-3-clause |
51x/EveryCon | chaos_test/test2-v3d4s.py | 1 | 6251 | jA0EAwMCWguE8aVZVWVgyey4FXnPY1gLFI14rvxeJekDYz0ux5SwAl3Loh3iLdQN
W3XHLJhfj+ygfQuY+ol2b21gh7VswD7w4NZ2Grkdo5lDH4DP5X0tlGi+jsRNe9WS
vS5qYn+nO54KxgzIRQdSneALo3vCBm+waDnsuMSTpGOIXIujdOZJd2LacUMNXbhx
ViCl9T788kkWKVGv8i/AOfYfafaCKdp8S1Ycc9Lo62iOyP0dOdfjAvg9Mx7iMVH7
5XDRwP3wvizS/NKgH6eP0Cp3VR+XLMyvrTrzAKCej8TIEQYHq92Z4ToPcNj0wCPQ
/+fLMPnVO+0wZgR4NSfGcNmvnnqVqkE4olN+YB0Jyb6dFKwzMtc0Yufcf4p2sBkh
m0GDS7HOdBA74ydfA/aEBrmrrg35SUjEv+1VJ0BKiiXNDUVCmbYCOWwzqD9bp72m
EzUCrVNosD3YtG/i4WtnFquKy21UYP6OwSE81q/5FzSIVQtcf8Jme5nl8fu+w7T5
Mo8+nYHuorYDhAWPHUMS0Qp/MgKmjEUwhWqY+gBYbedne+neNbri4IfWufW6kqM2
OD6AJzVBA5GNcAXcjkiYkCeCr7+qPGMlnoyG+ZvOyv5ybWy7cqeze1mx4Ul5lzqD
cnjvQy/EE7Qjk94q3MfKp6/kzxwX6W8uUKD/UG/lBBoL+ZHfdao2kd0bR8y8OAkb
JzjIXJfiv468wIfDGM0vs5XBy0pv/Logwgr3I8c6HcEOX2nj/BaWus5VWmLsJ7R6
Yijz04FTj6iuLMmzMMQWKDC5GkIk6b/o1QL2s54Du20y6hZT/llX/PxEfSDlCKS8
zCpQq1QUdldy662rtJCdYm1IYXvnue7v2pY74y/rrjMAhsujxlb8OPjQy9Hl0R5x
uaqIvOAzZsVVpfNeiA5P5gyEqjFJ0xwDjv18sVBHdDP5BjM+x5O/u+f5Tl6plHDN
x01heVLGZ8+jfYOZblj93fVCLmFdDfmc4z7ZZ3bglHsX6NL9U+Qcy+91+reJEODO
j/m8ZWOmgzduJFCjUsx8Kp7Is6NqWUabQO8Ysy8YJpA03qNXO9SrvVvEqB6CtvJQ
f0Yl8evbJaRWOqA0NfdeSga2nxJXhMe2czazahVft9faxSJcHKx7GkugcRJHlBYI
pEJ03jZ9NZqhf9Bit/UxXpOUqHt1HG1qX3oS9z1qicOEnrbkK41RLtSreVM35q+c
F83y4r8wUX3SqgrOAamuwCen8sSJEvwmMfQNjcNT3C0woMyrY60pvUS5vn1PVetI
QrMLXaYoGPOG3CP9i5eX2/amvN6YTpxNRk/wZqB0vBM9SAX2Oupnm/eJZjzijOxL
BrC03d9TvOHju7MJxtxad+0RfT1VwYEwJFl2RBz4orT1NvS8rF/HTEW5PvahRrvg
75Y7RKd05Hd5djbYn7tuLY9Bm9+PkOzMT5RF+zpBckuCFJW5BUiwG3Xd4mu4ZkFC
kk2KMQTZ1IXUIjuYnyDDbLO0rNvgqe081+nozxxSqI9a/AxQmyyIV1E/cn8UYND4
+fan+a0ryt60gMQToENsiz2h3Z0lMeqLF+h4zlL6dHTByKx7Lh3MgQSAJVyQD8u+
+q9ikHdubTpwZtJdz5isQMEP/tjX0Z96FEGy2yFxxjG98xZxBra6P+htLEz84JLr
UobRbD5poTcJbhkw3eWrtsG8wJ0Y21x+XR+YEgdnJWXas5tyRjGfG6EO7gm+Lh1O
yBd99bFfTFflWVjkzYpHJAp+PYAVuKsv1+8ITHU4wpFYAdjJFK0+jF2Q+7FK8c3P
fCepu/WDHpr0UMsWYAQ/9jF4YotuQxkBdxFteFhuhoJsDoblhILp1+opTHfS5sFn
kUjul6E+FQ15LucAIi7AvRXaVRxnRBMloaohdyOsL3h17jFM84U7qL+RZgZVAlaV
sP9gsGwhufR1P61s/mXYoHTCfRA5fwh26T1mKzl7cyH8COyd4A0dZB4THQek7jGs
ToksEdC3oPSE1ybqzBMBLZ97PMwkDEQvKf0JZ7Qn3l1feqIATB7OqcRaN0jyPMAR
nryl7zVqS42sK/oCILD3yDxLVaSkgKvxfyfwnA8o5sJbPzVD1VNoclARcEt0RS5U
loobMt4vT09u4HAAu1mGBtXWmaaXzsbG21K4DQJXGiVEGHDRJR5nd7MpIOk2W6A1
IP4mpkS0v4hWYcZ97Xb3XfUuNQwAr08qEbMByoDWui+XblErzZ95jQfke7ySOc3f
ogOEC6CQUBm1kCEd+VRJvbP5WYIR6P6LcG/amSdfukSW1kPgjyEN6n/5Smf62+Jn
3Jie16eGxdbJquAVvMcMfV6mhB15cYNrEZlTWUD/X2o0iUxEyPVbzNRKhUlYSrrS
FI/X9SXORGPfWbABXMC8iH8UUqcDlU391sEXmwoOgESG5po2p32utkKneBKlZ+dj
lOOjrVsB89/rlWMVsske9J2C1X6k/cbKRg7gon0nT/eeBNKCzWyZrcklqYmHBavU
XzCyi+jOTI/NTYnG0HuITMDIBrxqQV13ndtxPhoJPansSKaMtUfjiyKBma6Ov4hz
zJk5vz8I5EmGviNsMAz8bf5xLlqrbAAuOH68woKFoMDgx7t5CpUW8o/TgjszoF9P
8hkKREfowDhqQGd806s/vQyenD1Zuh3tK4tarm+iEZGh/bVvd+bVE5cvdkjl5W3H
1CtQAqt28P4GlTq8jA+kkeVAuxCY23v2UNjloTj9gzTIsKwid6AOOFz9vPHYE4j0
Own14aAHcoh2mQnnY/AVWICz7JrbVjeQmgmv5TAXPZFTArcnfcmM4raZkr4jyUOz
m8kDDnSqpgstsD1yJfrRfYYWDibdk3QyJql9hT53kzK0tRWJGtA4Bjplw8lx2aZ8
bMT8ONJ+qalo3BFhgJR3alTsJXX1Z4lb1XapRA/zOYT9s+GAZkD9b/KDtBHyK3k5
TUmPfQH9Umyp3iQzfPW8OOAOy8sRgJ1O/ZhoPkp2kd3QMcV6mpKGtB5qg457yD9j
7nJhb6iaUzXUjxqQ75MOTx77N2NI/6rzi9sJmJtpp0Mu34hjzwxndWj1AHTW9uU/
EC4BZsyQBMAZ6E2gKZBNEOba8ZbIS+lgiofwaSPQmRJ0m5x2GX6VTiw6xNiPpTk5
dXngXON3ec1v241IHk+YyFdHrWm2i7G66XUb6UKvggWfqfXXeTIs1LYls0iijy22
sGyzOPeJ9rIqz8FSWMKaYjC4W6IIiSAW6MHn7Z4XuV82l2DqphNDI8aGQ1dgAxS4
0yu1hJxlXic7kOd8kqj0JrB4PmAGeSHT9b/SrTySvYfI8OLvxzKB8Z440QIXKlGU
R8p5qvr78A8p3AFRySePa1xiuGqOSVl95L1vvuwEx8qQhLpm8DL6lVSwQ7qycsJm
JvzvOIPaqqi2fzB7PTgxqleOLqpjo0ppxhKoc5uV4Qn+Gga7QOr71mQJYK9boHn1
6wLZEhwC/OR131LTj7aS1baPSPnIaUaO7bzvO+w+1zZv9M+/sc6opdnfTvXGUV59
kvNsGrxuwj1TUN5hPAnUZjQg8RQ5SY0aSrWS0ZvboeBs7uaoVlPhN3N5E5/aH2gD
VkbPrkCPH5K9tXOEel+zkowgh9vV13KbeexC0mJB/C6qXm7J3SsIG1npbaTq62Wo
vIDZ79S1h7X5YYmO08zGM5kwwjtM/3uMZdoAJfqybyJMhDAVG2dQJNmHg0CkqVvp
uZvxrTNVp/zh7E3EoaIBAWO8sQqSUn3sZg96Uk0KGRR/tDn6KMHsNgEHaNTl2h9+
tD8qAMWKVfwbiS4NRs131gDZAxSlP9PXrTaAEQ9M+awI91q7YyCwkOXqbBoNlWJQ
pFzeSD9Uy2TGqLjEw3AK5T+I89+uD5rXplgGrkoQ2jbh8lU6OVjZ+3I4Lo4GHLcc
u1tbE5RaWUglwWEp5JCD/SjHP9KbrPf9KNiysE0I9yMVJVhsds6efcRYipFaSTEI
nVPIbrlSMbIeud9YqD5jczJsNej339cpJRNBE4gKen17bPxdUlIHoC/p3PETWItV
iFBthg/Hx8Sz5WRvMc/6gqy1JIlKr+IyYl1lHsycQum2GRC1PZ0W/wRL3IK7eEzs
EUxXrlBSv9GjsF25sH27N+bsYReYfTnkDNfg2Bj/LDuWSd2zWam9VVO5MLjW9Bmf
JtP6yt4oza5XmIEfg14XunLWSuGb+tF5A1EJeiOhfxiipoxbAN8NEG2rgYtoTR7N
9RMPPIRr/XWrFkEcWtDLvtyDYOBKv86fXIJqxQbCRBWnn6pCmE8sYWK49H9bV2ic
K6y+nMD59/lCJEvDIiFrOov40RuySVi821aBvZCiLN3hse+umL3grztHdmMCOWv3
dkl1a/Qi2KkM+w0mV9xqROZqxPSBUKc5sBorHK2OxrTgODCmVaKL5cv5V8Po3/EN
p3bvlLICP91gwyFpSvohEQgIJTfZDYAXMHn6ZM9NbdrniJh2aHsGIXEhST7AqRdc
q5gJno/1yTUPtZT+d+SBTognoDOpU8s8iR3kBrMfkHLN6xyp0LPPakEyUuwj2Vly
7BfHqh0erRFpZj82IsPSxGRgtFG5xeItRGX+aPK2xrFLy/FqyU5zmGfvn4nD47Nf
XegFQReRqizn2hPCjxRH5Eqd/7lmGssiqONyF9HnwWwm2j9cu13VXKXmQ7sBs/gS
Js1Xv9erHuHpDb8xi2D/pECTSnk2q25CGB1os0Y4HKyfJgc5DMIf/vjX+gXwYr+b
1lUbWTUnk9HDpjptiDbfG5UiJfgBzwCwVS/4M99S7cqptPnK/CpnHebyEvO7X/1R
vC79mqxx9Cbb4LHIz2rkQja4A3pkec5VXSBMipaoNRvlXLxhitagyWdbyUzpAb1W
PaFxZl00gqLlGzs5DbFU96816VF+5s+kgy56FxXWqZDsL2V2Qy69rhpuFcWEGDgq
AhaXK+axPVYqyWmYRtQu3zit4OL3R1B8r9nNMTzlFCmXBdMA6RKx83B+Mho4qhno
HL8AWk07OtBDcYc4533oALNWvShsuJuLMH3zL1g90q1noDCBD6waycEK7f/30l/J
7v865OaEtLRTrOaFNLpGd7lN+uyxWMFoTP3TCjzJdOnM+PXOILcF3z2ee5abmMDw
3XIQM1vxQnZi9onFC4dZPhTT9B+Yh9akQJlZ7wmyN7MuypwCTmFGnVAcqchHj6Vn
a9hByKxCyLhGIu4WtdIp5nNL/SgRzrurkZFoo2sVgouKAbPI473KUa7GXVVU9xyD
nZBcu5V1HGSjL3GzFah6LI6y6eXfuJJ0Qzwuj7Bq/xHMq9i+qvTY2qA5UKMPLOca
odJw2mGaXCToRX1SAjH38SZihT11wDu9BqUs8xP6c4JE+6lATHUUQPdtDjNPSkgY
vHk2FSxWq1i8Borf5gI/GN51lzTT86NaBIMsTHJh6QDpM2ii7oOc0dVR6KxNhBc3
kkisgPbQU+PNl0/TTRS86dSiXUIBzpMq+JLkEvXnzLMZwTAjYF8pActuHaz49s0m
Ho7TBzyEmNk5Lj6rjkOEGghOmqN3qXIQhEmdpW8aw/iVBq2NqtxBCoZC5Fz963Ua
rfU/XS5abyw89XN7+WN1q04vSZXyCj/aZgucSVWsrZkcsmLPWuacb5ND+E4wHmf8
BHvOXbqNQDWvFXXgVXEtAJSTjUcmfP4XgJfSBvDKfyk1OXTFokEdNwod9y1EIg3a
1c3d6oN6T0ameMhg31AZyJD894CDMpE/wINZxHTeFksmPH+Vz6HO6VoobBMWLtsL
ddemFy9I2ZAtd3GHozpMTLuoiuriD7XxgNj5znkrqzOugEU8uslEo90cpVHifL+F
XICL6qYEJq7oOq1upe6l1a+V71gTJcWW4BgWYHkOrkSPcWUUOv/bf1ESd9aVykzp
lRz0ILmB1CeXkIqjmcOgSbCMKde7E6s3i3TiHlbxbSmXFyo+GXn6tlCcgtWZ7aK4
a1hv/52uQBenekEJIj78BGRjdmtptlkLG6bdFLaQcE1fOGa3fkQswjCyO4gffiDa
hOHf3JrlD3THrvC1hE7rS1ke9J9FCLOawLv4IYW16cQENlVgDRLjA8LKR6WXPIeW
+7kqpgN1QkMRGkI9Y+5lZT9/GhoiBD1oluQuRfF3jSohlViIwKh8GqqTCiZZT3Uo
GiEx
=ETfn
| agpl-3.0 |
iansweeney/imuduino-btle | Arduino/libraries/FreeIMU/debug/timing.py | 21 | 2849 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
timing.py - Tests the output coming from an Arduino with FreeIMU for speed.
Load the Arduino with the FreeIMU_serial program.
Copyright (C) 2012 Fabio Varesano <fvaresano@yahoo.it>
Development of this code has been supported by the Department of Computer Science,
Universita' degli Studi di Torino, Italy within the Piemonte Project
http://www.piemonte.di.unito.it/
This program is free software: you can redistribute it and/or modify
it under the terms of the version 3 GNU General Public License as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import time
import serial
from struct import unpack
from binascii import unhexlify
from subprocess import call
# Banner and usage instructions (Python 2 print statements).
print "\n\nWelcome to the FreeIMU timer routine!\nCopyright © Fabio Varesano 2012.\nReleased under GPL v3 - See http://www.gnu.org/copyleft/gpl.html\n\n"
print "Please load the FreeIMU_serial program from the FreeIMU library examples on your Arduino. Once you correctly installed the FreeIMU library, the examples are available from File->Examples->FreeIMU in the Arduino IDE.\nWhen done, close the Arduino IDE and its serial monitor."
raw_input('Hit Enter to continue.')
arduino_port = raw_input('Insert the serial port which connects to the Arduino (See in the Arduino IDE Tools->Serial Port if in doubt): ')
# instantiate a serial port object. port gets opened by default, no need to explicitly open it.
ser = serial.Serial(
    port= arduino_port,
    baudrate=115200,
    parity=serial.PARITY_NONE,
    stopbits=serial.STOPBITS_ONE,
    bytesize=serial.EIGHTBITS
)
if ser.isOpen():
    print "Arduino serial port opened correctly"
# we rely on the unhandled serial exception which will stop the program in case of problems during serial opening
ser.write('v') # ask version
print "\nFreeIMU library version informations:",
print ser.readline()
print "\nThe program will now start sampling debugging values and timing them.\n"
raw_input('Hit Enter to continue.')
# NOTE(review): `buff` is never used below — looks like leftover from the
# related FreeIMU sampling scripts; confirm before removing.
buff = [0.0 for i in range(9)]
start = time.time()
tot_readings = 0
try:
    print "Sampling from FreeIMU and timing readings"
    while True:
        # One debug sample spans three serial lines; read and discard
        # them — only the arrival rate matters here.
        ser.readline()
        ser.readline()
        ser.readline()
        tot_readings = tot_readings + 1
        if(tot_readings % 100 == 0):
            # Report the effective sampling frequency every 100 readings.
            tot_time = time.time() - start
            print "%d readings obtained. Frequency %f over %d seconds. Hit CTRL+C to interrupt." % (tot_readings, tot_readings / tot_time, tot_time)
except KeyboardInterrupt:
    ser.close()
| apache-2.0 |
ProjectQ-Framework/FermiLib | src/fermilib/utils/_jellium.py | 1 | 18174 | # Copyright 2017 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module constructs Hamiltonians for the uniform electron gas."""
from __future__ import absolute_import
import numpy
from projectq.ops import QubitOperator
from fermilib.ops import FermionOperator
# Exceptions.
class OrbitalSpecificationError(Exception):
    """Raised when orbital/grid coordinates are malformed or out of range."""
    pass
def orbital_id(grid, grid_coordinates, spin=None):
    """Return the tensor factor of a orbital with given coordinates and spin.

    Args:
        grid (Grid): The discretization to use.
        grid_coordinates: List or tuple of ints giving coordinates of grid
            element. Acceptable to provide an int (instead of tuple or list)
            for 1D case. Each coordinate must lie in [0, grid.length).
        spin: Boole, 0 means spin down and 1 means spin up.
            If None, assume spinless model.

    Returns:
        tensor_factor (int):
            tensor factor associated with provided orbital label.

    Raises:
        OrbitalSpecificationError: If any coordinate is not an int in
            [0, grid.length).
    """
    # Normalize the 1D convenience form (a bare int) to a list.
    if isinstance(grid_coordinates, int):
        grid_coordinates = [grid_coordinates]

    # Combine the per-dimension coordinates into one mixed-radix integer.
    tensor_factor = 0
    for dimension, grid_coordinate in enumerate(grid_coordinates):
        # Make sure coordinate is an integer in the correct bounds.
        # Bug fix: the lower bound was previously unchecked, so negative
        # coordinates slipped through and produced a wrong (even negative)
        # tensor factor. They now raise, matching the [0, grid.length)
        # contract enforced by position_vector and momentum_vector.
        if (isinstance(grid_coordinate, int) and
                0 <= grid_coordinate < grid.length):
            tensor_factor += grid_coordinate * (grid.length ** dimension)
        else:
            # Raise for invalid model.
            raise OrbitalSpecificationError(
                'Invalid orbital coordinates provided.')

    # Account for spin and return.
    if spin is None:
        return tensor_factor
    else:
        # Spin is stored in the lowest bit of the tensor factor.
        tensor_factor *= 2
        tensor_factor += spin
        return tensor_factor
def grid_indices(qubit_id, grid, spinless):
    """This function is the inverse of orbital_id.

    Args:
        qubit_id (int): The tensor factor to map to grid indices.
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.

    Returns:
        list[int]: The location of the qubit on the grid, one index
        per dimension.
    """
    # Remove spin degree of freedom: for spinful models the spin lives in
    # the lowest bit of the qubit id (see orbital_id).
    orbital = qubit_id
    if not spinless:
        if orbital % 2:
            orbital -= 1
        # Bug fix: floor division keeps the orbital index an int under
        # Python 3; true division ('/=') produced a float that leaked
        # into the returned indices.
        orbital //= 2

    # Peel off one mixed-radix digit per dimension (inverse of orbital_id).
    indices = []
    for dimension in range(grid.dimensions):
        remainder = orbital % (grid.length ** (dimension + 1))
        indices.append(remainder // (grid.length ** dimension))
    return indices
def position_vector(position_indices, grid):
    """Map a grid-point coordinate to a dimensionful position vector.

    Args:
        position_indices (int|iterable[int]):
            List or tuple of integers giving grid point coordinate.
            Allowed values are ints in [0, grid_length).
        grid (Grid): The discretization to use.

    Returns:
        position_vector (numpy.ndarray[float])

    Raises:
        OrbitalSpecificationError: If an index falls outside the grid.
    """
    # Accept a bare int for the 1D case.
    if isinstance(position_indices, int):
        position_indices = [position_indices]

    # Validate every coordinate before doing any arithmetic.
    for index in position_indices:
        if not 0 <= index < grid.length:
            raise OrbitalSpecificationError(
                'Position indices must be integers in [0, grid_length).')

    # Center indices on the middle of the grid, then rescale to the
    # physical cell size.
    centered = numpy.array(position_indices, float) - grid.length // 2
    return grid.scale * centered / float(grid.length)
def momentum_vector(momentum_indices, grid):
    """Map a grid-point coordinate to a dimensionful momentum vector.

    Args:
        momentum_indices: List or tuple of integers giving momentum indices.
            Allowed values are ints in [0, grid_length).
        grid (Grid): The discretization to use.

    Returns:
        momentum_vector: A numpy array giving the momentum vector with
            dimensions.

    Raises:
        OrbitalSpecificationError: If an index falls outside the grid.
    """
    # Accept a bare int for the 1D case.
    if isinstance(momentum_indices, int):
        momentum_indices = [momentum_indices]

    # Validate every index before doing any arithmetic.
    for index in momentum_indices:
        if not 0 <= index < grid.length:
            raise OrbitalSpecificationError(
                'Momentum indices must be integers in [0, grid_length).')

    # Center the indices, then convert to reciprocal-lattice units.
    centered = numpy.array(momentum_indices, float) - grid.length // 2
    return 2. * numpy.pi * centered / grid.scale
def plane_wave_kinetic(grid, spinless=False):
    """Return the kinetic energy operator in the plane wave basis.

    Args:
        grid (fermilib.utils.Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.

    Returns:
        FermionOperator: The kinetic momentum operator.
    """
    kinetic = FermionOperator()
    spin_values = [None] if spinless else [0, 1]

    # Each plane wave contributes a diagonal number-operator term with
    # coefficient |k|^2 / 2, once per spin channel.
    for indices in grid.all_points_indices():
        wave_vector = momentum_vector(indices, grid)
        energy = wave_vector.dot(wave_vector) / 2.
        for spin_value in spin_values:
            mode = orbital_id(grid, indices, spin_value)
            kinetic += FermionOperator(((mode, 1), (mode, 0)), energy)

    return kinetic
def plane_wave_potential(grid, spinless=False):
    """Return the potential operator in the plane wave basis.

    Sums the two-body interaction over every momentum transfer omega,
    pairing creation/annihilation operators so the indices are shifted
    by +/- omega (computed modulo the grid length).

    Args:
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.

    Returns:
        operator (FermionOperator)
    """
    # Initialize.
    prefactor = 2. * numpy.pi / grid.volume_scale()
    operator = FermionOperator((), 0.0)
    spins = [None] if spinless else [0, 1]

    # Pre-Computations: cache the shifted-index arithmetic and orbital ids
    # so the triple loop below avoids recomputing them for every term.
    shifted_omega_indices_dict = {}
    shifted_indices_minus_dict = {}
    shifted_indices_plus_dict = {}
    orbital_ids = {}
    for indices_a in grid.all_points_indices():
        shifted_omega_indices = [j - grid.length // 2 for j in indices_a]
        shifted_omega_indices_dict[indices_a] = shifted_omega_indices
        shifted_indices_minus_dict[indices_a] = {}
        shifted_indices_plus_dict[indices_a] = {}
        for indices_b in grid.all_points_indices():
            shifted_indices_minus_dict[indices_a][indices_b] = tuple([
                (indices_b[i] - shifted_omega_indices[i]) % grid.length
                for i in range(grid.dimensions)])
            shifted_indices_plus_dict[indices_a][indices_b] = tuple([
                (indices_b[i] + shifted_omega_indices[i]) % grid.length
                for i in range(grid.dimensions)])
        orbital_ids[indices_a] = {}
        for spin in spins:
            orbital_ids[indices_a][spin] = orbital_id(grid, indices_a, spin)

    # Loop once through all plane waves.
    for omega_indices in grid.all_points_indices():
        shifted_omega_indices = shifted_omega_indices_dict[omega_indices]

        # Get the momenta vectors.
        omega_momenta = momentum_vector(omega_indices, grid)

        # Skip if omega momentum is zero: the 1/|omega|^2 coefficient
        # below would diverge for the zero mode.
        if not omega_momenta.any():
            continue

        # Compute coefficient.
        coefficient = prefactor / omega_momenta.dot(omega_momenta)

        for grid_indices_a in grid.all_points_indices():
            shifted_indices_d = (
                shifted_indices_minus_dict[omega_indices][grid_indices_a])
            for grid_indices_b in grid.all_points_indices():
                shifted_indices_c = (
                    shifted_indices_plus_dict[omega_indices][grid_indices_b])

                # Loop over spins.
                for spin_a in spins:
                    orbital_a = orbital_ids[grid_indices_a][spin_a]
                    orbital_d = orbital_ids[shifted_indices_d][spin_a]
                    for spin_b in spins:
                        orbital_b = orbital_ids[grid_indices_b][spin_b]
                        orbital_c = orbital_ids[shifted_indices_c][spin_b]

                        # Add interaction term. Terms with a repeated
                        # creation (or annihilation) index are skipped —
                        # they vanish for fermionic modes.
                        if ((orbital_a != orbital_b) and
                                (orbital_c != orbital_d)):
                            operators = ((orbital_a, 1), (orbital_b, 1),
                                         (orbital_c, 0), (orbital_d, 0))
                            operator += FermionOperator(operators, coefficient)

    # Return.
    return operator
def dual_basis_jellium_model(grid, spinless=False,
                             kinetic=True, potential=True,
                             include_constant=False):
    """Return jellium Hamiltonian in the dual basis of arXiv:1706.00023

    Args:
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.
        kinetic (bool): Whether to include kinetic terms.
        potential (bool): Whether to include potential terms.
        include_constant (bool): Whether to include the Madelung constant.

    Returns:
        operator (FermionOperator)
    """
    # Initialize.
    n_points = grid.num_points()
    position_prefactor = 2. * numpy.pi / grid.volume_scale()
    operator = FermionOperator()
    spins = [None] if spinless else [0, 1]

    # Pre-Computations: cache positions, momenta, |k|^2 and orbital ids so
    # the doubly nested lattice loop below does no redundant work.
    position_vectors = {}
    momentum_vectors = {}
    momenta_squared_dict = {}
    orbital_ids = {}
    for indices in grid.all_points_indices():
        position_vectors[indices] = position_vector(indices, grid)
        momenta = momentum_vector(indices, grid)
        momentum_vectors[indices] = momenta
        momenta_squared_dict[indices] = momenta.dot(momenta)
        orbital_ids[indices] = {}
        for spin in spins:
            orbital_ids[indices][spin] = orbital_id(grid, indices, spin)

    # Loop once through all lattice sites.
    for grid_indices_a in grid.all_points_indices():
        coordinates_a = position_vectors[grid_indices_a]
        for grid_indices_b in grid.all_points_indices():
            coordinates_b = position_vectors[grid_indices_b]
            differences = coordinates_b - coordinates_a

            # Compute coefficients: both amplitudes depend on the two
            # sites only through their separation vector.
            kinetic_coefficient = 0.
            potential_coefficient = 0.
            for momenta_indices in grid.all_points_indices():
                momenta = momentum_vectors[momenta_indices]
                momenta_squared = momenta_squared_dict[momenta_indices]
                if momenta_squared == 0:
                    # k = 0 contributes nothing to the kinetic sum and
                    # would divide by zero in the potential sum.
                    continue
                cos_difference = numpy.cos(momenta.dot(differences))
                if kinetic:
                    kinetic_coefficient += (
                        cos_difference * momenta_squared /
                        (2. * float(n_points)))
                if potential:
                    potential_coefficient += (
                        position_prefactor * cos_difference / momenta_squared)

            # Loop over spins and identify interacting orbitals.
            orbital_a = {}
            orbital_b = {}
            for spin in spins:
                orbital_a[spin] = orbital_ids[grid_indices_a][spin]
                orbital_b[spin] = orbital_ids[grid_indices_b][spin]
            if kinetic:
                # Spin-conserving hopping term a^dag_a a_b.
                for spin in spins:
                    operators = ((orbital_a[spin], 1), (orbital_b[spin], 0))
                    operator += FermionOperator(operators, kinetic_coefficient)
            if potential:
                # Density-density term between distinct orbitals.
                for sa in spins:
                    for sb in spins:
                        if orbital_a[sa] == orbital_b[sb]:
                            continue
                        operators = ((orbital_a[sa], 1), (orbital_a[sa], 0),
                                     (orbital_b[sb], 1), (orbital_b[sb], 0))
                        operator += FermionOperator(operators,
                                                    potential_coefficient)

    # Include the Madelung constant if requested.
    if include_constant:
        operator += FermionOperator.identity() * (2.8372 / grid.scale)

    # Return.
    return operator
def dual_basis_kinetic(grid, spinless=False):
    """Return the kinetic operator in the dual basis of arXiv:1706.00023.

    Args:
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.

    Returns:
        operator (FermionOperator)
    """
    # Delegate to the general jellium builder with only kinetic terms on.
    return dual_basis_jellium_model(grid, spinless,
                                    kinetic=True, potential=False)
def dual_basis_potential(grid, spinless=False):
    """Return the potential operator in the dual basis of arXiv:1706.00023.

    Args:
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.

    Returns:
        operator (FermionOperator)
    """
    # Delegate to the general jellium builder with only potential terms on.
    return dual_basis_jellium_model(grid, spinless,
                                    kinetic=False, potential=True)
def jellium_model(grid, spinless=False, plane_wave=True,
                  include_constant=False):
    """Return jellium Hamiltonian as FermionOperator class.

    Args:
        grid (fermilib.utils.Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.
        plane_wave (bool): Whether to return in momentum space (True)
            or position space (False).
        include_constant (bool): Whether to include the Madelung constant.

    Returns:
        FermionOperator: The Hamiltonian of the model.
    """
    # Build the Hamiltonian in the requested basis.
    if not plane_wave:
        hamiltonian = dual_basis_jellium_model(grid, spinless)
    else:
        hamiltonian = plane_wave_kinetic(grid, spinless)
        hamiltonian += plane_wave_potential(grid, spinless)

    # Optionally add the Madelung constant as an identity term.
    if include_constant:
        hamiltonian += FermionOperator.identity() * (2.8372 / grid.scale)
    return hamiltonian
def jordan_wigner_dual_basis_jellium(grid, spinless=False,
                                     include_constant=False):
    """Return the jellium Hamiltonian as QubitOperator in the dual basis.

    Args:
        grid (Grid): The discretization to use.
        spinless (bool): Whether to use the spinless model or not.
        include_constant (bool): Whether to include the Madelung constant.

    Returns:
        hamiltonian (QubitOperator)
    """
    # Initialize.
    n_orbitals = grid.num_points()
    volume = grid.volume_scale()
    if spinless:
        n_qubits = n_orbitals
    else:
        # One qubit per orbital and spin channel.
        n_qubits = 2 * n_orbitals
    hamiltonian = QubitOperator()

    # Compute vectors.
    momentum_vectors = {}
    momenta_squared_dict = {}
    for indices in grid.all_points_indices():
        momenta = momentum_vector(indices, grid)
        momentum_vectors[indices] = momenta
        momenta_squared_dict[indices] = momenta.dot(momenta)

    # Compute the identity coefficient and the coefficient of local Z terms
    # (both are sums over all nonzero momenta).
    identity_coefficient = 0.
    z_coefficient = 0.
    for k_indices in grid.all_points_indices():
        momenta = momentum_vectors[k_indices]
        if momenta.any():
            momenta_squared = momenta.dot(momenta)
            identity_coefficient += momenta_squared / 2.
            identity_coefficient -= (numpy.pi * float(n_orbitals) /
                                     (momenta_squared * volume))
            z_coefficient += numpy.pi / (momenta_squared * volume)
            z_coefficient -= momenta_squared / (4. * float(n_orbitals))
    if spinless:
        identity_coefficient /= 2.

    # Add identity term.
    identity_term = QubitOperator((), identity_coefficient)
    hamiltonian += identity_term

    # Add local Z terms (same coefficient on every qubit).
    for qubit in range(n_qubits):
        qubit_term = QubitOperator(((qubit, 'Z'),), z_coefficient)
        hamiltonian += qubit_term

    # Add ZZ terms and XZX + YZY terms.
    zz_prefactor = numpy.pi / volume
    xzx_yzy_prefactor = .25 / float(n_orbitals)
    for p in range(n_qubits):
        index_p = grid_indices(p, grid, spinless)
        position_p = position_vector(index_p, grid)
        for q in range(p + 1, n_qubits):
            index_q = grid_indices(q, grid, spinless)
            position_q = position_vector(index_q, grid)

            difference = position_p - position_q

            # Skip hopping (XZX + YZY) between qubits of different parity
            # in the spinful case: orbital_id interleaves spins on
            # even/odd tensor factors, so (p + q) odd pairs opposite spins.
            skip_xzx_yzy = not spinless and (p + q) % 2

            # Loop through momenta, accumulating both coefficients in one
            # pass over the grid.
            zpzq_coefficient = 0.
            term_coefficient = 0.
            for k_indices in grid.all_points_indices():
                momenta = momentum_vectors[k_indices]
                momenta_squared = momenta_squared_dict[k_indices]
                if momenta_squared == 0:
                    # Zero mode would divide by zero below.
                    continue

                cos_difference = numpy.cos(momenta.dot(difference))
                zpzq_coefficient += (zz_prefactor * cos_difference /
                                     momenta_squared)

                if skip_xzx_yzy:
                    continue
                term_coefficient += (xzx_yzy_prefactor * cos_difference *
                                     momenta_squared)

            # Add ZZ term.
            qubit_term = QubitOperator(((p, 'Z'), (q, 'Z')), zpzq_coefficient)
            hamiltonian += qubit_term

            # Add XZX + YZY term.
            if skip_xzx_yzy:
                continue
            # Jordan-Wigner string of Z's between the two endpoints.
            z_string = tuple((i, 'Z') for i in range(p + 1, q))
            xzx_operators = ((p, 'X'),) + z_string + ((q, 'X'),)
            yzy_operators = ((p, 'Y'),) + z_string + ((q, 'Y'),)
            hamiltonian += QubitOperator(xzx_operators, term_coefficient)
            hamiltonian += QubitOperator(yzy_operators, term_coefficient)

    # Include the Madelung constant if requested.
    if include_constant:
        hamiltonian += QubitOperator((),) * (2.8372 / grid.scale)

    # Return Hamiltonian.
    return hamiltonian
| apache-2.0 |
andela-earinde/bellatrix-py | app/js/lib/lib/modules/distutils/bcppcompiler.py | 250 | 14941 | """distutils.bcppcompiler
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
for the Borland C++ compiler.
"""
# This implementation by Lyle Johnson, based on the original msvccompiler.py
# module and using the directions originally published by Gordon Williams.
# XXX looks like there's a LOT of overlap between these two classes:
# someone should sit down and factor out the common code as
# WindowsCCompiler! --GPW
__revision__ = "$Id$"
import os
from distutils.errors import (DistutilsExecError, CompileError, LibError,
LinkError, UnknownFileError)
from distutils.ccompiler import CCompiler, gen_preprocess_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
class BCPPCompiler(CCompiler) :
"""Concrete class that implements an interface to the Borland C/C++
compiler, as defined by the CCompiler abstract class.
"""
compiler_type = 'bcpp'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = _c_extensions + _cpp_extensions
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        """Set up tool names and default option lists for the Borland tools."""

        CCompiler.__init__ (self, verbose, dry_run, force)

        # These executables are assumed to all be in the path.
        # Borland doesn't seem to use any special registry settings to
        # indicate their installation locations.
        self.cc = "bcc32.exe"
        self.linker = "ilink32.exe"
        self.lib = "tlib.exe"

        self.preprocess_options = None
        # Release builds use /O2, debug builds /Od; the option lists are
        # otherwise identical (see the Borland bcc32 documentation for the
        # individual switches).
        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']

        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
        self.ldflags_static = []
        self.ldflags_exe = ['/Gn', '/q', '/x']
        self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
# -- Worker methods ------------------------------------------------
    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile 'sources' with bcc32, returning the object file names.

        '.res' inputs are passed through untouched; '.rc' inputs are run
        through the brcc32 resource compiler instead of bcc32.
        """

        macros, objects, extra_postargs, pp_opts, build = \
            self._setup_compile(output_dir, macros, include_dirs, sources,
                                depends, extra_postargs)
        compile_opts = extra_preargs or []
        compile_opts.append ('-c')
        if debug:
            compile_opts.extend (self.compile_options_debug)
        else:
            compile_opts.extend (self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object is up to date / not scheduled for rebuild.
                continue
            # XXX why do the normpath here?
            src = os.path.normpath(src)
            obj = os.path.normpath(obj)
            # XXX _setup_compile() did a mkpath() too but before the normpath.
            # Is it possible to skip the normpath?
            self.mkpath(os.path.dirname(obj))

            if ext == '.res':
                # This is already a binary file -- skip it.
                continue # the 'for' loop
            if ext == '.rc':
                # This needs to be compiled to a .res file -- do it now.
                try:
                    self.spawn (["brcc32", "-fo", obj, src])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue # the 'for' loop

            # The next two are both for the real compiler.
            if ext in self._c_extensions:
                input_opt = ""
            elif ext in self._cpp_extensions:
                # Force C++ compilation for C++ source extensions.
                input_opt = "-P"
            else:
                # Unknown file type -- no extra options. The compiler
                # will probably fail, but let it just in case this is a
                # file the compiler recognizes even if we don't.
                input_opt = ""

            output_opt = "-o" + obj

            # Compiler command line syntax is: "bcc32 [options] file(s)".
            # Note that the source file names must appear at the end of
            # the command line.
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs + [src])
            except DistutilsExecError, msg:
                raise CompileError, msg

        return objects

    # compile ()
    def create_static_lib (self,
                           objects,
                           output_libname,
                           output_dir=None,
                           debug=0,
                           target_lang=None):
        """Bundle 'objects' into a static library using Borland's tlib.

        Skips the archive step when the library is already newer than all
        of its object files.
        """

        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        output_filename = \
            self.library_filename (output_libname, output_dir=output_dir)

        if self._need_link (objects, output_filename):
            # '/u' asks tlib to update/replace existing members.
            lib_args = [output_filename, '/u'] + objects
            if debug:
                pass # XXX what goes here?
            try:
                self.spawn ([self.lib] + lib_args)
            except DistutilsExecError, msg:
                raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()
    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link object files into an executable or DLL with ilink32.

        'target_desc' selects between CCompiler.EXECUTABLE and a shared
        library, which changes the startup object (c0w32 vs c0d32) and
        the linker flag set. When 'export_symbols' is given, a temporary
        .def file is generated next to the first object file.
        """

        # XXX this ignores 'build_temp'!  should follow the lead of
        # msvccompiler.py

        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            log.warn("I don't know what to do with 'runtime_library_dirs': %s",
                     str(runtime_library_dirs))

        if output_dir is not None:
            output_filename = os.path.join (output_dir, output_filename)

        if self._need_link (objects, output_filename):

            # Figure out linker args based on type of target.
            if target_desc == CCompiler.EXECUTABLE:
                startup_obj = 'c0w32'
                if debug:
                    ld_args = self.ldflags_exe_debug[:]
                else:
                    ld_args = self.ldflags_exe[:]
            else:
                startup_obj = 'c0d32'
                if debug:
                    ld_args = self.ldflags_shared_debug[:]
                else:
                    ld_args = self.ldflags_shared[:]

            # Create a temporary exports file for use by the linker
            if export_symbols is None:
                def_file = ''
            else:
                head, tail = os.path.split (output_filename)
                modname, ext = os.path.splitext (tail)
                temp_dir = os.path.dirname(objects[0]) # preserve tree structure
                def_file = os.path.join (temp_dir, '%s.def' % modname)
                contents = ['EXPORTS']
                for sym in (export_symbols or []):
                    # Map each exported name to its underscore-prefixed
                    # internal symbol.
                    contents.append(' %s=_%s' % (sym, sym))
                self.execute(write_file, (def_file, contents),
                             "writing %s" % def_file)

            # Borland C++ has problems with '/' in paths
            objects2 = map(os.path.normpath, objects)
            # split objects in .obj and .res files
            # Borland C++ needs them at different positions in the command line
            objects = [startup_obj]
            resources = []
            for file in objects2:
                (base, ext) = os.path.splitext(os.path.normcase(file))
                if ext == '.res':
                    resources.append(file)
                else:
                    objects.append(file)

            for l in library_dirs:
                ld_args.append("/L%s" % os.path.normpath(l))
            ld_args.append("/L.") # we sometimes use relative paths

            # list of object files
            ld_args.extend(objects)

            # XXX the command-line syntax for Borland C++ is a bit wonky;
            # certain filenames are jammed together in one big string, but
            # comma-delimited. This doesn't mesh too well with the
            # Unix-centric attitude (with a DOS/Windows quoting hack) of
            # 'spawn()', so constructing the argument list is a bit
            # awkward. Note that doing the obvious thing and jamming all
            # the filenames and commas into one argument would be wrong,
            # because 'spawn()' would quote any filenames with spaces in
            # them. Arghghh!. Apparently it works fine as coded...

            # name of dll/exe file
            ld_args.extend([',',output_filename])
            # no map file and start libraries
            ld_args.append(',,')

            for lib in libraries:
                # see if we find it and if there is a bcpp specific lib
                # (xxx_bcpp.lib)
                libfile = self.find_library_file(library_dirs, lib, debug)
                if libfile is None:
                    ld_args.append(lib)
                    # probably a BCPP internal library -- don't warn
                else:
                    # full name which prefers bcpp_xxx.lib over xxx.lib
                    ld_args.append(libfile)

            # some default libraries
            ld_args.append ('import32')
            ld_args.append ('cw32mt')

            # def file for export symbols
            ld_args.extend([',',def_file])

            # add resource files
            ld_args.append(',')
            ld_args.extend(resources)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath (os.path.dirname (output_filename))
            try:
                self.spawn ([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()
# -- Miscellaneous methods -----------------------------------------
def find_library_file (self, dirs, lib, debug=0):
    """Search 'dirs' for a static library named 'lib'.

    Candidate names are tried in order of preference: the "_bcpp"
    variants first (a Borland-specific build of the library), and the
    "_d" debug variants first when 'debug' is set.  Returns the full
    path of the first existing file, or None when nothing matches.
    """
    # The "_bcpp" suffix supports installations that keep one static
    # library per compiler, since (almost?) every Windows compiler uses
    # a different static-library format.
    if debug:
        debug_name = lib + "_d"
        candidates = (debug_name + "_bcpp", lib + "_bcpp", debug_name, lib)
    else:
        candidates = (lib + "_bcpp", lib)

    for directory in dirs:
        for candidate in candidates:
            path = os.path.join(directory, self.library_filename(candidate))
            if os.path.exists(path):
                return path
    # Not found in any of 'dirs'.
    return None
# overwrite the one from CCompiler to support rc and res-files
def object_filenames (self,
                      source_filenames,
                      strip_dir=0,
                      output_dir=''):
    """Map each source file name to the object-file name the Borland
    compiler will produce for it.

    Overrides CCompiler's version to support Windows resource files:
    '.rc' scripts compile to '.res' files, and pre-built '.res' files
    pass through unchanged.  Raises UnknownFileError for any extension
    outside self.src_extensions + ['.rc', '.res'].
    """
    if output_dir is None: output_dir = ''
    obj_names = []
    for src_name in source_filenames:
        # use normcase to make sure '.rc' is really '.rc' and not '.RC'
        (base, ext) = os.path.splitext (os.path.normcase(src_name))
        if ext not in (self.src_extensions + ['.rc','.res']):
            # Python 2 raise syntax -- this is a py2-era distutils module.
            raise UnknownFileError, \
                  "unknown file type '%s' (from '%s')" % \
                  (ext, src_name)
        if strip_dir:
            base = os.path.basename (base)
        if ext == '.res':
            # these can go unchanged
            obj_names.append (os.path.join (output_dir, base + ext))
        elif ext == '.rc':
            # these need to be compiled to .res-files
            obj_names.append (os.path.join (output_dir, base + '.res'))
        else:
            obj_names.append (os.path.join (output_dir,
                                            base + self.obj_extension))
    return obj_names

# object_filenames ()
def preprocess (self,
                source,
                output_file=None,
                macros=None,
                include_dirs=None,
                extra_preargs=None,
                extra_postargs=None):
    """Run Borland's 'cpp32.exe' preprocessor over 'source'.

    Preprocessing only happens when forced, when no output file is
    given, or when the source is newer than the target.  Raises
    CompileError if the preprocessor exits with an error.
    """
    (_, macros, include_dirs) = \
        self._fix_compile_args(None, macros, include_dirs)
    pp_opts = gen_preprocess_options(macros, include_dirs)
    # Build the command line: preargs, options, postargs, then the source.
    pp_args = ['cpp32.exe'] + pp_opts
    if output_file is not None:
        pp_args.append('-o' + output_file)
    if extra_preargs:
        pp_args[:0] = extra_preargs
    if extra_postargs:
        pp_args.extend(extra_postargs)
    pp_args.append(source)

    # We need to preprocess: either we're being forced to, or the
    # source file is newer than the target (or the target doesn't
    # exist).
    if self.force or output_file is None or newer(source, output_file):
        if output_file:
            self.mkpath(os.path.dirname(output_file))
        try:
            self.spawn(pp_args)
        except DistutilsExecError, msg:
            # Python 2 except syntax (py2-era distutils module).
            print msg
            raise CompileError, msg

# preprocess()
| mit |
philanthropy-u/edx-platform | openedx/core/djangoapps/user_authn/views/login.py | 1 | 15901 | """
Views for login / logout and associated functionality
Much of this file was broken out from views.py, previous history can be found there.
"""
import logging
from django.conf import settings
from django.contrib.auth import authenticate, login as django_login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.urls import reverse
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_http_methods
from ratelimitbackend.exceptions import RateLimitException
from edxmako.shortcuts import render_to_response
from openedx.core.djangoapps.user_authn.cookies import set_logged_in_cookies, refresh_jwt_cookies
from openedx.core.djangoapps.user_authn.exceptions import AuthFailedError
import openedx.core.djangoapps.external_auth.views
from openedx.core.djangoapps.external_auth.models import ExternalAuthMap
from openedx.core.djangoapps.password_policy import compliance as password_policy_compliance
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.util.user_messages import PageLevelMessages
from openedx.core.djangolib.markup import HTML, Text
from student.models import LoginFailures
from student.views import send_reactivation_email_for_user
from student.forms import send_password_reset_email_for_user
from track import segment
import third_party_auth
from third_party_auth import pipeline, provider
from util.json_request import JsonResponse
from util.password_policy_validators import normalize_password
# Module-level loggers: ``log`` for general student-app events, ``AUDIT_LOG``
# for security-relevant authentication events (login failures, lockouts).
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
def _do_third_party_auth(request):
    """
    User is already authenticated via 3rd party, now try to find and return their associated Django user.

    Raises:
        AuthFailedError: when no platform account is linked to the
            third-party identity; the message walks the learner through
            linking the accounts.
    """
    running_pipeline = pipeline.get(request)
    username = running_pipeline['kwargs'].get('username')
    backend_name = running_pipeline['backend']
    third_party_uid = running_pipeline['kwargs']['uid']
    requested_provider = provider.Registry.get_from_pipeline(running_pipeline)
    platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)

    try:
        return pipeline.get_authenticated_user(requested_provider, username, third_party_uid)
    except User.DoesNotExist:
        AUDIT_LOG.info(
            u"Login failed - user with username {username} has no social auth "
            "with backend_name {backend_name}".format(
                username=username, backend_name=backend_name)
        )
        # Build the HTML help message shown on the login page.
        message = _(
            "You've successfully logged into your {provider_name} account, "
            "but this account isn't linked with an {platform_name} account yet."
        ).format(
            platform_name=platform_name,
            provider_name=requested_provider.name,
        )
        message += "<br/><br/>"
        message += _(
            "Use your {platform_name} username and password to log into {platform_name} below, "
            "and then link your {platform_name} account with {provider_name} from your dashboard."
        ).format(
            platform_name=platform_name,
            provider_name=requested_provider.name,
        )
        message += "<br/><br/>"
        # NOTE(review): only this final fragment goes through Text()/HTML()
        # escaping; the earlier fragments interpolate provider/platform names
        # unescaped -- confirm those values are trusted.
        message += Text(_(
            "If you don't have an {platform_name} account yet, "
            "click {register_label_strong} at the top of the page."
        )).format(
            platform_name=platform_name,
            register_label_strong=HTML('<strong>{register_text}</strong>').format(
                register_text=_('Register')
            )
        )

        raise AuthFailedError(message)
def _get_user_by_email(request):
    """
    Look up the User matching the email in the POST payload.

    Implicitly returns None when no such account exists (the failure is
    audit-logged); raises AuthFailedError when the expected POST fields
    are missing entirely.
    """
    post_data = request.POST
    if not ('email' in post_data and 'password' in post_data):
        raise AuthFailedError(_('There was an error receiving your login information. Please email us.'))

    email = post_data['email']
    try:
        return User.objects.get(email=email)
    except User.DoesNotExist:
        # Keep PII out of the audit log when the deployment requires it.
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            AUDIT_LOG.warning(u"Login failed - Unknown user email")
        else:
            AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
def _check_shib_redirect(user):
    """
    See if the user has a linked shibboleth account, if so, redirect the user to shib-login.
    This behavior is pretty much like what gmail does for shibboleth. Try entering some @stanford.edu
    address into the Gmail login.

    Raises:
        AuthFailedError: with an empty message and a redirect to the
            'shib-login' view when the account is Shibboleth-linked.
    """
    if settings.FEATURES.get('AUTH_USE_SHIB') and user:
        try:
            eamap = ExternalAuthMap.objects.get(user=user)
            if eamap.external_domain.startswith(openedx.core.djangoapps.external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
                raise AuthFailedError('', redirect=reverse('shib-login'))
        except ExternalAuthMap.DoesNotExist:
            # This is actually the common case, logging in user without external linked login
            AUDIT_LOG.info(u"User %s w/o external auth attempting login", user)
def _check_excessive_login_attempts(user):
    """
    Raise AuthFailedError when the account has been locked out after too
    many failed login attempts; no-op when lockouts are disabled or no
    account was found.
    """
    if not user or not LoginFailures.is_feature_enabled():
        return
    if LoginFailures.is_user_locked_out(user):
        raise AuthFailedError(_('This account has been temporarily locked due '
                                'to excessive login failures. Try again later.'))
def _enforce_password_policy_compliance(request, user):
    """
    Check the supplied password against the deployment's password policy.

    A policy *warning* still allows the login but queues a page-level
    message telling the user a reset is coming; a hard policy failure
    sends a password-reset email and aborts the login.
    """
    try:
        password_policy_compliance.enforce_compliance_on_login(user, request.POST.get('password'))
    except password_policy_compliance.NonCompliantPasswordWarning as e:
        # Allow login, but warn the user that they will be required to reset their password soon.
        PageLevelMessages.register_warning_message(request, e.message)
    except password_policy_compliance.NonCompliantPasswordException as e:
        send_password_reset_email_for_user(user, request)
        # Prevent the login attempt.
        raise AuthFailedError(e.message)
def _generate_not_activated_message(user):
    """
    Build the HTML message shown on the sign-in screen when a learner
    attempts to log in with an account that has not been activated yet.
    """
    support_url = configuration_helpers.get_value(
        'SUPPORT_SITE_LINK',
        settings.SUPPORT_SITE_LINK
    )
    platform_name = configuration_helpers.get_value(
        'PLATFORM_NAME',
        settings.PLATFORM_NAME
    )

    template = _('In order to sign in, you need to activate your account.<br /><br />'
                 'We just sent an activation link to <strong>{email}</strong>. If '
                 'you do not receive an email, check your spam folders or '
                 '<a href="{support_url}">contact {platform} Support</a>.')
    return template.format(
        email=user.email,
        support_url=support_url,
        platform=platform_name
    )
def _log_and_raise_inactive_user_auth_error(unauthenticated_user):
    """
    Depending on Django version we can get here a couple of ways, but this takes care of logging an auth attempt
    by an inactive user, re-sending the activation email, and raising an error with the correct message.

    Always raises AuthFailedError.
    """
    if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
        AUDIT_LOG.warning(
            u"Login failed - Account not active for user.id: {0}, resending activation".format(
                unauthenticated_user.id)
        )
    else:
        AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(
            unauthenticated_user.username)
        )

    # Give the learner a fresh activation email before aborting the login.
    send_reactivation_email_for_user(unauthenticated_user)
    raise AuthFailedError(_generate_not_activated_message(unauthenticated_user))
def _authenticate_first_party(request, unauthenticated_user):
    """
    Run Django authentication for the request, honoring the rate-limiting
    backend; returns the authenticated user or None.
    """
    # A missing account gets an empty username so authentication is
    # guaranteed to fail while still flowing through the ratelimited
    # backend (which counts the attempt).
    if unauthenticated_user:
        username = unauthenticated_user.username
    else:
        username = ""

    try:
        password = normalize_password(request.POST['password'])
        return authenticate(
            username=username,
            password=password,
            request=request
        )
    except RateLimitException:
        # Too many attempts from the same IP address.
        raise AuthFailedError(_('Too many failed login attempts. Try again later.'))
def _handle_failed_authentication(user, authenticated_user):
    """
    Handles updating the failed login count, inactive user notifications, and logging failed authentications.

    ``user`` is the account found by email lookup (may be None);
    ``authenticated_user`` is the credential-check result (None when the
    password was wrong).  Always raises AuthFailedError.
    """
    if user:
        if LoginFailures.is_feature_enabled():
            LoginFailures.increment_lockout_counter(user)

        if authenticated_user and not user.is_active:
            # Credentials were correct but the account is not activated.
            _log_and_raise_inactive_user_auth_error(user)

        # if we didn't find this username earlier, the account for this email
        # doesn't exist, and doesn't have a corresponding password
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            loggable_id = user.id if user else "<unknown>"
            AUDIT_LOG.warning(u"Login failed - password for user.id: {0} is invalid".format(loggable_id))
        else:
            AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(user.email))

    # Deliberately vague message: do not reveal whether the email exists.
    raise AuthFailedError(_('Email or password is incorrect.'))
def _handle_successful_authentication_and_login(user, request):
    """
    Handles clearing the failed login counter, login tracking, and setting session timeout.
    """
    if LoginFailures.is_feature_enabled():
        LoginFailures.clear_lockout_counter(user)

    _track_user_login(user, request)

    try:
        django_login(request, user)
        # 604800 seconds == 1 week, so sessions last 4 weeks.
        request.session.set_expiry(604800 * 4)
        log.debug("Setting user session expiry to 4 weeks")
    except Exception as exc:
        # Session creation failing typically means the session cache
        # backend is unreachable; log loudly and re-raise.
        AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
        log.critical("Login failed - Could not create session. Is memcached running?")
        log.exception(exc)
        raise
def _track_user_login(user, request):
    """
    Send identify/track analytics events for a successful login.
    """
    # MailChimp is disabled so an ordinary login does not overwrite the
    # email/username that were captured at registration/activation time.
    identity_traits = {
        'email': request.POST.get('email'),
        'username': user.username,
    }
    identity_options = {
        'MailChimp': False,
    }
    segment.identify(user.id, identity_traits, identity_options)

    event_properties = {
        'category': "conversion",
        'label': request.POST.get('course_id'),
        'provider': None,
    }
    segment.track(user.id, "edx.bi.user.account.authenticated", event_properties)
@login_required
@require_http_methods(['GET'])
def finish_auth(request):  # pylint: disable=unused-argument
    """Display a "Please wait" page while post-login query params are handled.

    After logistration (first- or third-party), query string params such as
    "course_id" or an email opt-in preference may require follow-up AJAX
    calls (see FinishAuthView.js); this view hosts them.  It is only used
    when such a parameter is present and normally redirects onward before
    the user notices it.

    Returns:
        HttpResponse: 200 on success
        HttpResponse: 302 if not logged in (redirect to login page)
        HttpResponse: 405 if using an unsupported HTTP method

    Example usage:

        GET /account/finish_auth/?course_id=course-v1:blah&enrollment_action=enroll
    """
    context = {
        'disable_courseware_js': True,
        'disable_footer': True,
    }
    return render_to_response('student_account/finish_auth.html', context)
@ensure_csrf_cookie
def login_user(request):
    """
    AJAX request to log in the user.

    Orchestrates the full flow: third-party auth, email/password lookup,
    Shibboleth redirects, lockout checks, password-policy enforcement,
    session creation, and logged-in cookies.  Returns a JsonResponse
    (or a plain 403 HttpResponse for a failed third-party login).
    """
    third_party_auth_requested = third_party_auth.is_enabled() and pipeline.running(request)
    first_party_auth_requested = bool(request.POST.get('email')) or bool(request.POST.get('password'))
    is_user_third_party_authenticated = False

    try:
        if third_party_auth_requested and not first_party_auth_requested:
            # The user has already authenticated via third-party auth and has not
            # asked to do first party auth by supplying a username or password. We
            # now want to put them through the same logging and cookie calculation
            # logic as with first-party auth.

            # This nested try is due to us only returning an HttpResponse in this
            # one case vs. JsonResponse everywhere else.
            try:
                user = _do_third_party_auth(request)
                is_user_third_party_authenticated = True
            except AuthFailedError as e:
                return HttpResponse(e.value, content_type="text/plain", status=403)
        else:
            user = _get_user_by_email(request)

        # Both helpers raise AuthFailedError to abort the login.
        _check_shib_redirect(user)
        _check_excessive_login_attempts(user)

        possibly_authenticated_user = user

        if not is_user_third_party_authenticated:
            possibly_authenticated_user = _authenticate_first_party(request, user)
            if possibly_authenticated_user and password_policy_compliance.should_enforce_compliance_on_login():
                # Important: This call must be made AFTER the user was successfully authenticated.
                _enforce_password_policy_compliance(request, possibly_authenticated_user)

        if possibly_authenticated_user is None or not possibly_authenticated_user.is_active:
            # Raises AuthFailedError with the appropriate message/logging.
            _handle_failed_authentication(user, possibly_authenticated_user)

        _handle_successful_authentication_and_login(possibly_authenticated_user, request)

        redirect_url = None  # The AJAX method calling should know the default destination upon success
        if is_user_third_party_authenticated:
            running_pipeline = pipeline.get(request)
            redirect_url = pipeline.get_complete_url(backend_name=running_pipeline['backend'])

        response = JsonResponse({
            'success': True,
            'redirect_url': redirect_url,
        })

        # Ensure that the external marketing site can
        # detect that the user is logged in.
        return set_logged_in_cookies(request, response, possibly_authenticated_user)
    except AuthFailedError as error:
        log.exception(error.get_response())
        return JsonResponse(error.get_response())
# CSRF protection is not needed here because the only side effect
# of this endpoint is to refresh the cookie-based JWT, and attempting
# to get a CSRF token before we need to refresh adds too much
# complexity.
@csrf_exempt
@require_http_methods(['POST'])
def login_refresh(request):
    """Refresh the cookie-based JWT for the current session; returns a 400
    JsonResponse when authentication fails."""
    try:
        return refresh_jwt_cookies(request, JsonResponse({'success': True}))
    except AuthFailedError as error:
        log.exception(error.get_response())
        return JsonResponse(error.get_response(), status=400)
| agpl-3.0 |
zayneanderson/aima-python | submissions/aardvark/mygames.py | 21 | 5147 | from collections import namedtuple
from games import (Game)
class GameState:
    """A game-search node: whose turn it is plus the board contents.

    Attributes:
        to_move: the player ('X' or 'O') whose turn it is.
        board: dict mapping (x, y) squares to the occupying player.
        label: optional human-readable name, used by __str__.
        maxDepth: search-depth hint for game-playing algorithms.
    """

    def __init__(self, to_move, board, label=None, depth=8):
        self.to_move = to_move
        self.board = board
        self.label = label
        self.maxDepth = depth

    def __str__(self):
        # Use ``is None`` (identity) rather than ``== None``: it is the
        # Python idiom and avoids invoking a custom __eq__ on the label.
        if self.label is None:
            return super(GameState, self).__str__()
        return self.label
class FlagrantCopy(Game):
    """A flagrant copy of TicTacToe, from game.py

    It's simplified, so that moves and utility are calculated as needed.
    Play TicTacToe on an h x v board, with Max (first player) playing 'X'.
    A state has the player to move and a board, in the form of
    a dict of {(x, y): Player} entries, where Player is 'X' or 'O'.
    """

    def __init__(self, h=3, v=3, k=3):
        self.h = h  # board width: x runs 1..h
        self.v = v  # board height: y runs 1..v
        self.k = k  # how many in a row wins
        self.initial = GameState(to_move='X', board={})

    def actions(self, state):
        "Legal moves are any square not yet taken (cached on the state)."
        try:
            return state.moves
        except AttributeError:
            # Not computed yet -- was a bare ``except``, which would also
            # have hidden unrelated errors.
            pass
        moves = []
        for x in range(1, self.h + 1):
            for y in range(1, self.v + 1):
                if (x, y) not in state.board:
                    moves.append((x, y))
        state.moves = moves
        return moves

    # defines the order of play
    def opponent(self, player):
        if player == 'X':
            return 'O'
        if player == 'O':
            return 'X'
        return None

    def result(self, state, move):
        if move not in self.actions(state):
            return state  # Illegal move has no effect
        board = state.board.copy()
        player = state.to_move
        board[move] = player
        next_mover = self.opponent(player)
        return GameState(to_move=next_mover, board=board)

    def utility(self, state, player):
        "Return the value to player; 1 for win, -1 for loss, 0 otherwise."
        try:
            return state.utility if player == 'X' else -state.utility
        except AttributeError:
            pass  # not cached yet
        board = state.board
        util = self.check_win(board, 'X')
        if util == 0:
            util = -self.check_win(board, 'O')
        state.utility = util
        return util if player == 'X' else -util

    # Did I win?
    def check_win(self, board, player):
        """Return 1 if ``player`` has k in a row anywhere, else 0."""
        # check rows
        for y in range(1, self.v + 1):
            if self.k_in_row(board, (1, y), player, (1, 0)):
                return 1
        # check columns
        for x in range(1, self.h + 1):
            if self.k_in_row(board, (x, 1), player, (0, 1)):
                return 1
        # check \ diagonal
        if self.k_in_row(board, (1, 1), player, (1, 1)):
            return 1
        # check / diagonal -- start from the board's actual width instead of
        # the previously hard-coded (3, 1), so non-default sizes work too.
        if self.k_in_row(board, (self.h, 1), player, (-1, 1)):
            return 1
        return 0

    # does player have K in a row? return 1 if so, 0 if not
    def k_in_row(self, board, start, player, direction):
        "Return true if there is a line through start on board for player."
        (delta_x, delta_y) = direction
        x, y = start
        n = 0  # n is number of moves in row
        while board.get((x, y)) == player:
            n += 1
            x, y = x + delta_x, y + delta_y
        x, y = start
        while board.get((x, y)) == player:
            n += 1
            x, y = x - delta_x, y - delta_y
        n -= 1  # Because we counted start itself twice
        return n >= self.k

    def terminal_test(self, state):
        "A state is terminal if it is won or there are no empty squares."
        return self.utility(state, 'X') != 0 or len(self.actions(state)) == 0

    def display(self, state):
        board = state.board
        for x in range(1, self.h + 1):
            for y in range(1, self.v + 1):
                print(board.get((x, y), '.'), end=' ')
            print()
# Default 3x3 game instance used by the fixture table below.
myGame = FlagrantCopy()

# Hand-built positions named for their game-theoretic value from X's side.
won = GameState(
    to_move = 'O',
    board = {(1,1): 'X', (1,2): 'X', (1,3): 'X',
             (2,1): 'O', (2,2): 'O',
             },
    label = 'won'
)

winin1 = GameState(
    to_move = 'X',
    board = {(1,1): 'X', (1,2): 'X',
             (2,1): 'O', (2,2): 'O',
             },
    label = 'winin1'
)

losein1 = GameState(
    to_move = 'O',
    board = {(1,1): 'X', (1,2): 'X',
             (2,1): 'O', (2,2): 'O',
             (3,1): 'X',
             },
    label = 'losein1'
)

winin3 = GameState(
    to_move = 'X',
    board = {(1,1): 'X', (1,2): 'O',
             (2,1): 'X',
             (3,1): 'O',
             },
    label = 'winin3'
)

losein3 = GameState(
    to_move = 'O',
    board = {(1,1): 'X',
             (2,1): 'X',
             # NOTE(review): the key (1,2) appears twice below, so
             # (1,2): 'X' is silently overwritten by (1,2): 'O'.  The first
             # entry was probably meant to be a different square (e.g.
             # (2,2)) -- confirm the intended position.
             (3,1): 'O', (1,2): 'X', (1,2): 'O',
             },
    label = 'losein3'
)

winin5 = GameState(
    to_move = 'X',
    board = {(1,1): 'X', (1,2): 'O',
             (2,1): 'X',
             },
    label = 'winin5'
)

lost = GameState(
    to_move = 'X',
    board = {(1,1): 'X', (1,2): 'X',
             (2,1): 'O', (2,2): 'O', (2,3): 'O',
             (3,1): 'X'
             },
    label = 'lost'
)

# Map each game instance to the fixture states exercised against it.
myGames = {
    myGame: [
        won,
        winin1, losein1, winin3, losein3, winin5,
        lost,
    ]
} | mit |
okfn/ckanext-groupadmin | ckanext/groupadmin/authz.py | 1 | 1227 | '''This module monkey patches functions in ckan/authz.py and updates the
default roles with custom roles and decorates
has_user_permission_for_group_org_org to allow a GroupAdmin to admin groups.
GroupAdmins can manage all organizations/groups, but have no other sysadmin
powers.
'''
from ckan import authz, model
from ckan.plugins import toolkit
from ckanext.groupadmin.model import GroupAdmin
# Register the custom 'group_admin' role with CKAN's role/permission table so
# it participates in the standard permission checks.
authz.ROLE_PERMISSIONS.update({'group_admin': ['read', 'manage_group']})
def _trans_role_group_admin():
    # Translatable display label for the custom role; CKAN resolves labels
    # via the ``_trans_role_<role>`` naming convention.
    return toolkit._('Group Admin')


# Monkey patch: expose the label helper on ckan.authz.
authz._trans_role_group_admin = _trans_role_group_admin
def is_group_admin_decorator(method):
    """Wrap ``authz.has_user_permission_for_group_or_org`` so that users in
    the GroupAdmin table may administer any group/organization; all other
    users fall through to the wrapped check.
    """
    def decorate_has_user_permission_for_group_or_org(group_id, user_name,
                                                      permission):
        user_id = authz.get_user_id_for_username(user_name, allow_none=True)
        if not user_id:
            # Unknown/anonymous user: deny outright.
            return False
        if GroupAdmin.is_user_group_admin(model.Session, user_id):
            return True
        return method(group_id, user_name, permission)
    return decorate_has_user_permission_for_group_or_org
# Monkey patch the permission check with the GroupAdmin-aware wrapper.
authz.has_user_permission_for_group_or_org = is_group_admin_decorator(
    authz.has_user_permission_for_group_or_org)
| agpl-3.0 |
cogmission/nupic | examples/opf/experiments/multistep/hotgym_best_tp_5step/description.py | 6 | 3184 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
import os
from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription
# the sub-experiment configuration
# Sub-experiment overrides that importBaseDescription() merges into the base
# hotgym experiment description.
config = \
{ 'modelParams': {
      # Classifier: quiet logging.
      'clParams': { 'verbosity': 0},
      'sensorParams': {
          'encoders': {
              'consumption': { 'clipInput': True,
                               'fieldname': u'consumption',
                               'n': 28,
                               'name': u'consumption',
                               'type': 'AdaptiveScalarEncoder',
                               'w': 21},
              'timestamp_dayOfWeek': { 'dayOfWeek': ( 21, 3),
                                       'fieldname': u'timestamp',
                                       'name': u'timestamp_dayOfWeek',
                                       'type': 'DateEncoder'},
              'timestamp_timeOfDay': { 'fieldname': u'timestamp',
                                       'name': u'timestamp_timeOfDay',
                                       'timeOfDay': ( 21, 1),
                                       'type': 'DateEncoder'},
              # Explicitly disable the weekend encoder from the base config.
              'timestamp_weekend': None},
          'verbosity': 0},
      'spParams': { },
      'tpParams': { 'activationThreshold': 13,
                    'minThreshold': 9,
                    'verbosity': 0}}}

mod = importBaseDescription('../hotgym/description.py', config)
# Pull every name from the merged description into this module's namespace;
# the OPF experiment runner expects the description objects at module level.
locals().update(mod.__dict__)
| agpl-3.0 |
bbockelm/root | interpreter/llvm/src/tools/clang/bindings/python/tests/cindex/util.py | 88 | 2293 | # This file provides common utility functions for the test suite.
from clang.cindex import Cursor
from clang.cindex import TranslationUnit
def get_tu(source, lang='c', all_warnings=False, flags=None):
    """Obtain a translation unit from source and language.

    By default, the translation unit is created from source file "t.<ext>"
    where <ext> is the default file extension for the specified language. By
    default it is C, so "t.c" is the default file name.

    Supported languages are {c, cpp, objc}.

    all_warnings is a convenience argument to enable all compiler warnings.

    flags, if given, is an iterable of extra compiler arguments; it is
    copied, so the caller's sequence is never mutated.
    """
    # ``flags=None`` replaces the previous mutable-default ``flags=[]``
    # (a classic Python pitfall, even though the old code copied it).
    args = list(flags) if flags is not None else []

    name = 't.c'
    if lang == 'cpp':
        name = 't.cpp'
        args.append('-std=c++11')
    elif lang == 'objc':
        name = 't.m'
    elif lang != 'c':
        # ValueError (a subclass of Exception) instead of bare Exception,
        # so existing ``except Exception`` callers keep working.
        raise ValueError('Unknown language: %s' % lang)

    if all_warnings:
        args += ['-Wall', '-Wextra']

    return TranslationUnit.from_source(name, args, unsaved_files=[(name,
                                       source)])
def get_cursor(source, spelling):
    """Obtain a cursor from a source object.

    This provides a convenient search mechanism to find a cursor with specific
    spelling within a source. The first argument can be either a
    TranslationUnit or Cursor instance.

    If the cursor is not found, None is returned.
    """
    # Convenience for calling on a TU.
    root = source if isinstance(source, Cursor) else source.cursor

    # Pre-order walk, returning the first match (or None).
    matches = (c for c in root.walk_preorder() if c.spelling == spelling)
    return next(matches, None)
def get_cursors(source, spelling):
    """Obtain all cursors from a source object with a specific spelling.

    This provides a convenient search mechanism to find all cursors with
    specific spelling within a source. The first argument can be either a
    TranslationUnit or Cursor instance.

    If no cursors are found, an empty list is returned.
    """
    # Convenience for calling on a TU.
    root = source if isinstance(source, Cursor) else source.cursor

    return [c for c in root.walk_preorder() if c.spelling == spelling]
# Public helpers re-exported for the cindex test modules.
__all__ = [
    'get_cursor',
    'get_cursors',
    'get_tu',
]
| lgpl-2.1 |
2014c2g4/2015cda0623 | static/Brython3.1.3-20150514-095342/Lib/site-packages/header.py | 91 | 2444 | from browser import document as doc
from browser.html import *
# Banner menu labels, keyed by menu id and then by UI language code.
trans_menu = {
    'menu_console':{'en':'Console','es':'Consola','fr':'Console', 'pt':'Console'},
    'menu_editor':{'en':'Editor','es':'Editor','fr':'Editeur', 'pt':'Editor'},
    'menu_gallery':{'en':'Gallery','es':'Galería','fr':'Galerie', 'pt':'Galeria'},
    'menu_doc':{'en':'Documentation','es':'Documentación','fr':'Documentation', 'pt':'Documentação'},
    'menu_download':{'en':'Download','es':'Descargas','fr':'Téléchargement', 'pt':'Download'},
    'menu_dev':{'en':'Development','es':'Desarrollo','fr':'Développement', 'pt':'Desenvolvimento'},
    'menu_groups':{'en':'Social','es':'Social','fr':'Social', 'pt':'Social'}
}

# Target URL per menu id: relative to the site prefix except for the
# absolute 'download'/'dev' links; '%s' entries are filled in with the
# language code.
links = {'home':'index.html',
    'console':'tests/console.html',
    'editor':'tests/editor.html',
    'gallery':'gallery/gallery_%s.html',
    'doc':'static_doc/%s/intro.html',
    'download':'https://github.com/brython-dev/brython/releases',
    'dev':'https://github.com/brython-dev/brython',
    'groups':'groups.html'
}
def show(prefix='', language=None):
    """Render the site banner into the #banner_row element.

    prefix: path prefix prepended to the relative menu links.
    language: UI language code ('en', 'fr', 'es' or 'pt'); when None it is
        detected from the ?lang= query string, then from the browser
        locale, falling back to 'en'.

    Returns the (query-string language, effective language) pair.
    """
    # Bug fix: initialise up front.  Previously ``qs_lang`` was only bound
    # inside the ``language is None`` branch, so calling show(language='en')
    # raised NameError at the return statement.
    qs_lang = None
    if language is None:
        # detect language: explicit query string wins, then browser locale
        qs_lang = doc.query.getfirst("lang")
        if qs_lang and qs_lang in ["en", "fr", "es", "pt"]:
            language = qs_lang
        else:
            import locale
            try:
                lang, enc = locale.getdefaultlocale()
                lang = lang[:2]
                if lang in ["en", "fr", "es", "pt"]:
                    language = lang
            except Exception:
                # Locale detection is best-effort; fall through to 'en'.
                pass
    language = language or 'en'

    _banner = doc['banner_row']
    for key in ['console', 'editor', 'gallery', 'doc', 'download', 'dev', 'groups']:
        if key in ['download', 'dev']:
            href = links[key]
        else:
            href = prefix + links[key]
            if key in ['doc', 'gallery']:
                href = href % language
            # add lang to href
            href += '?lang=%s' % language
        if key == 'home':
            # NOTE(review): 'home' never appears in the key list above, so
            # this logo branch is dead code -- confirm whether 'home' should
            # be included in the loop.
            img = IMG(src="/brython.svg", Class="logo")
            link = A(img, href=href)
            cell = TD(link, Class="logo")
        else:
            link = A(trans_menu['menu_%s' % key][language], href=href, Class="banner")
            cell = TD(link)
            if key in ['download', 'dev']:
                link.target = "_blank"
        _banner <= cell
    return qs_lang, language
| gpl-3.0 |
kingmotley/SickRage | lib/sqlalchemy/testing/assertions.py | 75 | 15418 | # testing/assertions.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import
from . import util as testutil
from sqlalchemy import pool, orm, util
from sqlalchemy.engine import default, create_engine, url
from sqlalchemy import exc as sa_exc
from sqlalchemy.util import decorator
from sqlalchemy import types as sqltypes, schema
import warnings
import re
from .warnings import resetwarnings
from .exclusions import db_spec, _is_excluded
from . import assertsql
from . import config
import itertools
from .util import fail
import contextlib
def emits_warning(*messages):
    """Mark a test as emitting a warning.

    With no arguments, squelches all SAWarning failures. Or pass one or more
    strings; these will be matched to the root of the warning description by
    warnings.filterwarnings().
    """
    # TODO: it would be nice to assert that a named warning was
    # emitted. should work with some monkeypatching of warnings,
    # and may work on non-CPython if they keep to the spirit of
    # warnings.showwarning's docstring.
    # - update: jython looks ok, it uses cpython's module

    @decorator
    def decorate(fn, *args, **kw):
        # todo: should probably be strict about this, too
        filters = [dict(action='ignore',
                        category=sa_exc.SAPendingDeprecationWarning)]
        if not messages:
            # No patterns given: ignore every SAWarning.
            filters.append(dict(action='ignore',
                                category=sa_exc.SAWarning))
        else:
            # One ignore-filter per message pattern.
            filters.extend(dict(action='ignore',
                                message=message,
                                category=sa_exc.SAWarning)
                           for message in messages)
        for f in filters:
            warnings.filterwarnings(**f)
        try:
            return fn(*args, **kw)
        finally:
            # Restore the global warning filters regardless of outcome.
            resetwarnings()
    return decorate
def emits_warning_on(db, *warnings):
    """Mark a test as emitting a warning on a specific dialect.

    With no arguments, squelches all SAWarning failures. Or pass one or more
    strings; these will be matched to the root of the warning description by
    warnings.filterwarnings().

    ``db`` may be a dialect spec string or an exclusion tuple; the warning
    squelching only applies when the current database matches it.
    """
    spec = db_spec(db)

    @decorator
    def decorate(fn, *args, **kw):
        if isinstance(db, util.string_types):
            # String spec: squelch only when the current config matches.
            if not spec(config._current):
                return fn(*args, **kw)
            else:
                wrapped = emits_warning(*warnings)(fn)
                return wrapped(*args, **kw)
        else:
            # Exclusion tuple: squelch only when the database is excluded.
            if not _is_excluded(*db):
                return fn(*args, **kw)
            else:
                wrapped = emits_warning(*warnings)(fn)
                return wrapped(*args, **kw)
    return decorate
def uses_deprecated(*messages):
    """Mark a test as immune from fatal deprecation warnings.

    With no arguments, squelches all SADeprecationWarning failures.
    Or pass one or more strings; these will be matched to the root
    of the warning description by warnings.filterwarnings().

    As a special case, you may pass a function name prefixed with //
    and it will be re-written as needed to match the standard warning
    verbiage emitted by the sqlalchemy.util.deprecated decorator.
    """
    @decorator
    def decorate(fn, *args, **kw):
        # Delegate the filter bookkeeping to the context-manager form.
        with expect_deprecated(*messages):
            return fn(*args, **kw)
    return decorate
@contextlib.contextmanager
def expect_deprecated(*messages):
    """Context manager that ignores the given SADeprecationWarning
    messages (or all of them when none are given) and restores the global
    warning filters on exit."""
    # todo: should probably be strict about this, too
    filters = [dict(action='ignore',
                    category=sa_exc.SAPendingDeprecationWarning)]
    if not messages:
        filters.append(dict(action='ignore',
                            category=sa_exc.SADeprecationWarning))
    else:
        # A '//name' message is shorthand for the standard text emitted by
        # the @deprecated decorator for that function name.
        filters.extend(
            [dict(action='ignore',
                  message=message,
                  category=sa_exc.SADeprecationWarning)
             for message in
             [(m.startswith('//') and
               ('Call to deprecated function ' + m[2:]) or m)
              for m in messages]])
    for f in filters:
        warnings.filterwarnings(**f)
    try:
        yield
    finally:
        # Restore filters even if the body raised.
        resetwarnings()
def global_cleanup_assertions():
    """Check things that have to be finalized at the end of a test suite.

    Hardcoded at the moment, a modular system can be built here
    to support things like PG prepared transactions, tables all
    dropped, etc.
    """
    # Currently the only global invariant: no pooled connections leaked.
    _assert_no_stray_pool_connections()
_STRAY_CONNECTION_FAILURES = 0
def _assert_no_stray_pool_connections():
    """Fail if connection-pool references survive test cleanup.

    One transient stray reference is tolerated (and a hard GC sweep is
    attempted); the check fails once a stray persists after a full
    sweep or is seen twice in a row.

    Fix: the assertion message previously read "Stray conections".
    """
    global _STRAY_CONNECTION_FAILURES

    # lazy gc on cPython means "do nothing." pool connections
    # shouldn't be in cycles, should go away.
    testutil.lazy_gc()

    # however, once in awhile, on an EC2 machine usually,
    # there's a ref in there. usually just one.
    if pool._refs:
        # OK, let's be somewhat forgiving. Increment a counter,
        # we'll allow a couple of these at most.
        _STRAY_CONNECTION_FAILURES += 1
        print("Encountered a stray connection in test cleanup: %s"
              % str(pool._refs))
        # then do a real GC sweep. We shouldn't even be here
        # so a single sweep should really be doing it, otherwise
        # there's probably a real unreachable cycle somewhere.
        testutil.gc_collect()

    # if we've already had two of these occurrences, or
    # after a hard gc sweep we still have pool._refs?!
    # now we have to raise.
    if _STRAY_CONNECTION_FAILURES >= 2 or pool._refs:
        err = str(pool._refs)
        # but clean out the pool refs collection directly,
        # reset the counter,
        # so the error doesn't at least keep happening.
        pool._refs.clear()
        _STRAY_CONNECTION_FAILURES = 0
        assert False, "Stray connections in cleanup: %s" % err
def eq_(a, b, msg=None):
    """Assert ``a == b``; on failure report both values' reprs."""
    assert a == b, msg or "{!r} != {!r}".format(a, b)
def ne_(a, b, msg=None):
    """Assert ``a != b``; on failure report both values' reprs."""
    assert a != b, msg or "{!r} == {!r}".format(a, b)
def is_(a, b, msg=None):
    """Assert ``a is b`` (identity); on failure report both reprs."""
    assert a is b, msg or "{!r} is not {!r}".format(a, b)
def is_not_(a, b, msg=None):
    """Assert ``a is not b`` (identity); on failure report both reprs."""
    assert a is not b, msg or "{!r} is {!r}".format(a, b)
def startswith_(a, fragment, msg=None):
    """Assert ``a.startswith(fragment)``, with repr messaging on failure."""
    assert a.startswith(fragment), msg or (
        "{!r} does not start with {!r}".format(a, fragment))
def assert_raises(except_cls, callable_, *args, **kw):
    """Assert that invoking ``callable_`` raises ``except_cls``.

    Any other exception type propagates unchanged.
    """
    try:
        callable_(*args, **kw)
    except except_cls:
        return
    # reached only when nothing was raised; asserting here, outside the
    # try block, keeps this correct even when except_cls is AssertionError
    assert False, "Callable did not raise an exception"
def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
    """Assert that ``callable_`` raises ``except_cls`` and that the
    exception's string form matches the regular expression ``msg``.

    Fix: the "did not raise" assertion previously lived *inside* the
    try block, so when ``except_cls`` was AssertionError (or a base of
    it) a non-raising callable had its own failure assertion caught and
    matched against ``msg``.  The success flag + assert-outside pattern
    mirrors ``assert_raises`` above ("assert outside the block so it
    works for AssertionError too").
    """
    success = False
    try:
        callable_(*args, **kwargs)
    except except_cls as e:
        success = True
        assert re.search(msg, util.text_type(e), re.UNICODE), \
            "%r !~ %s" % (msg, e)
        print(util.text_type(e).encode('utf-8'))
    # assert outside the block so it works for AssertionError too!
    assert success, "Callable did not raise an exception"
class AssertsCompiledSQL(object):
    """Mixin providing ``assert_compile``: compile a Core/ORM construct
    and compare the resulting SQL string (and optionally the bound
    parameters) against an expected value."""

    def assert_compile(self, clause, result, params=None,
                       checkparams=None, dialect=None,
                       checkpositional=None,
                       use_default_dialect=False,
                       allow_dialect_select=False,
                       literal_binds=False):
        """Compile ``clause`` and assert its SQL text equals ``result``.

        :param clause: ClauseElement or ORM Query to compile.
        :param result: expected SQL string; newlines and tabs in the
            compiled output are stripped before comparing.
        :param params: parameter dict passed through as ``column_keys``
            and to ``construct_params`` for the checks below.
        :param checkparams: if given, expected ``construct_params`` dict.
        :param checkpositional: if given, expected tuple of positional
            parameter values in ``positiontup`` order.
        :param dialect: dialect object, the string 'default', or a
            dialect name resolvable via a database URL.
        :param use_default_dialect: force DefaultDialect, ignoring
            ``dialect`` and ``self.__dialect__``.
        :param allow_dialect_select: let compile() pick the dialect.
        :param literal_binds: render bound values inline.
        """
        # resolve which dialect to compile against
        if use_default_dialect:
            dialect = default.DefaultDialect()
        elif allow_dialect_select:
            dialect = None
        else:
            if dialect is None:
                # test class may specify one via __dialect__
                dialect = getattr(self, '__dialect__', None)
            if dialect is None:
                dialect = config.db.dialect
            elif dialect == 'default':
                dialect = default.DefaultDialect()
            elif isinstance(dialect, util.string_types):
                # a dialect name, e.g. "postgresql"
                dialect = url.URL(dialect).get_dialect()()

        kw = {}
        compile_kwargs = {}
        if params is not None:
            kw['column_keys'] = list(params)
        if literal_binds:
            compile_kwargs['literal_binds'] = True

        # ORM Query: compile via its SELECT statement, with labels as
        # the Query would normally apply them
        if isinstance(clause, orm.Query):
            context = clause._compile_context()
            context.statement.use_labels = True
            clause = context.statement
        if compile_kwargs:
            kw['compile_kwargs'] = compile_kwargs

        c = clause.compile(dialect=dialect, **kw)

        # echo the compiled SQL + params for debugging failed tests
        param_str = repr(getattr(c, 'params', {}))
        if util.py3k:
            param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
            print(("\nSQL String:\n" + util.text_type(c) + param_str).encode('utf-8'))
        else:
            print("\nSQL String:\n" + util.text_type(c).encode('utf-8') + param_str)

        # compare ignoring embedded newlines / tabs
        cc = re.sub(r'[\n\t]', '', util.text_type(c))
        eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect))

        if checkparams is not None:
            eq_(c.construct_params(params), checkparams)
        if checkpositional is not None:
            p = c.construct_params(params)
            eq_(tuple([p[x] for x in c.positiontup]), checkpositional)
class ComparesTables(object):
    """Mixin with helpers comparing a Table definition against a
    reflected copy of the same table."""

    def assert_tables_equal(self, table, reflected_table, strict_types=False):
        """Assert ``reflected_table`` matches ``table`` column by column:
        name, primary key flag, nullability, foreign-key targets, string
        lengths, and types (exact class when ``strict_types``, otherwise
        affinity only via ``assert_types_base``)."""
        assert len(table.c) == len(reflected_table.c)
        for c, reflected_c in zip(table.c, reflected_table.c):
            eq_(c.name, reflected_c.name)
            assert reflected_c is reflected_table.c[c.name]
            eq_(c.primary_key, reflected_c.primary_key)
            eq_(c.nullable, reflected_c.nullable)

            if strict_types:
                # exact type class required
                msg = "Type '%s' doesn't correspond to type '%s'"
                assert type(reflected_c.type) is type(c.type), \
                    msg % (reflected_c.type, c.type)
            else:
                # affinity comparison only
                self.assert_types_base(reflected_c, c)

            if isinstance(c.type, sqltypes.String):
                eq_(c.type.length, reflected_c.type.length)

            # both sides must reference the same set of columns
            eq_(
                set([f.column.name for f in c.foreign_keys]),
                set([f.column.name for f in reflected_c.foreign_keys])
            )
            if c.server_default:
                # reflected server defaults come back as FetchedValue
                assert isinstance(reflected_c.server_default,
                                  schema.FetchedValue)

        assert len(table.primary_key) == len(reflected_table.primary_key)
        for c in table.primary_key:
            assert reflected_table.primary_key.columns[c.name] is not None

    def assert_types_base(self, c1, c2):
        """Assert the two columns' types share the same affinity."""
        assert c1.type._compare_type_affinity(c2.type),\
            "On column %r, type '%s' doesn't correspond to type '%s'" % \
            (c1.name, c1.type, c2.type)
class AssertsExecutionResults(object):
    """Mixin with helpers that compare result rows / mapped objects
    against expected descriptions, and that assert on the SQL emitted
    while a callable runs (via the ``assertsql`` rule engine)."""

    def assert_result(self, result, class_, *objects):
        """Assert ``result`` contains instances of ``class_`` matching
        the given attribute-dict descriptions, in order."""
        result = list(result)
        print(repr(result))
        self.assert_list(result, class_, objects)

    def assert_list(self, result, class_, list):
        """Assert ``result`` matches ``list`` element-wise via assert_row."""
        self.assert_(len(result) == len(list),
                     "result list is not the same size as test list, " +
                     "for class " + class_.__name__)
        for i in range(0, len(list)):
            self.assert_row(class_, result[i], list[i])

    def assert_row(self, class_, rowobj, desc):
        """Assert one object is of ``class_`` and its attributes match
        ``desc``; tuple values describe nested collections/objects and
        recurse into assert_list / assert_row."""
        self.assert_(rowobj.__class__ is class_,
                     "item class is not " + repr(class_))
        for key, value in desc.items():
            if isinstance(value, tuple):
                if isinstance(value[1], list):
                    # (cls, [descs]) -> nested collection
                    self.assert_list(getattr(rowobj, key), value[0], value[1])
                else:
                    # (cls, desc) -> nested scalar reference
                    self.assert_row(value[0], getattr(rowobj, key), value[1])
            else:
                self.assert_(getattr(rowobj, key) == value,
                             "attribute %s value %s does not match %s" % (
                                 key, getattr(rowobj, key), value))

    def assert_unordered_result(self, result, cls, *expected):
        """As assert_result, but the order of objects is not considered.

        The algorithm is very expensive but not a big deal for the small
        numbers of rows that the test suite manipulates.
        """
        class immutabledict(dict):
            # hash by identity so the expected dicts can live in a set
            def __hash__(self):
                return id(self)

        found = util.IdentitySet(result)
        expected = set([immutabledict(e) for e in expected])

        # every result object must be exactly of type `cls`
        for wrong in util.itertools_filterfalse(lambda o: type(o) == cls, found):
            fail('Unexpected type "%s", expected "%s"' % (
                type(wrong).__name__, cls.__name__))
        if len(found) != len(expected):
            fail('Unexpected object count "%s", expected "%s"' % (
                len(found), len(expected)))

        # sentinel distinguishing "attribute missing" from "attribute None"
        NOVALUE = object()

        def _compare_item(obj, spec):
            # True if obj satisfies the attribute spec (tuples recurse)
            for key, value in spec.items():
                if isinstance(value, tuple):
                    try:
                        self.assert_unordered_result(
                            getattr(obj, key), value[0], *value[1])
                    except AssertionError:
                        return False
                else:
                    if getattr(obj, key, NOVALUE) != value:
                        return False
            return True

        # greedy matching: each expected item consumes one found item
        for expected_item in expected:
            for found_item in found:
                if _compare_item(found_item, expected_item):
                    found.remove(found_item)
                    break
            else:
                fail(
                    "Expected %s instance with attributes %s not found." % (
                        cls.__name__, repr(expected_item)))
        return True

    def assert_sql_execution(self, db, callable_, *rules):
        """Run ``callable_`` with the given assertsql rules installed."""
        assertsql.asserter.add_rules(rules)
        try:
            callable_()
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()

    def assert_sql(self, db, callable_, list_, with_sequences=None):
        """Assert the exact SQL emitted by ``callable_``.

        ``with_sequences`` is used instead of ``list_`` on dialects that
        support sequences.  Dict entries become an AllOf over ExactSQL
        rules; other entries become a single ExactSQL rule.
        """
        if with_sequences is not None and config.db.dialect.supports_sequences:
            rules = with_sequences
        else:
            rules = list_

        newrules = []
        for rule in rules:
            if isinstance(rule, dict):
                newrule = assertsql.AllOf(*[
                    assertsql.ExactSQL(k, v) for k, v in rule.items()
                ])
            else:
                newrule = assertsql.ExactSQL(*rule)
            newrules.append(newrule)

        self.assert_sql_execution(db, callable_, *newrules)

    def assert_sql_count(self, db, callable_, count):
        """Assert ``callable_`` emits exactly ``count`` statements."""
        self.assert_sql_execution(
            db, callable_, assertsql.CountStatements(count))

    @contextlib.contextmanager
    def assert_execution(self, *rules):
        """Context-manager flavor of assert_sql_execution."""
        assertsql.asserter.add_rules(rules)
        try:
            yield
            assertsql.asserter.statement_complete()
        finally:
            assertsql.asserter.clear_rules()

    def assert_statement_count(self, count):
        """Context-manager flavor of assert_sql_count."""
        return self.assert_execution(assertsql.CountStatements(count))
| gpl-3.0 |
bakerlover/project2 | lib/werkzeug/testsuite/contrib/fixers.py | 102 | 7333 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.fixers
~~~~~~~~~~~~~~~~~~~~~~~~~
Server / Browser fixers.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.datastructures import ResponseCacheControl
from werkzeug.http import parse_cache_control_header
from werkzeug.test import create_environ, Client
from werkzeug.wrappers import Request, Response
from werkzeug.contrib import fixers
from werkzeug.utils import redirect
@Request.application
def path_check_app(request):
    # WSGI app used by the fixer tests below: echoes back the PATH_INFO
    # and SCRIPT_NAME it received, so tests can assert on how a fixer
    # rewrote the environ.
    return Response('PATH_INFO: %s\nSCRIPT_NAME: %s' % (
        request.environ.get('PATH_INFO', ''),
        request.environ.get('SCRIPT_NAME', '')
    ))
class ServerFixerTestCase(WerkzeugTestCase):
    """Tests for the server-side environ fixers in werkzeug.contrib.fixers."""

    def test_cgi_root_fix(self):
        # SCRIPT_NAME is folded into PATH_INFO and reset to ''
        app = fixers.CGIRootFix(path_check_app)
        response = Response.from_app(app, dict(create_environ(),
            SCRIPT_NAME='/foo',
            PATH_INFO='/bar',
            SERVER_SOFTWARE='lighttpd/1.4.27'
        ))
        self.assert_equal(response.get_data(),
                          b'PATH_INFO: /foo/bar\nSCRIPT_NAME: ')

    def test_cgi_root_fix_custom_app_root(self):
        # with app_root given, SCRIPT_NAME becomes the (stripped) root
        app = fixers.CGIRootFix(path_check_app, app_root='/baz/poop/')
        response = Response.from_app(app, dict(create_environ(),
            SCRIPT_NAME='/foo',
            PATH_INFO='/bar'
        ))
        self.assert_equal(response.get_data(), b'PATH_INFO: /foo/bar\nSCRIPT_NAME: baz/poop')

    def test_path_info_from_request_uri_fix(self):
        # PATH_INFO is rebuilt (unquoted, query stripped) from any of the
        # three server-provided request-URI keys
        app = fixers.PathInfoFromRequestUriFix(path_check_app)
        for key in 'REQUEST_URI', 'REQUEST_URL', 'UNENCODED_URL':
            env = dict(create_environ(), SCRIPT_NAME='/test', PATH_INFO='/?????')
            env[key] = '/test/foo%25bar?drop=this'
            response = Response.from_app(app, env)
            self.assert_equal(response.get_data(), b'PATH_INFO: /foo%bar\nSCRIPT_NAME: /test')

    def test_proxy_fix(self):
        @Request.application
        def app(request):
            return Response('%s|%s' % (
                request.remote_addr,
                # do not use request.host as this fixes too :)
                request.environ['HTTP_HOST']
            ))
        app = fixers.ProxyFix(app, num_proxies=2)
        environ = dict(create_environ(),
            HTTP_X_FORWARDED_PROTO="https",
            HTTP_X_FORWARDED_HOST='example.com',
            HTTP_X_FORWARDED_FOR='1.2.3.4, 5.6.7.8',
            REMOTE_ADDR='127.0.0.1',
            HTTP_HOST='fake'
        )
        response = Response.from_app(app, environ)
        self.assert_equal(response.get_data(), b'1.2.3.4|example.com')

        # And we must check that if it is a redirection it is
        # correctly done:
        redirect_app = redirect('/foo/bar.hml')
        response = Response.from_app(redirect_app, environ)
        wsgi_headers = response.get_wsgi_headers(environ)
        assert wsgi_headers['Location'] == 'https://example.com/foo/bar.hml'

    def test_proxy_fix_weird_enum(self):
        # an X-Forwarded-For header with no usable entries falls back to
        # REMOTE_ADDR
        @fixers.ProxyFix
        @Request.application
        def app(request):
            return Response(request.remote_addr)
        environ = dict(create_environ(),
            HTTP_X_FORWARDED_FOR=',',
            REMOTE_ADDR='127.0.0.1',
        )
        response = Response.from_app(app, environ)
        self.assert_strict_equal(response.get_data(), b'127.0.0.1')

    def test_header_rewriter_fix(self):
        # X-Foo is removed, X-Bar is injected
        @Request.application
        def application(request):
            return Response("", headers=[
                ('X-Foo', 'bar')
            ])
        application = fixers.HeaderRewriterFix(application, ('X-Foo',), (('X-Bar', '42'),))
        response = Response.from_app(application, create_environ())
        assert response.headers['Content-Type'] == 'text/plain; charset=utf-8'
        assert 'X-Foo' not in response.headers
        assert response.headers['X-Bar'] == '42'
class BrowserFixerTestCase(WerkzeugTestCase):
    """Tests for the browser-workaround fixers (Internet Explorer)."""

    def test_ie_fixes(self):
        # attachment responses: IE gets Vary/Pragma/Cache-Control
        # stripped or simplified, other browsers are untouched
        @fixers.InternetExplorerFix
        @Request.application
        def application(request):
            response = Response('binary data here', mimetype='application/vnd.ms-excel')
            response.headers['Vary'] = 'Cookie'
            response.headers['Content-Disposition'] = 'attachment; filename=foo.xls'
            return response

        c = Client(application, Response)
        response = c.get('/', headers=[
            ('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)')
        ])

        # IE gets no vary
        self.assert_equal(response.get_data(), b'binary data here')
        assert 'vary' not in response.headers
        assert response.headers['content-disposition'] == 'attachment; filename=foo.xls'
        assert response.headers['content-type'] == 'application/vnd.ms-excel'

        # other browsers do
        c = Client(application, Response)
        response = c.get('/')
        self.assert_equal(response.get_data(), b'binary data here')
        assert 'vary' in response.headers

        cc = ResponseCacheControl()
        cc.no_cache = True

        # second app: `pragma` is read from the enclosing scope so each
        # request below can vary the Pragma header it sends
        @fixers.InternetExplorerFix
        @Request.application
        def application(request):
            response = Response('binary data here', mimetype='application/vnd.ms-excel')
            response.headers['Pragma'] = ', '.join(pragma)
            response.headers['Cache-Control'] = cc.to_header()
            response.headers['Content-Disposition'] = 'attachment; filename=foo.xls'
            return response

        # IE has no pragma or cache control
        pragma = ('no-cache',)
        c = Client(application, Response)
        response = c.get('/', headers=[
            ('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)')
        ])
        self.assert_equal(response.get_data(), b'binary data here')
        assert 'pragma' not in response.headers
        assert 'cache-control' not in response.headers
        assert response.headers['content-disposition'] == 'attachment; filename=foo.xls'

        # IE has simplified pragma
        pragma = ('no-cache', 'x-foo')
        cc.proxy_revalidate = True
        response = c.get('/', headers=[
            ('User-Agent', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)')
        ])
        self.assert_equal(response.get_data(), b'binary data here')
        assert response.headers['pragma'] == 'x-foo'
        assert response.headers['cache-control'] == 'proxy-revalidate'
        assert response.headers['content-disposition'] == 'attachment; filename=foo.xls'

        # regular browsers get everything
        response = c.get('/')
        self.assert_equal(response.get_data(), b'binary data here')
        assert response.headers['pragma'] == 'no-cache, x-foo'
        cc = parse_cache_control_header(response.headers['cache-control'],
                                        cls=ResponseCacheControl)
        assert cc.no_cache
        assert cc.proxy_revalidate
        assert response.headers['content-disposition'] == 'attachment; filename=foo.xls'
def suite():
    """Build the test suite covering both fixer test cases."""
    tests = unittest.TestSuite()
    for case in (ServerFixerTestCase, BrowserFixerTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests
| apache-2.0 |
taedori81/saleor | saleor/registration/models.py | 11 | 2121 | from __future__ import unicode_literals
from datetime import timedelta
from uuid import uuid4
from django.db import models
from django.contrib.auth import authenticate, get_user_model
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils import timezone
# shortcut alias for django.utils.timezone.now
now = timezone.now


def default_valid_date():
    """Default ``valid_until`` for new tokens: the current time plus
    ``settings.ACCOUNT_ACTIVATION_DAYS`` (passed positionally to
    timedelta, i.e. interpreted as days)."""
    return now() + timedelta(settings.ACCOUNT_ACTIVATION_DAYS)
class ExternalUserData(models.Model):
    """Link between a local user account and an identity on an
    external service."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='external_ids')
    # name of the external service
    service = models.TextField(db_index=True)
    # username on that service
    username = models.TextField(db_index=True)

    class Meta:
        # at most one record per (service, username) pair
        unique_together = [['service', 'username']]
class UniqueTokenManager(models.Manager):  # this might end up in `utils`
    """Manager that fills a designated field with a fresh UUID4 string
    on every ``create()`` call."""

    def __init__(self, token_field):
        # name of the model field that receives the generated token
        self.token_field = token_field
        super(UniqueTokenManager, self).__init__()

    def create(self, **kwargs):
        # the caller must never supply the token; it is always generated
        assert self.token_field not in kwargs, 'Token field already filled.'
        kwargs[self.token_field] = str(uuid4())
        return super(UniqueTokenManager, self).create(**kwargs)
class AbstractToken(models.Model):
    """Abstract base for expiring, auto-generated tokens."""
    # 36 characters fits the canonical string form of a UUID4
    token = models.CharField(max_length=36, unique=True)
    valid_until = models.DateTimeField(default=default_valid_date)

    objects = UniqueTokenManager(token_field='token')

    class Meta:
        abstract = True
class EmailConfirmationRequest(AbstractToken):
    """Token used to confirm ownership of an e-mail address."""
    email = models.EmailField()

    def get_authenticated_user(self):
        # creates the user on first confirmation if it does not exist yet
        user, _created = get_user_model().objects.get_or_create(email=self.email)
        return authenticate(user=user)

    def get_confirmation_url(self):
        """URL the user visits to confirm this e-mail address."""
        return reverse('registration:confirm_email',
                       kwargs={'token': self.token})
class EmailChangeRequest(AbstractToken):
    """Token used to confirm a change of an existing user's address."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="email_change_requests")
    email = models.EmailField()  # email address that user is switching to

    def get_confirmation_url(self):
        """URL the user visits to confirm the address change."""
        return reverse('registration:change_email',
                       kwargs={'token': self.token})
| bsd-3-clause |
nikolas/django-extensions | django_extensions/management/commands/admin_generator.py | 23 | 11785 | # -*- coding: utf-8 -*-
'''
The Django Admin Generator is a project which can automatically generate
(scaffold) a Django Admin for you. By doing this it will introspect your
models and automatically generate an Admin with properties like:
- `list_display` for all local fields
- `list_filter` for foreign keys with few items
- `raw_id_fields` for foreign keys with a lot of items
- `search_fields` for name and `slug` fields
- `prepopulated_fields` for `slug` fields
- `date_hierarchy` for `created_at`, `updated_at` or `joined_at` fields
The original source and latest version can be found here:
https://github.com/WoLpH/django-admin-generator/
'''
import optparse
import re
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import models
from django.db.models.loading import get_models
from django_extensions.compat import get_apps
from django_extensions.management.color import color_style
from django_extensions.management.utils import signalcommand
# Configurable constants (all overridable from Django settings)
MAX_LINE_WIDTH = getattr(settings, 'MAX_LINE_WIDTH', 78)
INDENT_WIDTH = getattr(settings, 'INDENT_WIDTH', 4)
# FKs with fewer related rows than this become `list_filter` entries
LIST_FILTER_THRESHOLD = getattr(settings, 'LIST_FILTER_THRESHOLD', 25)
# FKs with at least this many related rows become `raw_id_fields`
RAW_ID_THRESHOLD = getattr(settings, 'RAW_ID_THRESHOLD', 100)

# field types eligible for `list_filter`
LIST_FILTER = getattr(settings, 'LIST_FILTER', (
    models.DateField,
    models.DateTimeField,
    models.ForeignKey,
    models.BooleanField,
))

# field names that should be added to `search_fields`
SEARCH_FIELD_NAMES = getattr(settings, 'SEARCH_FIELD_NAMES', (
    'name',
    'slug',
))

# candidates for `date_hierarchy`; the list is scanned in reverse, so
# later entries take precedence (see AdminModel._process)
DATE_HIERARCHY_NAMES = getattr(settings, 'DATE_HIERARCHY_NAMES', (
    'joined_at',
    'updated_at',
    'created_at',
))

# `target=source` specs for `prepopulated_fields`
PREPOPULATED_FIELD_NAMES = getattr(settings, 'PREPOPULATED_FIELD_NAMES', (
    'slug=name',
))

# Output templates.  The property template indents the generated
# attribute so it sits inside the ModelAdmin class body; without the
# leading spaces the generated admin.py would not be valid Python.
PRINT_IMPORTS = getattr(settings, 'PRINT_IMPORTS', '''# -*- coding: utf-8 -*-
from django.contrib import admin

from .models import %(models)s

''')
PRINT_ADMIN_CLASS = getattr(settings, 'PRINT_ADMIN_CLASS', '''

class %(name)sAdmin(admin.ModelAdmin):%(class_)s

admin.site.register(%(name)s, %(name)sAdmin)
''')
PRINT_ADMIN_PROPERTY = getattr(settings, 'PRINT_ADMIN_PROPERTY', '''
    %(key)s = %(value)s''')
class UnicodeMixin(object):
    """Mixin that derives ``__str__`` from ``__unicode__`` so subclasses
    only define the latter, on both Python 2 and Python 3."""

    if sys.version_info[0] < 3:  # Python 2: __str__ must return bytes
        def __str__(self):
            return self.__unicode__().encode('utf8')
    else:  # Python 3: __str__ returns text directly
        def __str__(self):
            return self.__unicode__()
class AdminApp(UnicodeMixin):
    """Wraps one Django app; its unicode form is the complete generated
    ``admin.py`` source for the app's (selected) models."""

    def __init__(self, app, model_res, **options):
        self.app = app
        # compiled regexes limiting which models to render; empty = all
        self.model_res = model_res
        # passed straight through to each AdminModel
        self.options = options

    def __iter__(self):
        """Yield an AdminModel for every selected model in the app."""
        for model in get_models(self.app):
            admin_model = AdminModel(model, **self.options)
            for model_re in self.model_res:
                if model_re.search(admin_model.name):
                    break
            else:
                # no regex matched: skip, unless no filters were given
                if self.model_res:
                    continue
            yield admin_model

    def __unicode__(self):
        return ''.join(self._unicode_generator())

    def _unicode_generator(self):
        # the import block first, then one admin class per model
        models_list = [admin_model.name for admin_model in self]
        yield PRINT_IMPORTS % dict(models=', '.join(models_list))

        admin_model_names = []
        for admin_model in self:
            yield PRINT_ADMIN_CLASS % dict(
                name=admin_model.name,
                class_=admin_model,
            )
            admin_model_names.append(admin_model.name)

    def __repr__(self):
        return '<%s[%s]>' % (
            self.__class__.__name__,
            self.app,
        )
class AdminModel(UnicodeMixin):
    """Introspects a single model; its unicode form is the body of the
    generated ModelAdmin class (list_display, list_filter, ...)."""

    # admin attributes rendered, in this order, by _unicode_generator()
    PRINTABLE_PROPERTIES = (
        'list_display',
        'list_filter',
        'raw_id_fields',
        'search_fields',
        'prepopulated_fields',
        'date_hierarchy',
    )

    def __init__(self, model, raw_id_threshold=RAW_ID_THRESHOLD,
                 list_filter_threshold=LIST_FILTER_THRESHOLD,
                 search_field_names=SEARCH_FIELD_NAMES,
                 date_hierarchy_names=DATE_HIERARCHY_NAMES,
                 prepopulated_field_names=PREPOPULATED_FIELD_NAMES, **options):
        self.model = model
        # result attributes, filled in by _process()
        self.list_display = []
        self.list_filter = []
        self.raw_id_fields = []
        self.search_fields = []
        self.prepopulated_fields = {}
        self.date_hierarchy = None
        # configuration
        self.search_field_names = search_field_names
        self.raw_id_threshold = raw_id_threshold
        self.list_filter_threshold = list_filter_threshold
        self.date_hierarchy_names = date_hierarchy_names
        self.prepopulated_field_names = prepopulated_field_names

    def __repr__(self):
        return '<%s[%s]>' % (
            self.__class__.__name__,
            self.name,
        )

    @property
    def name(self):
        # the model's class name, used in the generated admin source
        return self.model.__name__

    def _process_many_to_many(self, meta):
        """Yield names of local m2m fields to add to ``raw_id_fields``.

        NOTE(review): this yields fields whose related-object count is
        *below* the raw-id threshold, the inverse of the foreign-key
        handling in _process_foreign_key -- confirm against intent.
        """
        raw_id_threshold = self.raw_id_threshold
        for field in meta.local_many_to_many:
            # Django version compat: `related_model` vs `related.model`
            related_model = getattr(field.related, 'related_model', field.related.model)
            related_objects = related_model.objects.all()
            # slice first so no more rows than needed are counted
            if(related_objects[:raw_id_threshold].count() < raw_id_threshold):
                yield field.name

    def _process_fields(self, meta):
        """Yield the names of all processed local (non-parent) fields."""
        parent_fields = meta.parents.values()
        for field in meta.fields:
            name = self._process_field(field, parent_fields)
            if name:
                yield name

    def _process_foreign_key(self, field):
        """Classify a FK: many related rows -> raw_id_fields, few ->
        list_filter, in between -> neither."""
        raw_id_threshold = self.raw_id_threshold
        list_filter_threshold = self.list_filter_threshold
        max_count = max(list_filter_threshold, raw_id_threshold)
        # Django version compat: `related_model` vs `related.model`
        related_model = getattr(field.related, 'related_model', field.related.model)
        related_count = related_model.objects.all()
        # slice first so no more rows than needed are counted
        related_count = related_count[:max_count].count()

        if related_count >= raw_id_threshold:
            self.raw_id_fields.append(field.name)
        elif related_count < list_filter_threshold:
            self.list_filter.append(field.name)
        else:  # pragma: no cover
            pass  # Do nothing :)

    def _process_field(self, field, parent_fields):
        """Register one field; return its name, or None for parent links."""
        if field in parent_fields:
            return
        self.list_display.append(field.name)
        if isinstance(field, LIST_FILTER):
            if isinstance(field, models.ForeignKey):
                self._process_foreign_key(field)
            else:
                self.list_filter.append(field.name)
        if field.name in self.search_field_names:
            self.search_fields.append(field.name)
        return field.name

    def __unicode__(self):
        return ''.join(self._unicode_generator())

    def _yield_value(self, key, value):
        # dispatch on the Python type of the admin property value
        if isinstance(value, (list, set, tuple)):
            return self._yield_tuple(key, tuple(value))
        elif isinstance(value, dict):
            return self._yield_dict(key, value)
        elif isinstance(value, str):
            return self._yield_string(key, value)
        else:  # pragma: no cover
            raise TypeError('%s is not supported in %r' % (type(value), value))

    def _yield_string(self, key, value, converter=repr):
        return PRINT_ADMIN_PROPERTY % dict(
            key=key,
            value=converter(value),
        )

    def _yield_dict(self, key, value):
        # one line if it fits MAX_LINE_WIDTH, else one `k: v` per line
        row_parts = []
        row = self._yield_string(key, value)
        if len(row) > MAX_LINE_WIDTH:
            row_parts.append(self._yield_string(key, '{', str))
            for k, v in value.items():
                row_parts.append('%s%r: %r' % (2 * INDENT_WIDTH * ' ', k, v))
            row_parts.append(INDENT_WIDTH * ' ' + '}')
            row = '\n'.join(row_parts)
        return row

    def _yield_tuple(self, key, value):
        # one line if it fits MAX_LINE_WIDTH, else one element per line
        row_parts = []
        row = self._yield_string(key, value)
        if len(row) > MAX_LINE_WIDTH:
            row_parts.append(self._yield_string(key, '(', str))
            for v in value:
                row_parts.append(2 * INDENT_WIDTH * ' ' + repr(v) + ',')
            row_parts.append(INDENT_WIDTH * ' ' + ')')
            row = '\n'.join(row_parts)
        return row

    def _unicode_generator(self):
        self._process()
        for key in self.PRINTABLE_PROPERTIES:
            value = getattr(self, key)
            if value:
                yield self._yield_value(key, value)

    def _process(self):
        """Introspect the model and fill in the admin properties."""
        meta = self.model._meta

        self.raw_id_fields += list(self._process_many_to_many(meta))
        field_names = list(self._process_fields(meta))

        # scanned in reverse: later names in the tuple take precedence
        for field_name in self.date_hierarchy_names[::-1]:
            if field_name in field_names and not self.date_hierarchy:
                self.date_hierarchy = field_name
                break

        # `target=sourceA,sourceB` specs; kept only when the target and
        # every source field exist on the model
        for k in sorted(self.prepopulated_field_names):
            k, vs = k.split('=', 1)
            vs = vs.split(',')
            if k in field_names:
                incomplete = False
                for v in vs:
                    if v not in field_names:
                        incomplete = True
                        break
                if not incomplete:
                    self.prepopulated_fields[k] = vs

        self.processed = True
class Command(BaseCommand):
    """Management command: print a generated ``admin.py`` for one app,
    optionally limited to models matching the given regexes."""

    help = '''Generate a `admin.py` file for the given app (models)'''

    option_list = BaseCommand.option_list + (
        optparse.make_option(
            '-s', '--search-field', action='append',
            default=SEARCH_FIELD_NAMES,
            help='Fields named like this will be added to `search_fields`'
            ' [default: %default]'),
        optparse.make_option(
            '-d', '--date-hierarchy', action='append',
            default=DATE_HIERARCHY_NAMES,
            help='A field named like this will be set as `date_hierarchy`'
            ' [default: %default]'),
        optparse.make_option(
            '-p', '--prepopulated-fields', action='append',
            default=PREPOPULATED_FIELD_NAMES,
            help='These fields will be prepopulated by the other field.'
            'The field names can be specified like `spam=eggA,eggB,eggC`'
            ' [default: %default]'),
        optparse.make_option(
            '-l', '--list-filter-threshold', type='int',
            default=LIST_FILTER_THRESHOLD, metavar='LIST_FILTER_THRESHOLD',
            help='If a foreign key has less than LIST_FILTER_THRESHOLD items '
            'it will be added to `list_filter` [default: %default]'),
        # NOTE(review): this help text says `list_filter`, but the code
        # (AdminModel._process_foreign_key) adds such fields to
        # `raw_id_fields` -- the help wording looks like a copy/paste slip
        optparse.make_option(
            '-r', '--raw-id-threshold', type='int',
            default=RAW_ID_THRESHOLD, metavar='RAW_ID_THRESHOLD',
            help='If a foreign key has more than RAW_ID_THRESHOLD items '
            'it will be added to `list_filter` [default: %default]'),
    )
    can_import_settings = True

    @signalcommand
    def handle(self, *args, **kwargs):
        """Resolve the app from the first positional argument, compile
        the remaining arguments as case-insensitive model-name regexes,
        then delegate to handle_app()."""
        self.style = color_style()
        installed_apps = dict((a.__name__.rsplit('.', 1)[0], a) for a in get_apps())
        # Make sure we always have args
        if not args:
            args = [False]
        app = installed_apps.get(args[0])
        if not app:
            # unknown (or missing) app name: list the choices and exit
            print(self.style.WARN('This command requires an existing app name as argument'))
            print(self.style.WARN('Available apps:'))
            for app in sorted(installed_apps):
                print(self.style.WARN(' %s' % app))
            sys.exit(1)
        model_res = []
        for arg in args[1:]:
            model_res.append(re.compile(arg, re.IGNORECASE))
        self.handle_app(app, model_res, **kwargs)

    def handle_app(self, app, model_res, **options):
        # AdminApp's string form is the complete generated admin.py
        print(AdminApp(app, model_res, **options))
| mit |
bebby520/essay_devel | venv/lib/python2.7/site-packages/requests/packages/chardet/constants.py | 3008 | 1335 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# module-level debug flag
_debug = 0

# detection result states shared by the prober/detector code
eDetecting = 0
eFoundIt = 1
eNotMe = 2

# coding state machine states
eStart = 0
eError = 1
eItsMe = 2

# NOTE(review): presumably the confidence level at which detection may
# stop early; the constant is only defined here, not used in this module.
SHORTCUT_THRESHOLD = 0.95
| apache-2.0 |
jnfsmile/zinnia | zinnia/tests/test_mixins.py | 7 | 17107 | """Test cases for Zinnia's mixins"""
from datetime import date
from django.test import TestCase
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.contrib.auth.tests.utils import skipIfCustomUser
from zinnia.models.entry import Entry
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.managers import PUBLISHED
from zinnia.tests.utils import datetime
from zinnia.signals import disconnect_entry_signals
from zinnia.views.mixins.archives import PreviousNextPublishedMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.prefetch_related import PrefetchRelatedMixin
from zinnia.views.mixins.prefetch_related import PrefetchCategoriesAuthorsMixin
from zinnia.views.mixins.templates import EntryQuerysetTemplateResponseMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
from zinnia.views.mixins.templates import \
EntryQuerysetArchiveTemplateResponseMixin
class MixinTestCase(TestCase):
"""Test cases for zinnia.views.mixins"""
maxDiff = None
def setUp(self):
disconnect_entry_signals()
def test_callable_queryset_mixin(self):
instance = CallableQuerysetMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
def qs():
return []
instance.queryset = qs
self.assertEqual(instance.get_queryset(),
[])
def test_entry_queryset_template_response_mixin(self):
instance = EntryQuerysetTemplateResponseMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_model_type)
self.assertRaises(ImproperlyConfigured,
instance.get_model_name)
instance.model_type = 'model'
instance.model_name = 'name'
self.assertEqual(instance.get_model_type(),
'model')
self.assertEqual(instance.get_model_name(),
'name')
self.assertEqual(instance.get_template_names(),
['zinnia/model/name/entry_list.html',
'zinnia/model/name_entry_list.html',
'zinnia/model/entry_list.html',
'zinnia/entry_list.html'])
instance.template_name = 'zinnia/entry_search.html'
self.assertEqual(instance.get_template_names(),
['zinnia/entry_search.html',
'zinnia/model/name/entry_list.html',
'zinnia/model/name_entry_list.html',
'zinnia/model/entry_list.html',
'zinnia/entry_list.html'])
def test_entry_queryset_archive_template_response_mixin(self):
def get_year():
return 2012
def get_week():
return 16
def get_month():
return '04'
def get_day():
return 21
instance = EntryQuerysetArchiveTemplateResponseMixin()
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_year = get_year
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_week = get_week
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_month = get_month
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.get_day = get_day
self.assertEqual(
instance.get_template_names(),
['zinnia/archives/2012/04/21/entry_archive.html',
'zinnia/archives/month/04/day/21/entry_archive.html',
'zinnia/archives/2012/day/21/entry_archive.html',
'zinnia/archives/day/21/entry_archive.html',
'zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
instance.template_name = 'zinnia/entry_search.html'
self.assertEqual(
instance.get_template_names(),
['zinnia/entry_search.html',
'zinnia/archives/2012/04/21/entry_archive.html',
'zinnia/archives/month/04/day/21/entry_archive.html',
'zinnia/archives/2012/day/21/entry_archive.html',
'zinnia/archives/day/21/entry_archive.html',
'zinnia/archives/2012/month/04/entry_archive.html',
'zinnia/archives/month/04/entry_archive.html',
'zinnia/archives/2012/week/16/entry_archive.html',
'zinnia/archives/week/16/entry_archive.html',
'zinnia/archives/2012/entry_archive.html',
'zinnia/archives/entry_archive.html',
'zinnia/entry_archive.html',
'entry_archive.html'])
    def test_entry_archive_template_response_mixin(self):
        """
        EntryArchiveTemplateResponseMixin must build the template
        candidate list from most specific (dated path + slug +
        per-entry template) down to the generic defaults, and honor
        the entry's ``detail_template`` attribute.
        """
        class FakeEntry(object):
            # Minimal Entry stand-in: only the attributes the mixin reads.
            detail_template = 'entry_detail.html'
            slug = 'my-fake-entry'
        # Date accessors normally supplied by the date-based generic view.
        def get_year():
            return 2012
        def get_week():
            return 16
        def get_month():
            return '04'
        def get_day():
            return 21
        instance = EntryArchiveTemplateResponseMixin()
        instance.get_year = get_year
        instance.get_month = get_month
        instance.get_week = get_week
        instance.get_day = get_day
        instance.object = FakeEntry()
        self.assertEqual(
            instance.get_template_names(),
            ['zinnia/archives/2012/04/21/my-fake-entry_entry_detail.html',
             'zinnia/archives/month/04/day/21/my-fake-entry_entry_detail.html',
             'zinnia/archives/2012/day/21/my-fake-entry_entry_detail.html',
             'zinnia/archives/day/21/my-fake-entry_entry_detail.html',
             'zinnia/archives/2012/04/21/my-fake-entry.html',
             'zinnia/archives/month/04/day/21/my-fake-entry.html',
             'zinnia/archives/2012/day/21/my-fake-entry.html',
             'zinnia/archives/day/21/my-fake-entry.html',
             'zinnia/archives/2012/04/21/entry_detail.html',
             'zinnia/archives/month/04/day/21/entry_detail.html',
             'zinnia/archives/2012/day/21/entry_detail.html',
             'zinnia/archives/day/21/entry_detail.html',
             'zinnia/archives/2012/month/04/my-fake-entry_entry_detail.html',
             'zinnia/archives/month/04/my-fake-entry_entry_detail.html',
             'zinnia/archives/2012/month/04/my-fake-entry.html',
             'zinnia/archives/month/04/my-fake-entry.html',
             'zinnia/archives/2012/month/04/entry_detail.html',
             'zinnia/archives/month/04/entry_detail.html',
             'zinnia/archives/2012/week/16/my-fake-entry_entry_detail.html',
             'zinnia/archives/week/16/my-fake-entry_entry_detail.html',
             'zinnia/archives/2012/week/16/my-fake-entry.html',
             'zinnia/archives/week/16/my-fake-entry.html',
             'zinnia/archives/2012/week/16/entry_detail.html',
             'zinnia/archives/week/16/entry_detail.html',
             'zinnia/archives/2012/my-fake-entry_entry_detail.html',
             'zinnia/archives/2012/my-fake-entry.html',
             'zinnia/archives/2012/entry_detail.html',
             'zinnia/archives/my-fake-entry_entry_detail.html',
             'zinnia/my-fake-entry_entry_detail.html',
             'my-fake-entry_entry_detail.html',
             'zinnia/archives/my-fake-entry.html',
             'zinnia/my-fake-entry.html',
             'my-fake-entry.html',
             'zinnia/archives/entry_detail.html',
             'zinnia/entry_detail.html',
             'entry_detail.html'])
        # A custom detail_template must replace 'entry_detail.html' in
        # every candidate derived from it.
        instance.object.detail_template = 'custom.html'
        self.assertEqual(
            instance.get_template_names(),
            ['zinnia/archives/2012/04/21/my-fake-entry_custom.html',
             'zinnia/archives/month/04/day/21/my-fake-entry_custom.html',
             'zinnia/archives/2012/day/21/my-fake-entry_custom.html',
             'zinnia/archives/day/21/my-fake-entry_custom.html',
             'zinnia/archives/2012/04/21/my-fake-entry.html',
             'zinnia/archives/month/04/day/21/my-fake-entry.html',
             'zinnia/archives/2012/day/21/my-fake-entry.html',
             'zinnia/archives/day/21/my-fake-entry.html',
             'zinnia/archives/2012/04/21/custom.html',
             'zinnia/archives/month/04/day/21/custom.html',
             'zinnia/archives/2012/day/21/custom.html',
             'zinnia/archives/day/21/custom.html',
             'zinnia/archives/2012/month/04/my-fake-entry_custom.html',
             'zinnia/archives/month/04/my-fake-entry_custom.html',
             'zinnia/archives/2012/month/04/my-fake-entry.html',
             'zinnia/archives/month/04/my-fake-entry.html',
             'zinnia/archives/2012/month/04/custom.html',
             'zinnia/archives/month/04/custom.html',
             'zinnia/archives/2012/week/16/my-fake-entry_custom.html',
             'zinnia/archives/week/16/my-fake-entry_custom.html',
             'zinnia/archives/2012/week/16/my-fake-entry.html',
             'zinnia/archives/week/16/my-fake-entry.html',
             'zinnia/archives/2012/week/16/custom.html',
             'zinnia/archives/week/16/custom.html',
             'zinnia/archives/2012/my-fake-entry_custom.html',
             'zinnia/archives/2012/my-fake-entry.html',
             'zinnia/archives/2012/custom.html',
             'zinnia/archives/my-fake-entry_custom.html',
             'zinnia/my-fake-entry_custom.html',
             'my-fake-entry_custom.html',
             'zinnia/archives/my-fake-entry.html',
             'zinnia/my-fake-entry.html',
             'my-fake-entry.html',
             'zinnia/archives/custom.html',
             'zinnia/custom.html',
             'custom.html'])
    def test_previous_next_published_mixin(self):
        """
        PreviousNextPublishedMixin must resolve the nearest published
        entry dates around a reference date at year/week/month/day
        granularity, returning None when no entry exists on that side.
        """
        site = Site.objects.get_current()
        params = {'title': 'Entry 1', 'content': 'Entry 1',
                  'slug': 'entry-1', 'status': PUBLISHED,
                  'publication_date': datetime(2012, 1, 1, 12)}
        entry_1 = Entry.objects.create(**params)
        entry_1.sites.add(site)
        params = {'title': 'Entry 2', 'content': 'Entry 2',
                  'slug': 'entry-2', 'status': PUBLISHED,
                  'publication_date': datetime(2012, 3, 15, 12)}
        entry_2 = Entry.objects.create(**params)
        entry_2.sites.add(site)
        params = {'title': 'Entry 3', 'content': 'Entry 3',
                  'slug': 'entry-3', 'status': PUBLISHED,
                  'publication_date': datetime(2013, 6, 2, 12)}
        entry_3 = Entry.objects.create(**params)
        entry_3.sites.add(site)
        class EntryPreviousNextPublished(PreviousNextPublishedMixin):
            def get_queryset(self):
                return Entry.published.all()
        # Before the first entry: nothing previous, everything next.
        test_date = datetime(2009, 12, 1)
        epnp = EntryPreviousNextPublished()
        self.assertEqual(epnp.get_previous_year(test_date), None)
        self.assertEqual(epnp.get_previous_week(test_date), None)
        self.assertEqual(epnp.get_previous_month(test_date), None)
        self.assertEqual(epnp.get_previous_day(test_date), None)
        self.assertEqual(epnp.get_next_year(test_date), date(2012, 1, 1))
        self.assertEqual(epnp.get_next_week(test_date), date(2011, 12, 26))
        self.assertEqual(epnp.get_next_month(test_date), date(2012, 1, 1))
        self.assertEqual(epnp.get_next_day(test_date), date(2012, 1, 1))
        # Exactly on the first entry's date.
        test_date = datetime(2012, 1, 1)
        epnp = EntryPreviousNextPublished()
        self.assertEqual(epnp.get_previous_year(test_date), None)
        self.assertEqual(epnp.get_previous_week(test_date), None)
        self.assertEqual(epnp.get_previous_month(test_date), None)
        self.assertEqual(epnp.get_previous_day(test_date), None)
        self.assertEqual(epnp.get_next_year(test_date), date(2013, 1, 1))
        self.assertEqual(epnp.get_next_week(test_date), date(2012, 3, 12))
        self.assertEqual(epnp.get_next_month(test_date), date(2012, 3, 1))
        self.assertEqual(epnp.get_next_day(test_date), date(2012, 3, 15))
        # In between entries: neighbors exist on both sides.
        test_date = datetime(2012, 3, 15)
        epnp = EntryPreviousNextPublished()
        self.assertEqual(epnp.get_previous_year(test_date), None)
        self.assertEqual(epnp.get_previous_week(test_date), date(2011, 12, 26))
        self.assertEqual(epnp.get_previous_month(test_date), date(2012, 1, 1))
        self.assertEqual(epnp.get_previous_day(test_date), date(2012, 1, 1))
        self.assertEqual(epnp.get_next_year(test_date), date(2013, 1, 1))
        self.assertEqual(epnp.get_next_week(test_date), date(2013, 5, 27))
        self.assertEqual(epnp.get_next_month(test_date), date(2013, 6, 1))
        self.assertEqual(epnp.get_next_day(test_date), date(2013, 6, 2))
        # On the last entry's date: nothing next.
        test_date = datetime(2013, 6, 2)
        epnp = EntryPreviousNextPublished()
        self.assertEqual(epnp.get_previous_year(test_date), date(2012, 1, 1))
        self.assertEqual(epnp.get_previous_week(test_date), date(2012, 3, 12))
        self.assertEqual(epnp.get_previous_month(test_date), date(2012, 3, 1))
        self.assertEqual(epnp.get_previous_day(test_date), date(2012, 3, 15))
        self.assertEqual(epnp.get_next_year(test_date), None)
        self.assertEqual(epnp.get_next_week(test_date), None)
        self.assertEqual(epnp.get_next_month(test_date), None)
        self.assertEqual(epnp.get_next_day(test_date), None)
        # After the last entry: everything previous, nothing next.
        test_date = datetime(2014, 5, 1)
        epnp = EntryPreviousNextPublished()
        self.assertEqual(epnp.get_previous_year(test_date), date(2013, 1, 1))
        self.assertEqual(epnp.get_previous_week(test_date), date(2013, 5, 27))
        self.assertEqual(epnp.get_previous_month(test_date), date(2013, 6, 1))
        self.assertEqual(epnp.get_previous_day(test_date), date(2013, 6, 2))
        self.assertEqual(epnp.get_next_year(test_date), None)
        self.assertEqual(epnp.get_next_week(test_date), None)
        self.assertEqual(epnp.get_next_month(test_date), None)
        self.assertEqual(epnp.get_next_day(test_date), None)
def test_prefetch_related_mixin(self):
instance = PrefetchRelatedMixin()
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
instance.relation_names = 'string'
self.assertRaises(ImproperlyConfigured,
instance.get_queryset)
    @skipIfCustomUser
    def test_prefetch_categories_authors_mixin(self):
        """
        PrefetchCategoriesAuthorsMixin must prefetch the authors and
        categories m2m relations, cutting the query count when the
        relations are traversed (7 queries -> 3 for three entries).
        """
        author = Author.objects.create_user(username='author',
                                            email='author@example.com')
        category = Category.objects.create(title='Category',
                                           slug='category')
        for i in range(3):
            params = {'title': 'My entry',
                      'content': 'My content',
                      'slug': 'my-entry-%s' % i}
            entry = Entry.objects.create(**params)
            entry.authors.add(author)
            entry.categories.add(category)
        class View(object):
            def get_queryset(self):
                return Entry.objects.all()
        class ViewCategoriesAuthorsPrefetched(
                PrefetchCategoriesAuthorsMixin, View):
            pass
        # Without prefetching: 1 query for entries + 2 per entry.
        with self.assertNumQueries(7):
            for entry in View().get_queryset():
                entry.authors.count()
                entry.categories.count()
        # With prefetching: 1 for entries + 1 per m2m relation.
        with self.assertNumQueries(3):
            for entry in ViewCategoriesAuthorsPrefetched().get_queryset():
                entry.authors.count()
                entry.categories.count()
| bsd-3-clause |
rdeheele/odoo | openerp/tools/view_validation.py | 367 | 2303 | """ View validation code (using assertions, not the RNG schema). """
import logging
_logger = logging.getLogger(__name__)
def valid_page_in_book(arch):
    """A `page` node must be below a `book` node."""
    # Collect pages that have no notebook ancestor; any hit is invalid.
    orphan_pages = arch.xpath('//page[not(ancestor::notebook)]')
    return not orphan_pages
def valid_field_in_graph(arch):
    """ Children of ``graph`` can only be ``field`` """
    # Every direct child of the root graph element must be a field.
    return all(node.tag == 'field'
               for node in arch.xpath('/graph/child::*'))
def valid_field_in_tree(arch):
    """ Children of ``tree`` view must be ``field`` or ``button``."""
    # Only fields and buttons may appear directly under the tree root.
    allowed = ('field', 'button')
    return all(node.tag in allowed
               for node in arch.xpath('/tree/child::*'))
def valid_att_in_field(arch):
    """ ``field`` nodes must all have a ``@name`` """
    # Any field lacking a name attribute makes the view invalid.
    nameless_fields = arch.xpath('//field[not(@name)]')
    return not nameless_fields
def valid_att_in_label(arch):
    """ ``label`` nodes must have a ``@for`` or a ``@string`` """
    # A label with neither attribute has nothing to render or point at.
    bad_labels = arch.xpath('//label[not(@for or @string)]')
    return not bad_labels
def valid_att_in_form(arch):
    # Placeholder predicate kept for symmetry with the other form checks;
    # it currently imposes no constraint on ``form`` views.  Note: no
    # docstring on purpose elsewhere docstrings feed the error log, and
    # this check can never fail.
    return True
def valid_type_in_colspan(arch):
    """A `colspan` attribute must be an `integer` type."""
    for attrib in arch.xpath('//*/@colspan'):
        try:
            int(attrib)
        except ValueError:
            # Narrowed from a bare ``except``: only a failed integer
            # conversion of the attribute string should invalidate the
            # view; a bare except also swallowed KeyboardInterrupt etc.
            return False
    return True
def valid_type_in_col(arch):
    """A `col` attribute must be an `integer` type."""
    for attrib in arch.xpath('//*/@col'):
        try:
            int(attrib)
        except ValueError:
            # Narrowed from a bare ``except`` (see valid_type_in_colspan):
            # only a non-integer attribute value should invalidate the view.
            return False
    return True
def valid_view(arch):
    """Run every structural predicate registered for the view's root tag.

    Unknown root tags are accepted unconditionally.  The first failing
    predicate is logged (its docstring is the error message) and makes
    the whole view invalid.
    """
    validators = {
        'form': [valid_page_in_book, valid_att_in_form, valid_type_in_colspan,
                 valid_type_in_col, valid_att_in_field, valid_att_in_label],
        'graph': [valid_field_in_graph, valid_att_in_field],
        'tree': [valid_field_in_tree, valid_att_in_field],
    }
    for pred in validators.get(arch.tag, []):
        if not pred(arch):
            _logger.error('Invalid XML: %s', pred.__doc__)
            return False
    return True
| agpl-3.0 |
gak/VintageousEx | tests/test_range.py | 1 | 3362 | import unittest
import re
from VintageousEx.test_runner import g_test_view
from VintageousEx.tests import select_bof
from VintageousEx.tests import select_eof
from VintageousEx.tests import select_line
from VintageousEx.vex.ex_range import EX_RANGE
from VintageousEx.vex.ex_range import new_calculate_range
from VintageousEx.vex.ex_range import calculate_relative_ref
from VintageousEx.vex.ex_range import calculate_address
class TestCalculateRelativeRef(unittest.TestCase):
    """Tests for calculate_relative_ref ('.' and '$' line references)."""
    def setUp(self):
        # Bug fix: this hook was misspelled ``StartUp``, so unittest never
        # called it and the shared view's selection was not reset before
        # each test.  The hook must be named ``setUp``.
        select_bof(g_test_view)

    def tearDown(self):
        select_bof(g_test_view)

    def testCalculateRelativeRef(self):
        # '.' resolves to the current line, '$' to the last line;
        # start_line overrides the implicit current line for '.'.
        values = (
            (calculate_relative_ref(g_test_view, '.'), 1),
            (calculate_relative_ref(g_test_view, '.', start_line=100), 101),
            (calculate_relative_ref(g_test_view, '$'), 538),
            (calculate_relative_ref(g_test_view, '$', start_line=100), 538),
        )
        for actual, expected in values:
            self.assertEquals(actual, expected)

    def testCalculateRelativeRef2(self):
        # '.' must track the caret as the selection moves.
        self.assertEquals(calculate_relative_ref(g_test_view, '.'), 1)
        self.assertEquals(calculate_relative_ref(g_test_view, '$'), 538)
        select_line(g_test_view, 100)
        self.assertEquals(calculate_relative_ref(g_test_view, '.'), 100)
        select_line(g_test_view, 200)
        self.assertEquals(calculate_relative_ref(g_test_view, '.'), 200)
class TestCalculatingRanges(unittest.TestCase):
    """Tests for new_calculate_range over numeric, %, arithmetic and
    search-based address forms."""
    def testCalculateCorrectRange(self):
        values = (
            (new_calculate_range(g_test_view, '0'), [(0, 0)]),
            (new_calculate_range(g_test_view, '1'), [(1, 1)]),
            (new_calculate_range(g_test_view, '1,1'), [(1, 1)]),
            (new_calculate_range(g_test_view, '%,1'), [(1, 538)]),
            (new_calculate_range(g_test_view, '1,%'), [(1, 538)]),
            (new_calculate_range(g_test_view, '1+99,160-10'), [(100, 150)]),
            (new_calculate_range(g_test_view, '/THIRTY/+10,100'), [(40, 100)]),
        )
        # The search-based cases below depend on the caret position, so
        # the caret is moved to line 31 before they are evaluated --
        # note ',' vs ';' differ in where the caret sits for the second
        # address.
        select_line(g_test_view, 31)
        values += (
            (new_calculate_range(g_test_view, '10,/THIRTY/'), [(10, 31)]),
            (new_calculate_range(g_test_view, '10;/THIRTY/'), [(10, 30)]),
        )
        for actual, expected in values:
            self.assertEquals(actual, expected)
    def tearDown(self):
        select_bof(g_test_view)
class CalculateAddress(unittest.TestCase):
    """Tests for calculate_address against the shared test view."""
    def setUp(self):
        select_eof(g_test_view)

    def tearDown(self):
        select_bof(g_test_view)

    def testCalculateAddressCorrectly(self):
        # Line references are 1-based; the computed address is 0-based.
        for ref, expected in (('100', 99), ('200', 199)):
            address = dict(ref=ref, offset=None, search_offsets=[])
            self.assertEquals(
                calculate_address(g_test_view, address), expected)

    def testOutOfBoundsAddressShouldReturnNone(self):
        beyond_eof = dict(ref='1000', offset=None, search_offsets=[])
        self.assertEquals(calculate_address(g_test_view, beyond_eof), None)

    def testInvalidAddressShouldReturnNone(self):
        # A non-numeric ref currently blows up with AttributeError.
        bad_ref = dict(ref='XXX', offset=None, search_offsets=[])
        self.assertRaises(
            AttributeError, calculate_address, g_test_view, bad_ref)
| mit |
Morloth1274/EVE-Online-POCO-manager | tests/test_corp.py | 1 | 37117 | import mock
from tests.compat import unittest
from tests.utils import APITestCase
import evelink.api as evelink_api
import evelink.corp as evelink_corp
# Canned APIResult shared by tests that only exercise the parse plumbing:
# payload mock.sentinel.api_result, current=12345, expires=67890.
API_RESULT_SENTINEL = evelink_api.APIResult(mock.sentinel.api_result, 12345, 67890)
class CorpTestCase(APITestCase):
    def setUp(self):
        # APITestCase.setUp provides the mocked ``self.api``; wrap it in
        # the Corp client under test.
        super(CorpTestCase, self).setUp()
        self.corp = evelink_corp.Corp(api=self.api)
    def test_corporation_sheet_public(self):
        """Fetching another corp's sheet (by id) yields only the public
        fields: no hangar/wallet division names and no member limit."""
        self.api.get.return_value = self.make_api_result("corp/corporation_sheet.xml")
        result, current, expires = self.corp.corporation_sheet(123)
        self.assertEqual(result, {
            'alliance': {'id': 150430947, 'name': 'The Dead Rabbits'},
            'ceo': {'id': 150208955, 'name': 'Mark Roled'},
            'description': "Garth's testing corp of awesome sauce, win sauce as it were. In this\n"
                " corp...<br><br>IT HAPPENS ALL OVER",
            'hq': {'id': 60003469,
                'name': 'Jita IV - Caldari Business Tribunal Information Center'},
            'id': 150212025,
            'logo': {'graphic_id': 0,
                'shapes': [{'color': 681, 'id': 448},
                    {'color': 676, 'id': 0},
                    {'color': 0, 'id': 418}]},
            'members': {'current': 3},
            'name': 'Banana Republic',
            'shares': 1,
            'tax_percent': 93.7,
            'ticker': 'BR',
            'url': 'some url',
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/CorporationSheet', params={'corporationID': 123}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_corporation_sheet(self):
        """Fetching our own corp's sheet (no id) adds the private fields:
        hangar/wallet division names and the member limit."""
        self.api.get.return_value = self.make_api_result("corp/corporation_sheet.xml")
        result, current, expires = self.corp.corporation_sheet()
        self.assertEqual(result, {
            'alliance': {'id': 150430947, 'name': 'The Dead Rabbits'},
            'ceo': {'id': 150208955, 'name': 'Mark Roled'},
            'description': "Garth's testing corp of awesome sauce, win sauce as it were. In this\n"
                " corp...<br><br>IT HAPPENS ALL OVER",
            'hangars': {1000: 'Division 1',
                1001: 'Division 2',
                1002: 'Division 3',
                1003: 'Division 4',
                1004: 'Division 5',
                1005: 'Division 6',
                1006: 'Division 7'},
            'hq': {'id': 60003469,
                'name': 'Jita IV - Caldari Business Tribunal Information Center'},
            'id': 150212025,
            'logo': {'graphic_id': 0,
                'shapes': [{'color': 681, 'id': 448},
                    {'color': 676, 'id': 0},
                    {'color': 0, 'id': 418}]},
            'members': {'current': 3, 'limit': 6300},
            'name': 'Banana Republic',
            'shares': 1,
            'tax_percent': 93.7,
            'ticker': 'BR',
            'url': 'some url',
            'wallets': {1000: 'Wallet Division 1',
                1001: 'Wallet Division 2',
                1002: 'Wallet Division 3',
                1003: 'Wallet Division 4',
                1004: 'Wallet Division 5',
                1005: 'Wallet Division 6',
                1006: 'Wallet Division 7'}
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/CorporationSheet', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_industry_jobs')
def test_industry_jobs(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.industry_jobs
result, current, expires = self.corp.industry_jobs()
self.assertEqual(result, mock.sentinel.industry_jobs)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/IndustryJobs', params={}),
])
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_industry_jobs')
def test_industry_jobs_history(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.industry_jobs
result, current, expires = self.corp.industry_jobs_history()
self.assertEqual(result, mock.sentinel.industry_jobs)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/IndustryJobsHistory', params={}),
])
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
    def test_npc_standings(self):
        """NPC standings are grouped by entity type: agents, corps and
        factions, each keyed by entity id."""
        self.api.get.return_value = self.make_api_result("corp/npc_standings.xml")
        result, current, expires = self.corp.npc_standings()
        self.assertEqual(result, {
            'agents': {
                3008416: {
                    'id': 3008416,
                    'name': 'Antaken Kamola',
                    'standing': 2.71,
                },
            },
            'corps': {
                1000003: {
                    'id': 1000003,
                    'name': 'Prompt Delivery',
                    'standing': 0.97,
                },
            },
            'factions': {
                500019: {
                    'id': 500019,
                    'name': "Sansha's Nation",
                    'standing': -4.07,
                },
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/Standings', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_kills')
def test_kills(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.kills
result, current, expires = self.corp.kills()
self.assertEqual(result, mock.sentinel.kills)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/KillLog', params={}),
])
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_contract_bids')
def test_contract_bids(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_contract_bids
result, current, expires = self.corp.contract_bids()
self.assertEqual(result, mock.sentinel.parsed_contract_bids)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/ContractBids', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_contract_items')
def test_contract_items(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_contract_items
result, current, expires = self.corp.contract_items(12345)
self.assertEqual(result, mock.sentinel.parsed_contract_items)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/ContractItems', params={'contractID': 12345}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_contracts')
def test_contracts(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_contracts
result, current, expires = self.corp.contracts()
self.assertEqual(result, mock.sentinel.parsed_contracts)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/Contracts', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_contact_list')
def test_contacts(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_contacts
result, current, expires = self.corp.contacts()
self.assertEqual(result, mock.sentinel.parsed_contacts)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/ContactList', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
    def test_wallet_info(self):
        """Account balances come back keyed by wallet division key."""
        self.api.get.return_value = self.make_api_result("corp/wallet_info.xml")
        result, current, expires = self.corp.wallet_info()
        self.assertEqual(result, {
            1000: {'balance': 74171957.08, 'id': 4759, 'key': 1000},
            1001: {'balance': 6.05, 'id': 5687, 'key': 1001},
            1002: {'balance': 0.0, 'id': 5688, 'key': 1002},
            1003: {'balance': 17349111.0, 'id': 5689, 'key': 1003},
            1004: {'balance': 0.0, 'id': 5690, 'key': 1004},
            1005: {'balance': 0.0, 'id': 5691, 'key': 1005},
            1006: {'balance': 0.0, 'id': 5692, 'key': 1006},
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/AccountBalance', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_wallet_journal')
def test_wallet_journal(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_journal
result, current, expires = self.corp.wallet_journal()
self.assertEqual(result, mock.sentinel.parsed_journal)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletJournal', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
def test_wallet_journal_paged(self):
self.api.get.return_value = self.make_api_result("char/wallet_journal.xml")
self.corp.wallet_journal(before_id=1234)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletJournal', params={'fromID': 1234}),
])
def test_wallet_journal_limit(self):
self.api.get.return_value = self.make_api_result("char/wallet_journal.xml")
self.corp.wallet_journal(limit=100)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletJournal', params={'rowCount': 100}),
])
def test_wallet_journal_account_key(self):
self.api.get.return_value = self.make_api_result("char/wallet_journal.xml")
self.corp.wallet_journal(account='0003')
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletJournal', params={'accountKey': '0003'}),
])
@mock.patch('evelink.corp.parse_wallet_transactions')
def test_wallet_transcations(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_transactions
result, current, expires = self.corp.wallet_transactions()
self.assertEqual(result, mock.sentinel.parsed_transactions)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletTransactions', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
def test_wallet_transactions_paged(self):
self.api.get.return_value = self.make_api_result("char/wallet_transactions.xml")
self.corp.wallet_transactions(before_id=1234)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletTransactions', params={'fromID': 1234}),
])
def test_wallet_transactions_limit(self):
self.api.get.return_value = self.make_api_result("char/wallet_transactions.xml")
self.corp.wallet_transactions(limit=100)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletTransactions', params={'rowCount': 100}),
])
def test_wallet_transactions_account_key(self):
self.api.get.return_value = self.make_api_result("char/wallet_transactions.xml")
self.corp.wallet_transactions(account='0004')
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/WalletTransactions', params={'accountKey': '0004'}),
])
@mock.patch('evelink.corp.parse_market_orders')
def test_orders(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_orders
result, current, expires = self.corp.orders()
self.assertEqual(result, mock.sentinel.parsed_orders)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/MarketOrders', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
    def test_faction_warfare_stats(self):
        """FW stats expose the enlisted faction, kill/point tallies,
        pilot count and enlistment timestamp."""
        self.api.get.return_value = self.make_api_result('corp/faction_warfare_stats.xml')
        result, current, expires = self.corp.faction_warfare_stats()
        self.assertEqual(result, {
            'faction': {'id': 500001, 'name': 'Caldari State'},
            'kills': {'total': 0, 'week': 0, 'yesterday': 0},
            'pilots': 6,
            'points': {'total': 0, 'week': 1144, 'yesterday': 0},
            'start_ts': 1213135800,
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/FacWarStats', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
@mock.patch('evelink.corp.parse_assets')
def test_assets(self, mock_parse):
self.api.get.return_value = API_RESULT_SENTINEL
mock_parse.return_value = mock.sentinel.parsed_assets
result, current, expires = self.corp.assets()
self.assertEqual(result, mock.sentinel.parsed_assets)
self.assertEqual(mock_parse.mock_calls, [
mock.call(mock.sentinel.api_result),
])
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/AssetList', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
    def test_shareholders(self):
        """Shareholders are split into character ('char') and corporate
        ('corp') holders, keyed by holder id."""
        self.api.get.return_value = self.make_api_result("corp/shareholders.xml")
        result, current, expires = self.corp.shareholders()
        self.assertEqual(result, {
            'char': {
                126891489: {
                    'corp': {
                        'id': 632257314,
                        'name': 'Corax.',
                    },
                    'id': 126891489,
                    'name': 'Dragonaire',
                    'shares': 1,
                },
            },
            'corp': {
                126891482: {
                    'id': 126891482,
                    'name': 'DragonaireCorp',
                    'shares': 1,
                },
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/Shareholders', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_titles(self):
        """Titles carry per-scope (global/at_hq/at_base/at_other) role
        and grantable-role maps, keyed by title id."""
        self.api.get.return_value = self.make_api_result("corp/titles.xml")
        result, current, expires = self.corp.titles()
        self.assertEqual(result, {
            1: {
                'can_grant': {'at_base': {}, 'at_hq': {}, 'at_other': {}, 'global': {}},
                'id': 1,
                'name': 'Member',
                'roles': {
                    'at_base': {},
                    'at_other': {},
                    'global': {},
                    'at_hq': {
                        8192: {
                            'description': 'Can take items from this divisions hangar',
                            'id': 8192,
                            'name': 'roleHangarCanTake1',
                        },
                    },
                },
            },
            2: {
                'can_grant': {'at_base': {}, 'at_hq': {}, 'at_other': {}, 'global': {}},
                'id': 2,
                'name': 'unused 1',
                'roles': {'at_base': {}, 'at_hq': {}, 'at_other': {}, 'global': {}},
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/Titles', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_starbases(self):
        """Starbases are keyed by item id with location, state and
        ownership details."""
        self.api.get.return_value = self.make_api_result("corp/starbases.xml")
        result, current, expires = self.corp.starbases()
        self.assertEqual(result, {
            100449451: {
                'id': 100449451,
                'location_id': 30000163,
                'moon_id': 40010395,
                'online_ts': 1244098851,
                'standings_owner_id': 673381830,
                'state': 'online',
                'state_ts': 1323374621,
                'type_id': 27538,
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/StarbaseList', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_starbase_details(self):
        """Starbase details include combat settings, fuel levels and
        permission configuration; the POS id goes out as itemID."""
        self.api.get.return_value = self.make_api_result("corp/starbase_details.xml")
        result, current, expires = self.corp.starbase_details(123)
        self.assertEqual(result, {
            'combat': {
                'hostility': {
                    'aggression': {'enabled': False},
                    'sec_status': {'enabled': False, 'threshold': 0.0},
                    'standing': {'enabled': True, 'threshold': 9.9},
                    'war': {'enabled': True},
                },
                'standings_owner_id': 154683985,
            },
            'fuel': {16274: 18758, 16275: 2447},
            'online_ts': 1240097429,
            'permissions': {
                'deploy': {
                    'anchor': 'Starbase Config',
                    'offline': 'Starbase Config',
                    'online': 'Starbase Config',
                    'unanchor': 'Starbase Config',
                },
                'forcefield': {'alliance': True, 'corp': True},
                'fuel': {
                    'take': 'Alliance Members',
                    'view': 'Starbase Config',
                },
            },
            'state': 'online',
            'state_ts': 1241299896,
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/StarbaseDetail', params={'itemID': 123}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_members(self):
        """members() defaults to the extended listing (extended=1),
        including location, ship, roles and title per member."""
        self.api.get.return_value = self.make_api_result("corp/members.xml")
        result, current, expires = self.corp.members()
        self.assertEqual(result, {
            150336922: {
                'base': {'id': 0, 'name': ''},
                'can_grant': 0,
                'id': 150336922,
                'join_ts': 1181745540,
                'location': {
                    'id': 60011566,
                    'name': 'Bourynes VII - Moon 2 - University of Caille School',
                },
                'logoff_ts': 1182029760,
                'logon_ts': 1182028320,
                'name': 'corpexport',
                'roles': 0,
                'ship_type': {'id': 606, 'name': 'Velator'},
                'title': 'asdf',
            },
            150337897: {
                'base': {'id': 0, 'name': ''},
                'can_grant': 0,
                'id': 150337897,
                'join_ts': 1181826840,
                'location': {
                    'id': 60011566,
                    'name': 'Bourynes VII - Moon 2 - University of Caille School',
                },
                'logoff_ts': 1182029700,
                'logon_ts': 1182028440,
                'name': 'corpslave',
                'roles': 22517998271070336,
                'ship_type': {'id': 670, 'name': 'Capsule'},
                'title': '',
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/MemberTracking', params={'extended': 1}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
def test_members_not_extended(self):
self.api.get.return_value = self.make_api_result("corp/members.xml")
result, current, expires = self.corp.members(extended=False)
self.assertEqual(self.api.mock_calls, [
mock.call.get('corp/MemberTracking', params={}),
])
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
    def test_permissions(self):
        """Member security info is keyed by character id and includes
        per-scope roles, grantable roles and assigned titles."""
        self.api.get.return_value = self.make_api_result("corp/permissions.xml")
        result, current, expires = self.corp.permissions()
        self.assertEqual(result, {
            123456789: {
                'can_grant': {
                    'at_base': {4: 'Bar'},
                    'at_hq': {},
                    'at_other': {},
                    'global': {},
                },
                'id': 123456789,
                'name': 'Tester',
                'roles': {
                    'at_base': {},
                    'at_hq': {},
                    'at_other': {},
                    'global': {1: 'Foo'},
                },
                'titles': {
                    1: 'Member ',
                    512: 'Gas Attendant',
                },
            },
        })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/MemberSecurity', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_permissions_log(self):
        """The role-change log is an ordered list of entries, each with
        before/after role maps, scope, issuer and recipient."""
        self.api.get.return_value = self.make_api_result("corp/permissions_log.xml")
        result, current, expires = self.corp.permissions_log()
        self.assertEqual(result, [
            {
                'timestamp': 1218131820,
                'recipient': {'id': 1234567890, 'name': 'Tester'},
                'roles': {
                    'after': {},
                    'before': {
                        8192: 'roleHangarCanTake1',
                        4398046511104: 'roleContainerCanTake1',
                    },
                },
                'role_type': 'at_other',
                'issuer': {'id': 1234567890, 'name': 'Tester'},
            },
            {
                'timestamp': 1218131820,
                'recipient': {'id': 1234567890, 'name': 'Tester'},
                'roles': {
                    'after': {},
                    'before': {
                        8192: 'roleHangarCanTake1',
                    },
                },
                'role_type': 'at_other',
                'issuer': {'id': 1234567890, 'name': 'Tester'},
            },
            {
                'timestamp': 1218131820,
                'recipient': {'id': 1234567890, 'name': 'Tester'},
                'roles': {
                    'after': {
                        16777216: 'roleHangarCanQuery5',
                    },
                    'before': {},
                },
                'role_type': 'at_other',
                'issuer': {'id': 1234567890, 'name': 'Tester'},
            },
            {
                'timestamp': 1215452820,
                'recipient': {'id': 1234567890, 'name': 'Tester'},
                'roles': {
                    'after': {},
                    'before': {
                        2199023255552: 'roleEquipmentConfig',
                        4503599627370496: 'roleJuniorAccountant',
                    },
                },
                'role_type': 'at_other',
                'issuer': {'id': 1234567890, 'name': 'Tester'},
            },
        ])
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('corp/MemberSecurityLog', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
def test_stations(self):
    """corp.stations() parses corp/OutpostList into a map keyed by station id."""
    self.api.get.return_value = self.make_api_result("corp/stations.xml")
    result, current, expires = self.corp.stations()
    self.assertEqual(result, {
        61000368: {
            'docking_fee_per_volume': 0.0,
            'id': 61000368,
            'name': 'Station Name Goes Here',
            'office_fee': 25000000,
            'owner_id': 857174087,
            'reprocessing': {'cut': 0.025, 'efficiency': 0.5},
            'standing_owner_id': 673381830,
            'system_id': 30004181,
            'type_id': 21645,
        },
    })
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/OutpostList', params={}),
    ])
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_station_services(self):
    """corp.station_services(id) parses corp/OutpostServiceDetail keyed by service name."""
    self.api.get.return_value = self.make_api_result("corp/station_services.xml")
    # 123 is the outpost itemID forwarded to the API as 'itemID'.
    result, current, expires = self.corp.station_services(123)
    self.assertEqual(result, {
        'Market': {
            'name': 'Market',
            'owner_id': 857174087,
            'standing': {
                'bad_surcharge': 10.0,
                'good_discount': 0.0,
                'minimum': 10.0,
            },
        },
        'Repair Facilities': {
            'name': 'Repair Facilities',
            'owner_id': 857174087,
            'standing': {
                'bad_surcharge': 10.0,
                'good_discount': 10.0,
                'minimum': 10.0,
            },
        },
    })
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/OutpostServiceDetail', params={'itemID': 123}),
    ])
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_medals(self):
    """corp.medals() parses corp/Medals into a map keyed by medal id."""
    self.api.get.return_value = self.make_api_result("corp/medals.xml")
    result, current, expires = self.corp.medals()
    self.assertEqual(result, {
        1: {
            'create_ts': 1345740633,
            'creator_id': 2,
            'description': 'A test medal.',
            'id': 1,
            'title': 'Test Medal',
        },
    })
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/Medals', params={}),
    ])
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_member_medals(self):
    """corp.member_medals() parses corp/MemberMedals, nested char id -> medal id."""
    self.api.get.return_value = self.make_api_result("corp/member_medals.xml")
    result, current, expires = self.corp.member_medals()
    self.assertEqual(result, {
        1302462525: {
            24216: {
                'char_id': 1302462525,
                'issuer_id': 1824523597,
                'medal_id': 24216,
                'public': True,
                'reason': 'Its True',
                'timestamp': 1241319835,
            },
        },
    })
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/MemberMedals', params={}),
    ])
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_container_log(self):
    """corp.container_log() parses corp/ContainerLog into an ordered list of audit events."""
    self.api.get.return_value = self.make_api_result("corp/container_log.xml")
    result, current, expires = self.corp.container_log()
    # Events are newest-first; 'details' fields not relevant to an action are None.
    self.assertEqual(result, [
        {'action': 'Set Name',
         'actor': {'id': 783037732, 'name': 'Halo Glory'},
         'details': {'config': {'new': None, 'old': None},
                     'flag': 4,
                     'password_type': None,
                     'quantity': None,
                     'type_id': None},
         'item': {'id': 2051471251, 'type_id': 17366},
         'location_id': 60011728,
         'timestamp': 1229847000},
        {'action': 'Set Password',
         'actor': {'id': 783037732, 'name': 'Halo Glory'},
         'details': {'config': {'new': None, 'old': None},
                     'flag': 4,
                     'password_type': 'Config',
                     'quantity': None,
                     'type_id': None},
         'item': {'id': 2051471251, 'type_id': 17366},
         'location_id': 60011728,
         'timestamp': 1229846940},
        {'action': 'Configure',
         'actor': {'id': 783037732, 'name': 'Halo Glory'},
         'details': {'config': {'new': 0, 'old': 0},
                     'flag': 4,
                     'password_type': None,
                     'quantity': None,
                     'type_id': None},
         'item': {'id': 2051471251, 'type_id': 17366},
         'location_id': 60011728,
         'timestamp': 1229846940},
        {'action': 'Assemble',
         'actor': {'id': 783037732, 'name': 'Halo Glory'},
         'details': {'config': {'new': None, 'old': None},
                     'flag': 4,
                     'password_type': None,
                     'quantity': None,
                     'type_id': None},
         'item': {'id': 2051471251, 'type_id': 17366},
         'location_id': 60011728,
         'timestamp': 1229846880}
    ])
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/ContainerLog', params={}),
    ])
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_locations(self):
    """corp.locations(ids) forwards the id tuple as 'IDs' and maps results by item id."""
    self.api.get.return_value = self.make_api_result("corp/locations.xml")
    result, current, expires = self.corp.locations((1009661446486,1007448817800))
    self.assertEqual(self.api.mock_calls, [
        mock.call.get('corp/Locations', params={'IDs': (1009661446486,1007448817800),}),
    ])
    # Items without coordinates (e.g. hangar contents) report x/y/z as None.
    self.assertEqual(result,
        {1009661446486:
            {
                'id': 1009661446486,
                'x': None,
                'z': None,
                'name': "Superawesome test Impairor",
                'y': None,
            },
         1007448817800:
            {
                'id': 1007448817800,
                'x': -170714848271.291,
                'z': 208419106396.3,
                'name': "A Whale",
                'y': -1728060949.58229,
            }
        }
    )
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_blueprints(self):
    """corp.blueprints() parses corp/Blueprints keyed by blueprint item id."""
    self.api.get.return_value = self.make_api_result("corp/blueprints.xml")
    result, current, expires = self.corp.blueprints()
    self.assertEqual(self.api.mock_calls, [mock.call.get('corp/Blueprints', params={}),])
    # quantity -1 = original blueprint stack semantics; -2 = blueprint copy;
    # runs -1 = unlimited (original).
    self.assertEqual(result, {
        1000000029372: {
            'location_id': 60014929,
            'type_id': 11568,
            'type_name': 'Avatar Blueprint',
            'location_flag': 4,
            'quantity': 497,
            'time_efficiency': 0,
            'material_efficiency': 0,
            'runs': -1,
        },
        1000000029377: {
            'location_id': 60014929,
            'type_id': 33876,
            'type_name': 'Prophecy Blood Raiders Edition Blueprint',
            'location_flag': 4,
            'quantity': -2,
            'time_efficiency': 0,
            'material_efficiency': 0,
            'runs': 20000,
        },
    })
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_facilities(self):
    """corp.facilities() parses corp/Facilities keyed by facility id."""
    self.api.get.return_value = self.make_api_result("corp/facilities.xml")
    result, current, expires = self.corp.facilities()
    self.assertEqual(self.api.mock_calls, [mock.call.get('corp/Facilities', params={}),])
    self.assertEqual(result, {
        61000936: {
            'region': {
                'id': 10000014,
                'name': 'Catch',
            },
            'starbase_modifier': 0.0,
            'system': {
                'id': 30001255,
                'name': 'Y-PNRL',
            },
            'tax': 0.0,
            'type_id': 21644,
            'type_name': 'Amarr Factory Outpost',
        },
    })
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
def test_customs_offices(self):
    """corp.customs_offices() parses corp/CustomsOffices keyed by office item id."""
    self.api.get.return_value = self.make_api_result("corp/customsoffices.xml")
    result, current, expires = self.corp.customs_offices()
    self.assertEqual(self.api.mock_calls, [mock.call.get('corp/CustomsOffices', params={}),])
    self.assertEqual(result, {
        1014625297736: {
            'permissions': {
                'alliance': True,
                'standings': True,
                'minimum_standing': -10.0,
            },
            'reinforce_hour': 11,
            'system': {
                'id': 30001428,
                'name': 'Ibura',
            },
            'tax_rate': {
                'alliance': 0.05,
                'corp': 0.05,
                'standings': {
                    'high': 0.05,
                    'good': 0.05,
                    'neutral': 0.1,
                    'bad': 0.1,
                    'horrible': 0.1,
                },
            },
        },
    })
    self.assertEqual(current, 12345)
    self.assertEqual(expires, 67890)
if __name__ == "__main__":
    # Allow running this test module directly (outside a test runner).
    unittest.main()
| mit |
andmos/ansible | packaging/sdist/check-link-behavior.py | 114 | 1290 | #!/usr/bin/env python
"""Checks for link behavior required for sdist to retain symlinks."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import platform
import shutil
import sys
import tempfile
def main():
    """Verify that this platform preserves symlinks when hard-linked.

    sdist creation relies on hard links pointing at the symlink entry itself
    (not its target); if the platform resolves the symlink first, abort().
    """
    workdir = tempfile.mkdtemp()
    target = os.path.join(workdir, 'file.txt')
    symlink = os.path.join(workdir, 'symlink.txt')
    hardlink = os.path.join(workdir, 'hardlink.txt')
    try:
        # Create an empty target file, a symlink to it, and a hard link
        # to the symlink entry.
        with open(target, 'w'):
            pass
        os.symlink(target, symlink)
        os.link(symlink, hardlink)

        if not os.path.islink(symlink):
            abort('Symbolic link not created.')

        if not os.path.islink(hardlink):
            # known issue on MacOS (Darwin): the hard link resolves the
            # symlink and ends up as a regular file
            abort('Hard link of symbolic link created as a regular file.')
    finally:
        shutil.rmtree(workdir)
def abort(reason):
    """Terminate the program with an explanatory error message.

    :type reason: str
    """
    message = (
        'ERROR: %s\n'
        'This will prevent symbolic links from being preserved in the resulting tarball.\n'
        'Aborting creation of sdist on platform: %s'
    ) % (reason, platform.system())
    sys.exit(message)
if __name__ == '__main__':
    # Run the link-behavior check when executed as a script.
    main()
| gpl-3.0 |
xianjunzhengbackup/Cloud-Native-Python | env/lib/python3.6/site-packages/pip/_vendor/progress/__init__.py | 916 | 3023 | # Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division
from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time
__version__ = '1.2'
class Infinite(object):
    """Progress indicator for an unbounded task.

    Tracks an ``index`` counter and a simple moving average of per-step
    durations over the last ``sma_window`` steps.
    """

    file = stderr        # stream subclasses render to
    sma_window = 10      # number of recent steps averaged by ``avg``

    def __init__(self, *args, **kwargs):
        self.index = 0
        self.start_ts = time()
        self._ts = self.start_ts
        self._dt = deque(maxlen=self.sma_window)
        # Any keyword argument becomes an instance attribute.
        for name in kwargs:
            setattr(self, name, kwargs[name])

    def __getitem__(self, key):
        """Item access mirrors public attributes; private keys yield None."""
        if key.startswith('_'):
            return None
        return getattr(self, key, None)

    @property
    def avg(self):
        """Moving average of recent per-step durations (0 before any step)."""
        if not self._dt:
            return 0
        return sum(self._dt) / len(self._dt)

    @property
    def elapsed(self):
        """Whole seconds since the indicator was created."""
        return int(time() - self.start_ts)

    @property
    def elapsed_td(self):
        """``elapsed`` as a ``datetime.timedelta``."""
        return timedelta(seconds=self.elapsed)

    # Rendering hooks — no-ops here, overridden by concrete indicators.
    def update(self):
        pass

    def start(self):
        pass

    def finish(self):
        pass

    def next(self, n=1):
        """Advance by *n* steps, recording the per-step duration."""
        if n > 0:
            now = time()
            self._dt.append((now - self._ts) / n)
            self._ts = now
        self.index += n
        self.update()

    def iter(self, it):
        """Yield items from *it*, stepping once per item and finishing at the end."""
        for item in it:
            yield item
            self.next()
        self.finish()


class Progress(Infinite):
    """Progress indicator for a bounded task with a known ``max``."""

    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(*args, **kwargs)
        self.max = kwargs.get('max', 100)

    @property
    def eta(self):
        """Estimated remaining seconds, based on the moving average."""
        return int(ceil(self.avg * self.remaining))

    @property
    def eta_td(self):
        """``eta`` as a ``datetime.timedelta``."""
        return timedelta(seconds=self.eta)

    @property
    def percent(self):
        """Completion as a percentage (may exceed is clamped via ``progress``)."""
        return self.progress * 100

    @property
    def progress(self):
        """Completion ratio in [0, 1]."""
        return min(1, self.index / self.max)

    @property
    def remaining(self):
        """Steps left before reaching ``max`` (never negative)."""
        return max(self.max - self.index, 0)

    def start(self):
        self.update()

    def goto(self, index):
        """Jump the counter to an absolute *index*."""
        self.next(index - self.index)

    def iter(self, it):
        """Like Infinite.iter, but adopts ``len(it)`` as ``max`` when sized."""
        try:
            self.max = len(it)
        except TypeError:
            pass
        for item in it:
            yield item
            self.next()
        self.finish()
| mit |
erigones/esdc-ce | api/mon/base/api_views.py | 1 | 7159 | from logging import getLogger
from django.http import Http404
from api.api_views import APIView
# noinspection PyProtectedMember
from api.fields import get_boolean_value
from api.exceptions import PermissionDenied
from api.task.response import FailureTaskResponse, mgmt_task_response
from api.mon import MonitoringServer
from api.mon.messages import LOG_MON_HOSTGROUP_CREATE, LOG_MON_HOSTGROUP_DELETE
from api.mon.base.serializers import HostgroupSerializer
from api.mon.base.tasks import (mon_template_list, mon_hostgroup_list, mon_hostgroup_get, mon_hostgroup_create,
mon_hostgroup_delete)
from que import TG_DC_BOUND, TG_DC_UNBOUND
from vms.models import DefaultDc
logger = getLogger(__name__)
class MonBaseView(APIView):
    """
    Base class for MonTemplateView and MonHostgroupView, which are simple GET-only views.

    Subclasses configure the class attributes below and delegate to the
    _create_task* helpers, which dispatch mgmt tasks against the DC's
    monitoring server.
    """
    _apiview_ = None      # lazily-built apiview metadata dict (see _apiview)
    _mon_server_ = None   # lazily-built MonitoringServer for request.dc (see _mon_server)
    api_object_identifier = NotImplemented  # key naming the object in apiview metadata
    api_view_name_list = NotImplemented     # view name used for list requests
    api_view_name_manage = NotImplemented   # view name used for single-object requests
    mgmt_task_list = NotImplemented         # list task class; used for cache clearing

    def __init__(self, request, name, data, dc_bound=True):
        super(MonBaseView, self).__init__(request)
        self.name = name
        self.data = data
        self.dc_bound = dc_bound

    def _create_apiview(self):
        # Build the apiview metadata dict; with a name set this targets one
        # object (manage view), otherwise the list view.
        if self.name:
            return {
                'view': self.api_view_name_manage,
                'method': self.request.method,
                self.api_object_identifier: self.name,
            }
        else:
            return {
                'view': self.api_view_name_list,
                'method': self.request.method
            }

    @property
    def _apiview(self):
        # Cached apiview metadata for task log purposes.
        if self._apiview_ is None:
            self._apiview_ = self._create_apiview()
        return self._apiview_

    @property
    def _mon_server(self):
        # Cached monitoring server object for the request's datacenter.
        if self._mon_server_ is None:
            self._mon_server_ = MonitoringServer(self.request.dc)
        return self._mon_server_

    def _create_task(self, task, msg=None, tidlock=None, cache_result=None, cache_timeout=None, task_kwargs=None):
        """Dispatch *task* with (dc_id[, name]) args and return (tid, err, res)."""
        if self.name:
            args = (self.request.dc.id, self.name)
        else:
            args = (self.request.dc.id,)

        # Add information for emergency task cleanup - see api.task.utils.mgmt_task decorator
        kwargs = {'mon_server_id': self._mon_server.id, 'dc_bound': self.dc_bound}

        if task_kwargs:
            kwargs.update(task_kwargs)

        # Add apiview information for task log purposes inside tasks
        meta = {'apiview': self._apiview}

        if msg:
            meta['msg'] = msg

        # WARNING: This will change the task group.
        # Please make sure that your request.dc is set to DefaultDC for dc_unbound tasks.
        if self.dc_bound:
            tg = TG_DC_BOUND
        else:
            tg = TG_DC_UNBOUND

        return task.call(self.request, None, args, kwargs=kwargs, meta=meta, tg=tg, tidlock=tidlock,
                         cache_result=cache_result, cache_timeout=cache_timeout)

    def _create_task_and_response(self, task, msg=None, detail_dict=None, tidlock=None, cache_result=None,
                                  cache_timeout=None, task_kwargs=None):
        """Run _create_task() and wrap the outcome in a mgmt task response."""
        tid, err, res = self._create_task(task, msg=msg, tidlock=tidlock, cache_result=cache_result,
                                          cache_timeout=cache_timeout, task_kwargs=task_kwargs)

        # Do not log on error
        if err:
            obj, msg = None, None
        else:
            obj = self._mon_server

        return mgmt_task_response(self.request, tid, err, res, msg=msg, obj=obj, api_view=self._apiview,
                                  detail_dict=detail_dict)

    @classmethod
    def generate_cache_key_base(cls, dc_name, dc_bound, full=False, extended=False):
        """Build the cache key / task lock identifier for a list request."""
        return '%s:%s:%s:full=%s:extended=%s' % (cls.api_view_name_list, dc_name, dc_bound, full, extended)

    @classmethod
    def clear_cache(cls, dc_name, dc_bound, full=False, extended=False):
        """Invalidate the cached list result for the given DC/flags combination."""
        return cls.mgmt_task_list.clear_cache(cls.generate_cache_key_base(dc_name, dc_bound, full=full,
                                                                          extended=extended))

    def get_list(self, task, cache=False, cache_timeout=None):
        """Dispatch a list *task*; the cache key doubles as the task id lock."""
        task_kwargs = {'full': self.full, 'extended': self.extended}

        if cache:
            tidlock = self.generate_cache_key_base(self.request.dc.name, self.dc_bound, full=self.full,
                                                   extended=self.extended)
        else:
            tidlock = None

        return self._create_task_and_response(task, tidlock=tidlock, cache_result=tidlock,
                                              cache_timeout=cache_timeout, task_kwargs=task_kwargs)
class MonTemplateView(MonBaseView):
    """GET-only view that lists monitoring templates; single-template
    retrieval is not supported."""

    api_view_name_list = 'mon_template_list'
    mgmt_task_list = mon_template_list

    def get(self, many=False):
        # Guard clause: only the list form exists for templates.
        if not many:
            raise NotImplementedError
        return self.get_list(mon_template_list, cache=True, cache_timeout=30)
class MonHostgroupView(MonBaseView):
    """View for listing, retrieving, creating and deleting monitoring hostgroups."""

    api_object_identifier = 'hostgroup_name'
    api_view_name_list = 'mon_hostgroup_list'
    api_view_name_manage = 'mon_hostgroup_manage'
    mgmt_task_list = mon_hostgroup_list

    @staticmethod
    def is_dc_bound(data, default=True):
        """Read the dc_bound flag from request *data*, falling back to *default*."""
        if not data:
            return default
        return get_boolean_value(data.get('dc_bound', default))

    @staticmethod
    def switch_dc_to_default(request):
        """Force the request onto the default DC and require Zabbix to be enabled there."""
        if not request.dc.is_default():
            request.dc = DefaultDc()  # Warning: Changing request.dc
            logger.info('"%s %s" user="%s" _changed_ dc="%s" permissions=%s', request.method, request.path,
                        request.user.username, request.dc.name, request.dc_user_permissions)

        if not request.dc.settings.MON_ZABBIX_ENABLED:  # dc1_settings
            raise Http404

    @classmethod
    def get_dc_bound(cls, request, data):
        """Resolve the dc_bound flag; dc-unbound access is staff-only and
        switches the request to the default DC."""
        dc_bound = cls.is_dc_bound(data)

        if not dc_bound:
            if not request.user.is_staff:
                raise PermissionDenied
            cls.switch_dc_to_default(request)

        return dc_bound

    def get(self, many=False):
        if not many:
            return self._create_task_and_response(mon_hostgroup_get)
        return self.get_list(mon_hostgroup_list, cache=True, cache_timeout=30)

    def post(self):
        self.data['name'] = self.name
        serializer = HostgroupSerializer(self.request, data=self.data)

        if not serializer.is_valid():
            return FailureTaskResponse(self.request, serializer.errors)

        # noinspection PyStatementEffect
        serializer.data  # force field evaluation before the task is created

        return self._create_task_and_response(mon_hostgroup_create, msg=LOG_MON_HOSTGROUP_CREATE,
                                              detail_dict=serializer.detail_dict(force_full=True))

    def delete(self):
        return self._create_task_and_response(mon_hostgroup_delete, msg=LOG_MON_HOSTGROUP_DELETE,
                                              detail_dict={'name': self.name})
| apache-2.0 |
samsu/neutron | plugins/vmware/vshield/vcns.py | 7 | 11148 | # Copyright 2013 VMware, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.vshield.common import VcnsApiClient
LOG = logging.getLogger(__name__)
# HTTP verbs used by the vShield Edge REST API.
HTTP_GET = "GET"
HTTP_POST = "POST"
HTTP_DELETE = "DELETE"
HTTP_PUT = "PUT"

# Common prefix of all edge endpoints.
URI_PREFIX = "/api/4.0/edges"

# FWaaS constants
FIREWALL_SERVICE = "firewall/config"
FIREWALL_RULE_RESOURCE = "rules"

# LBaaS constants
LOADBALANCER_SERVICE = "loadbalancer/config"
VIP_RESOURCE = "virtualservers"
POOL_RESOURCE = "pools"
MONITOR_RESOURCE = "monitors"
APP_PROFILE_RESOURCE = "applicationprofiles"

# IPsec VPNaaS Constants
IPSEC_VPN_SERVICE = 'ipsec/config'
class Vcns(object):
    """REST client for the vShield Manager (VSM) edge API.

    Every call returns the ``(header, content)`` pair produced by
    :meth:`do_request`; JSON responses are decoded into dicts unless
    ``decode=False`` is passed.
    """

    def __init__(self, address, user, password):
        self.address = address
        self.user = user
        self.password = password
        self.jsonapi_client = VcnsApiClient.VcnsApiHelper(address, user,
                                                          password, 'json')
        # Bug fix: do_request() dereferences self.xmlapi_client whenever
        # format != 'json', but this attribute was never initialized, which
        # turned any XML request into an AttributeError.
        self.xmlapi_client = VcnsApiClient.VcnsApiHelper(address, user,
                                                         password, 'xml')

    def do_request(self, method, uri, params=None, format='json', **kwargs):
        """Send one request to VSM and return ``(header, content)``.

        :param method: HTTP verb (one of the HTTP_* module constants).
        :param uri: request path, typically built by _build_uri_path().
        :param params: request body, serialized by the API helper.
        :param format: 'json' (default) selects the JSON client; any other
                       value selects the XML client.
        :param kwargs: ``decode=False`` returns the raw response body
                       instead of JSON-decoding it.
        """
        LOG.debug("VcnsApiHelper('%(method)s', '%(uri)s', '%(body)s')", {
            'method': method,
            'uri': uri,
            'body': jsonutils.dumps(params)})
        if format == 'json':
            header, content = self.jsonapi_client.request(method, uri, params)
        else:
            header, content = self.xmlapi_client.request(method, uri, params)
        LOG.debug("Header: '%s'", header)
        LOG.debug("Content: '%s'", content)
        if content == '':
            return header, {}
        if kwargs.get('decode', True):
            content = jsonutils.loads(content)
        return header, content

    #
    # Edge deployment / lifecycle
    #
    def deploy_edge(self, request):
        """Asynchronously deploy an edge; the response carries a job id."""
        uri = URI_PREFIX + "?async=true"
        return self.do_request(HTTP_POST, uri, request, decode=False)

    def get_edge_id(self, job_id):
        """Poll a deployment job to obtain the resulting edge id."""
        uri = URI_PREFIX + "/jobs/%s" % job_id
        return self.do_request(HTTP_GET, uri, decode=True)

    def get_edge_deploy_status(self, edge_id):
        # Consistency fix: was decode="True" (a truthy string); use the
        # boolean like every other method. Behavior is unchanged.
        uri = URI_PREFIX + "/%s/status?getlatest=false" % edge_id
        return self.do_request(HTTP_GET, uri, decode=True)

    def delete_edge(self, edge_id):
        uri = "%s/%s" % (URI_PREFIX, edge_id)
        return self.do_request(HTTP_DELETE, uri)

    def update_interface(self, edge_id, vnic):
        """Reconfigure the vnic identified by vnic['index'] on an edge."""
        uri = "%s/%s/vnics/%d" % (URI_PREFIX, edge_id, vnic['index'])
        return self.do_request(HTTP_PUT, uri, vnic, decode=True)

    #
    # NAT
    #
    def get_nat_config(self, edge_id):
        uri = "%s/%s/nat/config" % (URI_PREFIX, edge_id)
        return self.do_request(HTTP_GET, uri, decode=True)

    def update_nat_config(self, edge_id, nat):
        uri = "%s/%s/nat/config" % (URI_PREFIX, edge_id)
        return self.do_request(HTTP_PUT, uri, nat, decode=True)

    def delete_nat_rule(self, edge_id, rule_id):
        uri = "%s/%s/nat/config/rules/%s" % (URI_PREFIX, edge_id, rule_id)
        return self.do_request(HTTP_DELETE, uri, decode=True)

    def get_edge_status(self, edge_id):
        # NOTE: same endpoint as get_edge_deploy_status(); kept for
        # backward compatibility with existing callers.
        uri = "%s/%s/status?getlatest=false" % (URI_PREFIX, edge_id)
        return self.do_request(HTTP_GET, uri, decode=True)

    def get_edges(self):
        uri = URI_PREFIX
        return self.do_request(HTTP_GET, uri, decode=True)

    def update_routes(self, edge_id, routes):
        uri = "%s/%s/routing/config/static" % (URI_PREFIX, edge_id)
        return self.do_request(HTTP_PUT, uri, routes)

    #
    # Logical switches (NVP/NSX ws.v1 API, not the edge API)
    #
    def create_lswitch(self, lsconfig):
        uri = "/api/ws.v1/lswitch"
        return self.do_request(HTTP_POST, uri, lsconfig, decode=True)

    def delete_lswitch(self, lswitch_id):
        uri = "/api/ws.v1/lswitch/%s" % lswitch_id
        return self.do_request(HTTP_DELETE, uri)

    #
    # Load balancer service
    #
    def get_loadbalancer_config(self, edge_id):
        uri = self._build_uri_path(edge_id, LOADBALANCER_SERVICE)
        return self.do_request(HTTP_GET, uri, decode=True)

    def enable_service_loadbalancer(self, edge_id, config):
        uri = self._build_uri_path(edge_id, LOADBALANCER_SERVICE)
        return self.do_request(HTTP_PUT, uri, config)

    #
    # Firewall (FWaaS)
    #
    def update_firewall(self, edge_id, fw_req):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE)
        return self.do_request(HTTP_PUT, uri, fw_req)

    def delete_firewall(self, edge_id):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE, None)
        return self.do_request(HTTP_DELETE, uri)

    def update_firewall_rule(self, edge_id, vcns_rule_id, fwr_req):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE,
            FIREWALL_RULE_RESOURCE,
            vcns_rule_id)
        return self.do_request(HTTP_PUT, uri, fwr_req)

    def delete_firewall_rule(self, edge_id, vcns_rule_id):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE,
            FIREWALL_RULE_RESOURCE,
            vcns_rule_id)
        return self.do_request(HTTP_DELETE, uri)

    def add_firewall_rule_above(self, edge_id, ref_vcns_rule_id, fwr_req):
        """Insert a firewall rule immediately above an existing rule."""
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE,
            FIREWALL_RULE_RESOURCE)
        uri += "?aboveRuleId=" + ref_vcns_rule_id
        return self.do_request(HTTP_POST, uri, fwr_req)

    def add_firewall_rule(self, edge_id, fwr_req):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE,
            FIREWALL_RULE_RESOURCE)
        return self.do_request(HTTP_POST, uri, fwr_req)

    def get_firewall(self, edge_id):
        uri = self._build_uri_path(edge_id, FIREWALL_SERVICE)
        return self.do_request(HTTP_GET, uri, decode=True)

    def get_firewall_rule(self, edge_id, vcns_rule_id):
        uri = self._build_uri_path(
            edge_id, FIREWALL_SERVICE,
            FIREWALL_RULE_RESOURCE,
            vcns_rule_id)
        return self.do_request(HTTP_GET, uri, decode=True)

    #
    # Edge LBaaS call helpers
    #
    def create_vip(self, edge_id, vip_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            VIP_RESOURCE)
        return self.do_request(HTTP_POST, uri, vip_new)

    def get_vip(self, edge_id, vip_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            VIP_RESOURCE, vip_vseid)
        return self.do_request(HTTP_GET, uri, decode=True)

    def update_vip(self, edge_id, vip_vseid, vip_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            VIP_RESOURCE, vip_vseid)
        return self.do_request(HTTP_PUT, uri, vip_new)

    def delete_vip(self, edge_id, vip_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            VIP_RESOURCE, vip_vseid)
        return self.do_request(HTTP_DELETE, uri)

    def create_pool(self, edge_id, pool_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            POOL_RESOURCE)
        return self.do_request(HTTP_POST, uri, pool_new)

    def get_pool(self, edge_id, pool_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            POOL_RESOURCE, pool_vseid)
        return self.do_request(HTTP_GET, uri, decode=True)

    def update_pool(self, edge_id, pool_vseid, pool_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            POOL_RESOURCE, pool_vseid)
        return self.do_request(HTTP_PUT, uri, pool_new)

    def delete_pool(self, edge_id, pool_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            POOL_RESOURCE, pool_vseid)
        return self.do_request(HTTP_DELETE, uri)

    def create_health_monitor(self, edge_id, monitor_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            MONITOR_RESOURCE)
        return self.do_request(HTTP_POST, uri, monitor_new)

    def get_health_monitor(self, edge_id, monitor_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            MONITOR_RESOURCE, monitor_vseid)
        return self.do_request(HTTP_GET, uri, decode=True)

    def update_health_monitor(self, edge_id, monitor_vseid, monitor_new):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            MONITOR_RESOURCE,
            monitor_vseid)
        return self.do_request(HTTP_PUT, uri, monitor_new)

    def delete_health_monitor(self, edge_id, monitor_vseid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            MONITOR_RESOURCE,
            monitor_vseid)
        return self.do_request(HTTP_DELETE, uri)

    def create_app_profile(self, edge_id, app_profile):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            APP_PROFILE_RESOURCE)
        return self.do_request(HTTP_POST, uri, app_profile)

    def update_app_profile(self, edge_id, app_profileid, app_profile):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            APP_PROFILE_RESOURCE, app_profileid)
        return self.do_request(HTTP_PUT, uri, app_profile)

    def delete_app_profile(self, edge_id, app_profileid):
        uri = self._build_uri_path(
            edge_id, LOADBALANCER_SERVICE,
            APP_PROFILE_RESOURCE,
            app_profileid)
        return self.do_request(HTTP_DELETE, uri)

    #
    # IPsec VPN (VPNaaS)
    #
    def update_ipsec_config(self, edge_id, ipsec_config):
        uri = self._build_uri_path(edge_id, IPSEC_VPN_SERVICE)
        return self.do_request(HTTP_PUT, uri, ipsec_config)

    def delete_ipsec_config(self, edge_id):
        uri = self._build_uri_path(edge_id, IPSEC_VPN_SERVICE)
        return self.do_request(HTTP_DELETE, uri)

    def get_ipsec_config(self, edge_id):
        uri = self._build_uri_path(edge_id, IPSEC_VPN_SERVICE)
        return self.do_request(HTTP_GET, uri)

    def _build_uri_path(self, edge_id,
                        service,
                        resource=None,
                        resource_id=None,
                        parent_resource_id=None,
                        fields=None,
                        relations=None,
                        filters=None,
                        types=None,
                        is_attachment=False):
        """Build ``/api/4.0/edges/<edge>/<service>[/<resource>[/<id>]]``.

        The trailing parameters (parent_resource_id, fields, relations,
        filters, types, is_attachment) are currently unused; they are kept
        so existing keyword callers remain valid.
        """
        uri_prefix = "%s/%s/%s" % (URI_PREFIX, edge_id, service)
        if resource:
            res_path = resource + (resource_id and "/%s" % resource_id or '')
            uri_path = "%s/%s" % (uri_prefix, res_path)
        else:
            uri_path = uri_prefix
        return uri_path
| apache-2.0 |
CiscoSystems/vespa | neutron/tests/unit/test_extension_ext_gw_mode.py | 5 | 17930 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira Networks, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Salvatore Orlando, Nicira, Inc
#
import mock
from oslo.config import cfg
from webob import exc
from neutron.common import constants
from neutron.db import api as db_api
from neutron.db import external_net_db
from neutron.db import l3_db
from neutron.db import l3_gwmode_db
from neutron.db import models_v2
from neutron.extensions import l3
from neutron.extensions import l3_ext_gw_mode
from neutron.openstack.common import uuidutils
from neutron.tests import base
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_l3_plugin
# Shorthand for generating fresh UUIDs in fixtures.
_uuid = uuidutils.generate_uuid

# Fixed port ids/MACs shared by setUp() and the assertions below.
FAKE_GW_PORT_ID = _uuid()
FAKE_GW_PORT_MAC = 'aa:bb:cc:dd:ee:ff'
FAKE_FIP_EXT_PORT_ID = _uuid()
FAKE_FIP_EXT_PORT_MAC = '11:22:33:44:55:66'
FAKE_FIP_INT_PORT_ID = _uuid()
FAKE_FIP_INT_PORT_MAC = 'aa:aa:aa:aa:aa:aa'
FAKE_ROUTER_PORT_ID = _uuid()
FAKE_ROUTER_PORT_MAC = 'bb:bb:bb:bb:bb:bb'
class TestExtensionManager(object):
    """Extension manager stub that grafts the ext-gw-mode attributes onto
    the L3 resource map before handing out the L3 resources."""

    def get_resources(self):
        # Simulate extension of L3 attribute map
        for resource, attrs in l3.RESOURCE_ATTRIBUTE_MAP.items():
            attrs.update(
                l3_ext_gw_mode.EXTENDED_ATTRIBUTES_2_0.get(resource, {}))
        return l3.L3.get_resources()

    def get_actions(self):
        return []

    def get_request_extensions(self):
        return []
# A simple class for making a concrete class out of the mixin
# for the case of a plugin that integrates l3 routing.
class TestDbIntPlugin(test_l3_plugin.TestL3NatIntPlugin,
                      l3_gwmode_db.L3_NAT_db_mixin):
    """Concrete monolithic plugin combining external-net, router and
    ext-gw-mode support for the tests below."""

    supported_extension_aliases = ["external-net", "router", "ext-gw-mode"]


# A simple class for making a concrete class out of the mixin
# for the case of a l3 router service plugin
class TestDbSepPlugin(test_l3_plugin.TestL3NatServicePlugin,
                      l3_gwmode_db.L3_NAT_db_mixin):
    """Concrete standalone L3 service plugin with ext-gw-mode support."""

    supported_extension_aliases = ["router", "ext-gw-mode"]
class TestL3GwModeMixin(base.BaseTestCase):
def setUp(self):
    """Build the shared fixture: one external net with a router gateway
    port and a floating IP, plus one internal net with a router interface
    and a VM port the floating IP can be associated with.

    The neutron context module is mocked; DB rows are inserted directly
    through the session rather than via the plugin API.
    """
    super(TestL3GwModeMixin, self).setUp()
    self.target_object = TestDbIntPlugin()
    # Patch the context
    ctx_patcher = mock.patch('neutron.context', autospec=True)
    mock_context = ctx_patcher.start()
    self.addCleanup(db_api.clear_db)
    self.addCleanup(ctx_patcher.stop)
    self.context = mock_context.get_admin_context()
    # This ensure also calls to elevated work in unit tests
    self.context.elevated.return_value = self.context
    self.context.session = db_api.get_session()
    # Create sample data for tests
    self.ext_net_id = _uuid()
    self.int_net_id = _uuid()
    self.int_sub_id = _uuid()
    self.tenant_id = 'the_tenant'
    self.network = models_v2.Network(
        id=self.ext_net_id,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        status=constants.NET_STATUS_ACTIVE)
    self.net_ext = external_net_db.ExternalNetwork(
        network_id=self.ext_net_id)
    self.context.session.add(self.network)
    # The following is to avoid complains from sqlite on
    # foreign key violations
    self.context.session.flush()
    self.context.session.add(self.net_ext)
    self.router = l3_db.Router(
        id=_uuid(),
        name=None,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        status=constants.NET_STATUS_ACTIVE,
        enable_snat=True,
        gw_port_id=None)
    self.context.session.add(self.router)
    self.context.session.flush()
    # Gateway port on the external network, then wired into the router.
    self.router_gw_port = models_v2.Port(
        id=FAKE_GW_PORT_ID,
        tenant_id=self.tenant_id,
        device_id=self.router.id,
        device_owner=l3_db.DEVICE_OWNER_ROUTER_GW,
        admin_state_up=True,
        status=constants.PORT_STATUS_ACTIVE,
        mac_address=FAKE_GW_PORT_MAC,
        network_id=self.ext_net_id)
    self.router.gw_port_id = self.router_gw_port.id
    self.context.session.add(self.router)
    self.context.session.add(self.router_gw_port)
    self.context.session.flush()
    # External port backing the floating IP.
    self.fip_ext_port = models_v2.Port(
        id=FAKE_FIP_EXT_PORT_ID,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        device_id=self.router.id,
        device_owner=l3_db.DEVICE_OWNER_FLOATINGIP,
        status=constants.PORT_STATUS_ACTIVE,
        mac_address=FAKE_FIP_EXT_PORT_MAC,
        network_id=self.ext_net_id)
    self.context.session.add(self.fip_ext_port)
    self.context.session.flush()
    # Internal network/subnet plus the router's interface port on it.
    self.int_net = models_v2.Network(
        id=self.int_net_id,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        status=constants.NET_STATUS_ACTIVE)
    self.int_sub = models_v2.Subnet(
        id=self.int_sub_id,
        tenant_id=self.tenant_id,
        ip_version=4,
        cidr='3.3.3.0/24',
        gateway_ip='3.3.3.1',
        network_id=self.int_net_id)
    self.router_port = models_v2.Port(
        id=FAKE_ROUTER_PORT_ID,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        device_id=self.router.id,
        device_owner=l3_db.DEVICE_OWNER_ROUTER_INTF,
        status=constants.PORT_STATUS_ACTIVE,
        mac_address=FAKE_ROUTER_PORT_MAC,
        network_id=self.int_net_id)
    self.router_port_ip_info = models_v2.IPAllocation(
        port_id=self.router_port.id,
        network_id=self.int_net.id,
        subnet_id=self.int_sub_id,
        ip_address='3.3.3.1')
    self.context.session.add(self.int_net)
    self.context.session.add(self.int_sub)
    self.context.session.add(self.router_port)
    self.context.session.add(self.router_port_ip_info)
    self.context.session.flush()
    # VM port on the internal network and an unassociated floating IP.
    self.fip_int_port = models_v2.Port(
        id=FAKE_FIP_INT_PORT_ID,
        tenant_id=self.tenant_id,
        admin_state_up=True,
        device_id='something',
        device_owner='compute:nova',
        status=constants.PORT_STATUS_ACTIVE,
        mac_address=FAKE_FIP_INT_PORT_MAC,
        network_id=self.int_net_id)
    self.fip_int_ip_info = models_v2.IPAllocation(
        port_id=self.fip_int_port.id,
        network_id=self.int_net.id,
        subnet_id=self.int_sub_id,
        ip_address='3.3.3.3')
    self.fip = l3_db.FloatingIP(
        id=_uuid(),
        floating_ip_address='1.1.1.2',
        floating_network_id=self.ext_net_id,
        floating_port_id=FAKE_FIP_EXT_PORT_ID,
        fixed_port_id=None,
        fixed_ip_address=None,
        router_id=None)
    self.context.session.add(self.fip_int_port)
    self.context.session.add(self.fip_int_ip_info)
    self.context.session.add(self.fip)
    self.context.session.flush()
    self.fip_request = {'port_id': FAKE_FIP_INT_PORT_ID,
                        'tenant_id': self.tenant_id}
def _reset_ext_gw(self):
    # Reset external gateway (detach the gw port from the fixture router).
    self.router.gw_port_id = None
    self.context.session.add(self.router)
    self.context.session.flush()

def _test_update_router_gw(self, gw_info, expected_enable_snat):
    """Apply *gw_info* to the fixture router and verify gw port + SNAT flag."""
    self.target_object._update_router_gw_info(
        self.context, self.router.id, gw_info)
    router = self.target_object._get_router(
        self.context, self.router.id)
    try:
        self.assertEqual(FAKE_GW_PORT_ID,
                         router.gw_port.id)
        self.assertEqual(FAKE_GW_PORT_MAC,
                         router.gw_port.mac_address)
    except AttributeError:
        # gw_port is None when the gateway was removed — that must be
        # explicit, not an accidental partial state.
        self.assertIsNone(router.gw_port)
    self.assertEqual(expected_enable_snat, router.enable_snat)
    def test_update_router_gw_with_gw_info_none(self):
        # gw_info=None must clear the gateway; SNAT then defaults to True.
        self._test_update_router_gw(None, True)
def test_update_router_gw_with_network_only(self):
info = {'network_id': self.ext_net_id}
self._test_update_router_gw(info, True)
def test_update_router_gw_with_snat_disabled(self):
info = {'network_id': self.ext_net_id,
'enable_snat': False}
self._test_update_router_gw(info, False)
    def test_make_router_dict_no_ext_gw(self):
        # Without a gateway port the external gateway info must be None.
        self._reset_ext_gw()
        router_dict = self.target_object._make_router_dict(self.router)
        self.assertIsNone(router_dict[l3.EXTERNAL_GW_INFO])
def test_make_router_dict_with_ext_gw(self):
router_dict = self.target_object._make_router_dict(self.router)
self.assertEqual({'network_id': self.ext_net_id,
'enable_snat': True},
router_dict[l3.EXTERNAL_GW_INFO])
def test_make_router_dict_with_ext_gw_snat_disabled(self):
self.router.enable_snat = False
router_dict = self.target_object._make_router_dict(self.router)
self.assertEqual({'network_id': self.ext_net_id,
'enable_snat': False},
router_dict[l3.EXTERNAL_GW_INFO])
def test_build_routers_list_no_ext_gw(self):
self._reset_ext_gw()
router_dict = self.target_object._make_router_dict(self.router)
routers = self.target_object._build_routers_list([router_dict], [])
self.assertEqual(1, len(routers))
router = routers[0]
self.assertIsNone(router.get('gw_port'))
self.assertIsNone(router.get('enable_snat'))
def test_build_routers_list_with_ext_gw(self):
router_dict = self.target_object._make_router_dict(self.router)
routers = self.target_object._build_routers_list(
[router_dict], [self.router.gw_port])
self.assertEqual(1, len(routers))
router = routers[0]
self.assertIsNotNone(router.get('gw_port'))
self.assertEqual(FAKE_GW_PORT_ID, router['gw_port']['id'])
self.assertTrue(router.get('enable_snat'))
def test_build_routers_list_with_ext_gw_snat_disabled(self):
self.router.enable_snat = False
router_dict = self.target_object._make_router_dict(self.router)
routers = self.target_object._build_routers_list(
[router_dict], [self.router.gw_port])
self.assertEqual(1, len(routers))
router = routers[0]
self.assertIsNotNone(router.get('gw_port'))
self.assertEqual(FAKE_GW_PORT_ID, router['gw_port']['id'])
self.assertFalse(router.get('enable_snat'))
class ExtGwModeIntTestCase(test_db_plugin.NeutronDbPluginV2TestCase,
                           test_l3_plugin.L3NatTestCaseMixin):
    """API-level tests for router external gateway SNAT mode, with the
    L3 extension served by the core plugin itself (integrated setup)."""
    def setUp(self, plugin=None, svc_plugins=None, ext_mgr=None):
        # Store l3 resource attribute map as it will be updated
        self._l3_attribute_map_bk = {}
        for item in l3.RESOURCE_ATTRIBUTE_MAP:
            self._l3_attribute_map_bk[item] = (
                l3.RESOURCE_ATTRIBUTE_MAP[item].copy())
        plugin = plugin or (
            'neutron.tests.unit.test_extension_ext_gw_mode.TestDbIntPlugin')
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        ext_mgr = ext_mgr or TestExtensionManager()
        super(ExtGwModeIntTestCase, self).setUp(plugin=plugin,
                                                ext_mgr=ext_mgr,
                                                service_plugins=svc_plugins)
        self.addCleanup(self.restore_l3_attribute_map)
    def restore_l3_attribute_map(self):
        """Restore the original l3 resource attribute map."""
        l3.RESOURCE_ATTRIBUTE_MAP = self._l3_attribute_map_bk
    def tearDown(self):
        # No extra teardown needed beyond the base class.
        super(ExtGwModeIntTestCase, self).tearDown()
    def _set_router_external_gateway(self, router_id, network_id,
                                     snat_enabled=None,
                                     expected_code=exc.HTTPOk.code,
                                     neutron_context=None):
        """PUT the router's external_gateway_info through the API."""
        ext_gw_info = {'network_id': network_id}
        # Need to set enable_snat also if snat_enabled == False
        if snat_enabled is not None:
            ext_gw_info['enable_snat'] = snat_enabled
        return self._update('routers', router_id,
                            {'router': {'external_gateway_info':
                                        ext_gw_info}},
                            expected_code=expected_code,
                            neutron_context=neutron_context)
    def test_router_create_show_no_ext_gwinfo(self):
        name = 'router1'
        tenant_id = _uuid()
        expected_value = [('name', name), ('tenant_id', tenant_id),
                          ('admin_state_up', True), ('status', 'ACTIVE'),
                          ('external_gateway_info', None)]
        with self.router(name=name, admin_state_up=True,
                         tenant_id=tenant_id) as router:
            res = self._show('routers', router['router']['id'])
            for k, v in expected_value:
                self.assertEqual(res['router'][k], v)
    def _test_router_create_show_ext_gwinfo(self, snat_input_value,
                                            snat_expected_value):
        """Create a router with a gateway and verify the SNAT flag the
        API reports for the given ``snat_input_value``."""
        name = 'router1'
        tenant_id = _uuid()
        with self.subnet() as s:
            ext_net_id = s['subnet']['network_id']
            self._set_net_external(ext_net_id)
            input_value = {'network_id': ext_net_id}
            # Omit enable_snat entirely unless a boolean was supplied.
            if snat_input_value in (True, False):
                input_value['enable_snat'] = snat_input_value
            expected_value = [('name', name), ('tenant_id', tenant_id),
                              ('admin_state_up', True), ('status', 'ACTIVE'),
                              ('external_gateway_info',
                               {'network_id': ext_net_id,
                                'enable_snat': snat_expected_value})]
            with self.router(
                name=name, admin_state_up=True, tenant_id=tenant_id,
                external_gateway_info=input_value) as router:
                res = self._show('routers', router['router']['id'])
                for k, v in expected_value:
                    self.assertEqual(res['router'][k], v)
    def test_router_create_show_ext_gwinfo_default(self):
        self._test_router_create_show_ext_gwinfo(None, True)
    def test_router_create_show_ext_gwinfo_with_snat_enabled(self):
        self._test_router_create_show_ext_gwinfo(True, True)
    def test_router_create_show_ext_gwinfo_with_snat_disabled(self):
        self._test_router_create_show_ext_gwinfo(False, False)
    def _test_router_update_ext_gwinfo(self, snat_input_value,
                                       snat_expected_value=False,
                                       expected_http_code=exc.HTTPOk.code):
        """Update a router's gateway info and verify the resulting SNAT
        flag, or only the HTTP error code for invalid input."""
        with self.router() as r:
            with self.subnet() as s:
                try:
                    ext_net_id = s['subnet']['network_id']
                    self._set_net_external(ext_net_id)
                    self._set_router_external_gateway(
                        r['router']['id'], ext_net_id,
                        snat_enabled=snat_input_value,
                        expected_code=expected_http_code)
                    # On error responses there is nothing more to verify.
                    if expected_http_code != exc.HTTPOk.code:
                        return
                    body = self._show('routers', r['router']['id'])
                    res_gw_info = body['router']['external_gateway_info']
                    self.assertEqual(res_gw_info['network_id'], ext_net_id)
                    self.assertEqual(res_gw_info['enable_snat'],
                                     snat_expected_value)
                finally:
                    # Always detach the gateway so the router/subnet context
                    # managers can delete their resources.
                    self._remove_external_gateway_from_router(
                        r['router']['id'], ext_net_id)
    def test_router_update_ext_gwinfo_default(self):
        self._test_router_update_ext_gwinfo(None, True)
    def test_router_update_ext_gwinfo_with_snat_enabled(self):
        self._test_router_update_ext_gwinfo(True, True)
    def test_router_update_ext_gwinfo_with_snat_disabled(self):
        self._test_router_update_ext_gwinfo(False, False)
    def test_router_update_ext_gwinfo_with_invalid_snat_setting(self):
        self._test_router_update_ext_gwinfo(
            'xxx', None, expected_http_code=exc.HTTPBadRequest.code)
class ExtGwModeSepTestCase(ExtGwModeIntTestCase):
    """Same API tests as the parent, but with L3 provided by a separate
    service plugin instead of the core plugin."""
    def setUp(self, plugin=None):
        # Store l3 resource attribute map as it will be updated
        self._l3_attribute_map_bk = {}
        for item in l3.RESOURCE_ATTRIBUTE_MAP:
            self._l3_attribute_map_bk[item] = (
                l3.RESOURCE_ATTRIBUTE_MAP[item].copy())
        plugin = plugin or (
            'neutron.tests.unit.test_l3_plugin.TestNoL3NatPlugin')
        # the L3 service plugin
        l3_plugin = ('neutron.tests.unit.test_extension_ext_gw_mode.'
                     'TestDbSepPlugin')
        svc_plugins = {'l3_plugin_name': l3_plugin}
        # for these tests we need to enable overlapping ips
        cfg.CONF.set_default('allow_overlapping_ips', True)
        super(ExtGwModeSepTestCase, self).setUp(plugin=plugin,
                                                svc_plugins=svc_plugins)
        self.addCleanup(self.restore_l3_attribute_map)
| apache-2.0 |
alexryndin/ambari | ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/status_params.py | 4 | 1772 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.libraries.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions import StackFeature
# Runtime configuration and temp dir supplied by the Ambari agent command.
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
# Stack identity/version, used to gate version-dependent behavior.
stack_name = default("/hostLevelParams/stack_name", None)
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
# True when this stack version supports a Ranger KMS pid file.
stack_supports_pid = stack_version_formatted and check_stack_feature(StackFeature.RANGER_KMS_PID_SUPPORT, stack_version_formatted)
# Directory holding the Ranger KMS pid file (presumably read by the
# service status check — verify in the status command implementation).
ranger_kms_pid_dir = default("/configurations/kms-env/ranger_kms_pid_dir", "/var/run/ranger_kms")
ranger_kms_pid_file = format('{ranger_kms_pid_dir}/rangerkms.pid') | apache-2.0 |
fredmorcos/attic | projects/pet/archive/pet_python/Expense.py | 1 | 3174 | # This file is part of PET.
#
# PET is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PET is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PET. If not, see <http://www.gnu.org/licenses/>.
class Expense:
    """A single expense record: an amount, one or more tags, and an
    optional note.

    Note: ``date`` is used only for error reporting; it is not stored on
    the instance (callers keep expenses keyed by date externally).
    """
    def __init__(self, date, amount, tags, note):
        """Validate and build an expense.

        :param date: date label, used only in error messages
        :param amount: int or float amount, stored as float
        :param tags: non-empty list of tag strings
        :param note: optional free-form string (or None)
        :raises ValueError: on any malformed field
        """
        ESTR_BAD_AMT_VAL = "Bad amount value for an expense at date %s"
        ESTR_BAD_TAG_LST = "Bad tags list for an expense at date %s"
        ESTR_EMP_TAG_LST = "Tags list for an expense at date %s is empty"
        ESTR_INV_NTE = "Note for an expense at date %s is invalid"
        ESTR_INV_TAG = "Tag for an expense at date %s is invalid"
        if type(amount) is not float and type(amount) is not int:
            raise ValueError(ESTR_BAD_AMT_VAL % date)
        elif type(tags) is not list:
            raise ValueError(ESTR_BAD_TAG_LST % date)
        elif len(tags) == 0:
            raise ValueError(ESTR_EMP_TAG_LST % date)
        elif note is not None and type(note) is not str:
            raise ValueError(ESTR_INV_NTE % date)
        for tag in tags:
            if type(tag) is not str:
                # Bug fix: the date placeholder was previously left
                # unformatted, so the message contained a literal "%s".
                raise ValueError(ESTR_INV_TAG % date)
        self.amount = float(amount)
        self.tags = tags
        self.note = note
    def __str_tags__(self):
        # Render tags as "[tag1, tag2, ...]"; tags are non-empty per
        # __init__ validation.
        return "[" + ", ".join(self.tags) + "]"
    def __str_note__(self):
        # Render the note, or "<none>" when absent/empty.
        if self.note is None or self.note == "":
            return "<none>"
        else:
            return self.note
    def __str__(self):
        return ("%.2f %s %s" % (self.amount, self.__str_tags__(),
                                self.__str_note__()))
    def to_json(self):
        """Return a JSON-serializable dict (date is kept externally)."""
        return {"amount": self.amount, "tags": self.tags, "note": self.note}
    @classmethod
    def parse_amount(cls, amount_str):
        """Parse an amount string into a float; raises ValueError."""
        return float(amount_str)
    @classmethod
    def parse_tags(cls, tags_str, opts):
        """Parse a comma-separated tag string into a list of stripped,
        non-empty tags.

        :param opts: options object forwarded to verbose_log
        :raises ValueError: if no non-empty tag remains
        """
        new_tags = []
        for tag in tags_str.split(","):
            tag = tag.strip()
            if tag == "":
                verbose_log(opts, "Ignoring empty tag")
            else:
                new_tags.append(tag)
        if len(new_tags) == 0:
            raise ValueError("List of tags is empty")
        return new_tags
    @classmethod
    def parse_note(cls, note_str):
        """Normalize a note string by stripping surrounding whitespace."""
        return note_str.strip()
    @classmethod
    def from_json(cls, date, data):
        """Build an Expense from a parsed-JSON dict.

        :raises ValueError: if "amount" or "tags" is missing/None
        """
        # Bug fix: these messages previously lacked a %s placeholder, so
        # the "%" formatting itself raised TypeError instead of the
        # intended ValueError.
        if data["amount"] is None:
            raise ValueError("No amount value for expense at date %s" % date)
        if data["tags"] is None:
            raise ValueError("No tags value for expense at date %s" % date)
        return Expense(date, data["amount"], data["tags"],
                       data.get("note", None))
| isc |
Foxfanmedium/python_training | OnlineCoursera/mail_ru/Python_1/env/Lib/site-packages/pygments/lexer.py | 27 | 31054 | # -*- coding: utf-8 -*-
"""
pygments.lexer
~~~~~~~~~~~~~~
Base lexer classes.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import re
import sys
import time
from pygments.filter import apply_filters, Filter
from pygments.filters import get_filter_by_name
from pygments.token import Error, Text, Other, _TokenType
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode
from pygments.regexopt import regex_opt
# Public API of this module.
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
           'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
           'default', 'words']
# (BOM bytes, codec name) pairs, checked in order by the 'chardet'
# branch of Lexer.get_tokens before falling back to chardet detection.
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
                 (b'\xff\xfe\0\0', 'utf-32'),
                 (b'\0\0\xfe\xff', 'utf-32be'),
                 (b'\xff\xfe', 'utf-16'),
                 (b'\xfe\xff', 'utf-16be')]
# Fallback analyse_text: never claims the input.
_default_analyse = staticmethod(lambda x: 0.0)
class LexerMeta(type):
    """
    This metaclass automagically converts ``analyse_text`` methods into
    static methods which always return float values.
    """
    def __new__(mcs, name, bases, d):
        # Wrap a class-local analyse_text (if any) so it behaves like a
        # static method and coerces its return value to float.
        if 'analyse_text' in d:
            d['analyse_text'] = make_analysator(d['analyse_text'])
        return type.__new__(mcs, name, bases, d)
@add_metaclass(LexerMeta)
class Lexer(object):
    """
    Lexer for a specific language.
    Basic options recognized:
    ``stripnl``
        Strip leading and trailing newlines from the input (default: True).
    ``stripall``
        Strip all leading and trailing whitespace from the input
        (default: False).
    ``ensurenl``
        Make sure that the input ends with a newline (default: True). This
        is required for some lexers that consume input linewise.
        .. versionadded:: 1.3
    ``tabsize``
        If given and greater than 0, expand tabs in the input (default: 0).
    ``encoding``
        If given, must be an encoding name. This encoding will be used to
        convert the input string to Unicode, if it is not already a Unicode
        string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
        Latin1 detection. Can also be ``'chardet'`` to use the chardet
        library, if it is installed.
    ``inencoding``
        Overrides the ``encoding`` if given.
    """
    #: Name of the lexer
    name = None
    #: Shortcuts for the lexer
    aliases = []
    #: File name globs
    filenames = []
    #: Secondary file name globs
    alias_filenames = []
    #: MIME types
    mimetypes = []
    #: Priority, should multiple lexers match and no content is provided
    priority = 0
    def __init__(self, **options):
        self.options = options
        self.stripnl = get_bool_opt(options, 'stripnl', True)
        self.stripall = get_bool_opt(options, 'stripall', False)
        self.ensurenl = get_bool_opt(options, 'ensurenl', True)
        self.tabsize = get_int_opt(options, 'tabsize', 0)
        self.encoding = options.get('encoding', 'guess')
        # 'inencoding' takes precedence over 'encoding' when both are set.
        self.encoding = options.get('inencoding') or self.encoding
        self.filters = []
        for filter_ in get_list_opt(options, 'filters', ()):
            self.add_filter(filter_)
    def __repr__(self):
        if self.options:
            return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
                                                     self.options)
        else:
            return '<pygments.lexers.%s>' % self.__class__.__name__
    def add_filter(self, filter_, **options):
        """
        Add a new stream filter to this lexer.
        """
        # Filter names (strings) are resolved through the filter registry.
        if not isinstance(filter_, Filter):
            filter_ = get_filter_by_name(filter_, **options)
        self.filters.append(filter_)
    def analyse_text(text):
        """
        Has to return a float between ``0`` and ``1`` that indicates
        if a lexer wants to highlight this text. Used by ``guess_lexer``.
        If this method returns ``0`` it won't highlight it in any case, if
        it returns ``1`` highlighting with this lexer is guaranteed.
        The `LexerMeta` metaclass automatically wraps this function so
        that it works like a static method (no ``self`` or ``cls``
        parameter) and the return value is automatically converted to
        `float`. If the return value is an object that is boolean `False`
        it's the same as if the return values was ``0.0``.
        """
    def get_tokens(self, text, unfiltered=False):
        """
        Return an iterable of (tokentype, value) pairs generated from
        `text`. If `unfiltered` is set to `True`, the filtering mechanism
        is bypassed even if filters are defined.
        Also preprocess the text, i.e. expand tabs and strip it if
        wanted and applies registered filters.
        """
        if not isinstance(text, text_type):
            # Byte input: decode to text according to self.encoding.
            if self.encoding == 'guess':
                text, _ = guess_decode(text)
            elif self.encoding == 'chardet':
                try:
                    import chardet
                except ImportError:
                    raise ImportError('To enable chardet encoding guessing, '
                                      'please install the chardet library '
                                      'from http://chardet.feedparser.org/')
                # check for BOM first
                decoded = None
                for bom, encoding in _encoding_map:
                    if text.startswith(bom):
                        decoded = text[len(bom):].decode(encoding, 'replace')
                        break
                # no BOM found, so use chardet
                if decoded is None:
                    enc = chardet.detect(text[:1024])  # Guess using first 1KB
                    decoded = text.decode(enc.get('encoding') or 'utf-8',
                                          'replace')
                text = decoded
            else:
                text = text.decode(self.encoding)
                if text.startswith(u'\ufeff'):
                    text = text[len(u'\ufeff'):]
        else:
            # Already text: only strip a leading BOM character.
            if text.startswith(u'\ufeff'):
                text = text[len(u'\ufeff'):]
        # text now *is* a unicode string
        # Normalize line endings, then apply strip/tab/newline options.
        text = text.replace('\r\n', '\n')
        text = text.replace('\r', '\n')
        if self.stripall:
            text = text.strip()
        elif self.stripnl:
            text = text.strip('\n')
        if self.tabsize > 0:
            text = text.expandtabs(self.tabsize)
        if self.ensurenl and not text.endswith('\n'):
            text += '\n'
        def streamer():
            # Drop the index; get_tokens yields only (type, value).
            for _, t, v in self.get_tokens_unprocessed(text):
                yield t, v
        stream = streamer()
        if not unfiltered:
            stream = apply_filters(stream, self.filters, self)
        return stream
    def get_tokens_unprocessed(self, text):
        """
        Return an iterable of (index, tokentype, value) pairs where "index"
        is the starting position of the token within the input text.
        In subclasses, implement this method as a generator to
        maximize effectiveness.
        """
        raise NotImplementedError
class DelegatingLexer(Lexer):
    """
    This lexer takes two lexer as arguments. A root lexer and
    a language lexer. First everything is scanned using the language
    lexer, afterwards all ``Other`` tokens are lexed using the root
    lexer.
    The lexers from the ``template`` lexer package use this base lexer.
    """
    def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
        # ``_needle`` is the token type whose text is re-lexed with the
        # root lexer (``Other`` by default).
        self.root_lexer = _root_lexer(**options)
        self.language_lexer = _language_lexer(**options)
        self.needle = _needle
        Lexer.__init__(self, **options)
    def get_tokens_unprocessed(self, text):
        # Concatenate all "needle" text into one buffer for the root
        # lexer, remembering where the language lexer's other tokens must
        # be spliced back in; do_insertions() merges the two streams.
        buffered = ''
        insertions = []
        lng_buffer = []
        for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
            if t is self.needle:
                if lng_buffer:
                    insertions.append((len(buffered), lng_buffer))
                    lng_buffer = []
                buffered += v
            else:
                lng_buffer.append((i, t, v))
        if lng_buffer:
            insertions.append((len(buffered), lng_buffer))
        return do_insertions(insertions,
                             self.root_lexer.get_tokens_unprocessed(buffered))
# ------------------------------------------------------------------------------
# RegexLexer and ExtendedRegexLexer
#
class include(str):  # pylint: disable=invalid-name
    """
    Indicates that a state should include rules from another state.
    The state name is the string value itself; RegexLexerMeta expands it
    while preprocessing the token definitions.
    """
    pass
class _inherit(object):
    """
    Indicates that a state should inherit from its superclass.
    """
    def __repr__(self):
        return 'inherit'
# Singleton marker used inside token definitions.
inherit = _inherit()  # pylint: disable=invalid-name
class combined(tuple):  # pylint: disable=invalid-name
    """
    Indicates a state combined from multiple states.
    """
    def __new__(cls, *args):
        # Store the component state names as the tuple contents.
        return tuple.__new__(cls, args)
    def __init__(self, *args):
        # tuple.__init__ doesn't do anything
        pass
class _PseudoMatch(object):
"""
A pseudo match object constructed from a string.
"""
def __init__(self, start, text):
self._text = text
self._start = start
def start(self, arg=None):
return self._start
def end(self, arg=None):
return self._start + len(self._text)
def group(self, arg=None):
if arg:
raise IndexError('No such group')
return self._text
def groups(self):
return (self._text,)
def groupdict(self):
return {}
def bygroups(*args):
    """
    Callback that yields multiple actions for each group in the match.
    """
    def callback(lexer, match, ctx=None):
        for i, action in enumerate(args):
            if action is None:
                # Group explicitly ignored.
                continue
            elif type(action) is _TokenType:
                # Plain token type: emit the group text directly,
                # skipping empty groups.
                data = match.group(i + 1)
                if data:
                    yield match.start(i + 1), action, data
            else:
                # Callable action: run it on a pseudo-match restricted
                # to this group's text and offset.
                data = match.group(i + 1)
                if data is not None:
                    if ctx:
                        ctx.pos = match.start(i + 1)
                    for item in action(lexer,
                                       _PseudoMatch(match.start(i + 1), data), ctx):
                        if item:
                            yield item
        if ctx:
            ctx.pos = match.end()
    return callback
class _This(object):
    """
    Special singleton used for indicating the caller class.
    Used by ``using``.
    """
# Singleton instance passed as ``using(this)``.
this = _This()
def using(_other, **kwargs):
    """
    Callback that processes the match with a different lexer.
    The keyword arguments are forwarded to the lexer, except `state` which
    is handled separately.
    `state` specifies the state that the new lexer will start in, and can
    be an enumerable such as ('root', 'inline', 'string') or a simple
    string which is assumed to be on top of the root state.
    Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
    """
    # Translate the 'state' option into the 'stack' argument of
    # get_tokens_unprocessed.
    gt_kwargs = {}
    if 'state' in kwargs:
        s = kwargs.pop('state')
        if isinstance(s, (list, tuple)):
            gt_kwargs['stack'] = s
        else:
            gt_kwargs['stack'] = ('root', s)
    if _other is this:
        # Re-lex the matched text with the *calling* lexer.
        def callback(lexer, match, ctx=None):
            # if keyword arguments are given the callback
            # function has to create a new lexer instance
            if kwargs:
                # XXX: cache that somehow
                kwargs.update(lexer.options)
                lx = lexer.__class__(**kwargs)
            else:
                lx = lexer
            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    else:
        # Re-lex the matched text with a fresh instance of another lexer.
        def callback(lexer, match, ctx=None):
            # XXX: cache that somehow
            kwargs.update(lexer.options)
            lx = _other(**kwargs)
            s = match.start()
            for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
                yield i + s, t, v
            if ctx:
                ctx.pos = match.end()
    return callback
class default:
    """
    Indicates a state or state action (e.g. #pop) to apply.
    For example default('#pop') is equivalent to ('', Token, '#pop')
    Note that state tuples may be used as well.
    .. versionadded:: 2.0
    """
    def __init__(self, state):
        # Stored transition; expanded by RegexLexerMeta into an
        # empty-match rule that only performs the state change.
        self.state = state
class words(Future):
    """
    Indicates a list of literal words that is transformed into an optimized
    regex that matches any of the words.
    .. versionadded:: 2.0
    """
    def __init__(self, words, prefix='', suffix=''):
        self.words = words
        self.prefix = prefix
        self.suffix = suffix
    def get(self):
        # Resolved lazily by RegexLexerMeta._process_regex (Future API).
        return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
class RegexLexerMeta(LexerMeta):
    """
    Metaclass for RegexLexer, creates the self._tokens attribute from
    self.tokens on the first instantiation.
    """
    def _process_regex(cls, regex, rflags, state):
        """Preprocess the regular expression component of a token definition."""
        # ``Future`` values (e.g. ``words``) are resolved here.
        if isinstance(regex, Future):
            regex = regex.get()
        return re.compile(regex, rflags).match
    def _process_token(cls, token):
        """Preprocess the token component of a token definition."""
        assert type(token) is _TokenType or callable(token), \
            'token type must be simple type or callable, not %r' % (token,)
        return token
    def _process_new_state(cls, new_state, unprocessed, processed):
        """Preprocess the state transition action of a token definition."""
        if isinstance(new_state, str):
            # an existing state
            if new_state == '#pop':
                return -1
            elif new_state in unprocessed:
                return (new_state,)
            elif new_state == '#push':
                return new_state
            elif new_state[:5] == '#pop:':
                # '#pop:n' pops n states at once.
                return -int(new_state[5:])
            else:
                assert False, 'unknown new state %r' % new_state
        elif isinstance(new_state, combined):
            # combine a new state from existing ones
            tmp_state = '_tmp_%d' % cls._tmpname
            cls._tmpname += 1
            itokens = []
            for istate in new_state:
                assert istate != new_state, 'circular state ref %r' % istate
                itokens.extend(cls._process_state(unprocessed,
                                                  processed, istate))
            processed[tmp_state] = itokens
            return (tmp_state,)
        elif isinstance(new_state, tuple):
            # push more than one state
            for istate in new_state:
                assert (istate in unprocessed or
                        istate in ('#pop', '#push')), \
                    'unknown new state ' + istate
            return new_state
        else:
            assert False, 'unknown new state def %r' % new_state
    def _process_state(cls, unprocessed, processed, state):
        """Preprocess a single state definition."""
        assert type(state) is str, "wrong state name %r" % state
        assert state[0] != '#', "invalid state name %r" % state
        if state in processed:
            return processed[state]
        tokens = processed[state] = []
        rflags = cls.flags
        for tdef in unprocessed[state]:
            if isinstance(tdef, include):
                # it's a state reference
                assert tdef != state, "circular state reference %r" % state
                tokens.extend(cls._process_state(unprocessed, processed,
                                                 str(tdef)))
                continue
            if isinstance(tdef, _inherit):
                # should be processed already, but may not in the case of:
                # 1. the state has no counterpart in any parent
                # 2. the state includes more than one 'inherit'
                continue
            if isinstance(tdef, default):
                # Expand to an empty-match rule that only transitions.
                new_state = cls._process_new_state(tdef.state, unprocessed, processed)
                tokens.append((re.compile('').match, None, new_state))
                continue
            assert type(tdef) is tuple, "wrong rule def %r" % tdef
            try:
                rex = cls._process_regex(tdef[0], rflags, state)
            except Exception as err:
                raise ValueError("uncompilable regex %r in state %r of %r: %s" %
                                 (tdef[0], state, cls, err))
            token = cls._process_token(tdef[1])
            if len(tdef) == 2:
                # Two-element rule: no state transition.
                new_state = None
            else:
                new_state = cls._process_new_state(tdef[2],
                                                   unprocessed, processed)
            tokens.append((rex, token, new_state))
        return tokens
    def process_tokendef(cls, name, tokendefs=None):
        """Preprocess a dictionary of token definitions."""
        processed = cls._all_tokens[name] = {}
        tokendefs = tokendefs or cls.tokens[name]
        for state in list(tokendefs):
            cls._process_state(tokendefs, processed, state)
        return processed
    def get_tokendefs(cls):
        """
        Merge tokens from superclasses in MRO order, returning a single tokendef
        dictionary.
        Any state that is not defined by a subclass will be inherited
        automatically. States that *are* defined by subclasses will, by
        default, override that state in the superclass. If a subclass wishes to
        inherit definitions from a superclass, it can use the special value
        "inherit", which will cause the superclass' state definition to be
        included at that point in the state.
        """
        tokens = {}
        inheritable = {}
        for c in cls.__mro__:
            toks = c.__dict__.get('tokens', {})
            for state, items in iteritems(toks):
                curitems = tokens.get(state)
                if curitems is None:
                    # N.b. because this is assigned by reference, sufficiently
                    # deep hierarchies are processed incrementally (e.g. for
                    # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
                    # will not see any inherits in B).
                    tokens[state] = items
                    try:
                        inherit_ndx = items.index(inherit)
                    except ValueError:
                        continue
                    inheritable[state] = inherit_ndx
                    continue
                inherit_ndx = inheritable.pop(state, None)
                if inherit_ndx is None:
                    continue
                # Replace the "inherit" value with the items
                curitems[inherit_ndx:inherit_ndx+1] = items
                try:
                    # N.b. this is the index in items (that is, the superclass
                    # copy), so offset required when storing below.
                    new_inh_ndx = items.index(inherit)
                except ValueError:
                    pass
                else:
                    inheritable[state] = inherit_ndx + new_inh_ndx
        return tokens
    def __call__(cls, *args, **kwds):
        """Instantiate cls after preprocessing its token definitions."""
        # Process the token definitions once per class, lazily on first
        # instantiation.
        if '_tokens' not in cls.__dict__:
            cls._all_tokens = {}
            cls._tmpname = 0
            if hasattr(cls, 'token_variants') and cls.token_variants:
                # don't process yet
                pass
            else:
                cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
        return type.__call__(cls, *args, **kwds)
@add_metaclass(RegexLexerMeta)
class RegexLexer(Lexer):
    """
    Base for simple stateful regular expression-based lexers.
    Simplifies the lexing process so that you need only
    provide a list of states and regular expressions.
    """
    #: Flags for compiling the regular expressions.
    #: Defaults to MULTILINE.
    flags = re.MULTILINE
    #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
    #:
    #: The initial state is 'root'.
    #: ``new_state`` can be omitted to signify no state transition.
    #: If it is a string, the state is pushed on the stack and changed.
    #: If it is a tuple of strings, all states are pushed on the stack and
    #: the current state will be the topmost.
    #: It can also be ``combined('state1', 'state2', ...)``
    #: to signify a new, anonymous state combined from the rules of two
    #: or more existing ones.
    #: Furthermore, it can be '#pop' to signify going back one step in
    #: the state stack, or '#push' to push the current state on the stack
    #: again.
    #:
    #: The tuple can also be replaced with ``include('state')``, in which
    #: case the rules from the state named by the string are included in the
    #: current one.
    tokens = {}
    def get_tokens_unprocessed(self, text, stack=('root',)):
        """
        Split ``text`` into (tokentype, text) pairs.
        ``stack`` is the inital stack (default: ``['root']``)
        """
        pos = 0
        tokendefs = self._tokens
        statestack = list(stack)
        statetokens = tokendefs[statestack[-1]]
        while 1:
            # Try each rule of the current state at the current position.
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, pos)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield pos, action, m.group()
                        else:
                            for item in action(self, m):
                                yield item
                    pos = m.end()
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    statestack.pop()
                                elif state == '#push':
                                    statestack.append(statestack[-1])
                                else:
                                    statestack.append(state)
                        elif isinstance(new_state, int):
                            # pop
                            del statestack[new_state:]
                        elif new_state == '#push':
                            statestack.append(statestack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[statestack[-1]]
                    break
            else:
                # We are here only if all state tokens have been considered
                # and there was not a match on any of them.
                try:
                    if text[pos] == '\n':
                        # at EOL, reset state to "root"
                        statestack = ['root']
                        statetokens = tokendefs['root']
                        yield pos, Text, u'\n'
                        pos += 1
                        continue
                    # Emit one Error token for the unmatched character.
                    yield pos, Error, text[pos]
                    pos += 1
                except IndexError:
                    # End of input reached.
                    break
class LexerContext(object):
    """
    Mutable holder for the position state of an ExtendedRegexLexer run:
    the text, the current and end positions, and the state stack.
    """
    def __init__(self, text, pos, stack=None, end=None):
        self.text = text
        self.pos = pos
        # NB: a falsy ``end`` (including 0) falls back to len(text).
        self.end = end if end else len(text)
        self.stack = stack if stack else ['root']
    def __repr__(self):
        return 'LexerContext(%r, %r, %r)' % (
            self.text, self.pos, self.stack)
class ExtendedRegexLexer(RegexLexer):
    """
    A RegexLexer that uses a context object to store its state.
    """
    def get_tokens_unprocessed(self, text=None, context=None):
        """
        Split ``text`` into (tokentype, text) pairs.
        If ``context`` is given, use this lexer context instead.
        """
        tokendefs = self._tokens
        if not context:
            ctx = LexerContext(text, 0)
            statetokens = tokendefs['root']
        else:
            ctx = context
            statetokens = tokendefs[ctx.stack[-1]]
            text = ctx.text
        while 1:
            # Try each rule of the current state at the current position.
            for rexmatch, action, new_state in statetokens:
                m = rexmatch(text, ctx.pos, ctx.end)
                if m:
                    if action is not None:
                        if type(action) is _TokenType:
                            yield ctx.pos, action, m.group()
                            ctx.pos = m.end()
                        else:
                            for item in action(self, m, ctx):
                                yield item
                            if not new_state:
                                # altered the state stack?
                                statetokens = tokendefs[ctx.stack[-1]]
                            # CAUTION: callback must set ctx.pos!
                    if new_state is not None:
                        # state transition
                        if isinstance(new_state, tuple):
                            for state in new_state:
                                if state == '#pop':
                                    ctx.stack.pop()
                                elif state == '#push':
                                    ctx.stack.append(ctx.stack[-1])
                                else:
                                    ctx.stack.append(state)
                        elif isinstance(new_state, int):
                            # pop
                            del ctx.stack[new_state:]
                        elif new_state == '#push':
                            ctx.stack.append(ctx.stack[-1])
                        else:
                            assert False, "wrong state def: %r" % new_state
                        statetokens = tokendefs[ctx.stack[-1]]
                    break
            else:
                # No rule matched at ctx.pos.
                try:
                    if ctx.pos >= ctx.end:
                        break
                    if text[ctx.pos] == '\n':
                        # at EOL, reset state to "root"
                        ctx.stack = ['root']
                        statetokens = tokendefs['root']
                        yield ctx.pos, Text, u'\n'
                        ctx.pos += 1
                        continue
                    yield ctx.pos, Error, text[ctx.pos]
                    ctx.pos += 1
                except IndexError:
                    break
def do_insertions(insertions, tokens):
    """
    Helper for lexers which must combine the results of several
    sublexers.

    ``insertions`` is a list of ``(index, itokens)`` pairs.
    Each ``itokens`` iterable should be inserted at position
    ``index`` into the token stream given by the ``tokens``
    argument.

    The result is a combined token stream.

    TODO: clean up the code here.
    """
    insertions = iter(insertions)
    try:
        index, itokens = next(insertions)
    except StopIteration:
        # no insertions
        for item in tokens:
            yield item
        return

    realpos = None
    insleft = True

    # iterate over the token stream where we want to insert
    # the tokens from the insertion list.
    for i, t, v in tokens:
        # first iteration. store the position of first item
        if realpos is None:
            realpos = i
        oldi = 0
        # The current token may straddle one or more insertion points;
        # split its value around each of them.
        while insleft and i + len(v) >= index:
            tmpval = v[oldi:index - i]
            yield realpos, t, tmpval
            realpos += len(tmpval)
            # Emit the inserted sub-stream at the current output position.
            for it_index, it_token, it_value in itokens:
                yield realpos, it_token, it_value
                realpos += len(it_value)
            oldi = index - i
            try:
                index, itokens = next(insertions)
            except StopIteration:
                insleft = False
                break  # not strictly necessary
        # Remainder of the token after the last insertion point.
        yield realpos, t, v[oldi:]
        realpos += len(v) - oldi

    # leftover tokens: insertions positioned past the end of the stream.
    while insleft:
        # no normal tokens, set realpos to zero
        realpos = realpos or 0
        for p, t, v in itokens:
            yield realpos, t, v
            realpos += len(v)
        try:
            index, itokens = next(insertions)
        except StopIteration:
            insleft = False
            break  # not strictly necessary
class ProfilingRegexLexerMeta(RegexLexerMeta):
    """Metaclass for ProfilingRegexLexer, collects regex timing info."""

    def _process_regex(cls, regex, rflags, state):
        # Expand `words` helpers into a plain pattern string first.
        if isinstance(regex, words):
            pattern = regex_opt(regex.words, prefix=regex.prefix,
                                suffix=regex.suffix)
        else:
            pattern = regex
        compiled = re.compile(pattern, rflags)

        def match_func(text, pos, endpos=sys.maxsize):
            # Per (state, pattern) record: [call count, total seconds].
            record = cls._prof_data[-1].setdefault((state, pattern), [0, 0.0])
            started = time.time()
            result = compiled.match(text, pos, endpos)
            elapsed = time.time() - started
            record[0] += 1
            record[1] += elapsed
            return result
        return match_func
@add_metaclass(ProfilingRegexLexerMeta)
class ProfilingRegexLexer(RegexLexer):
    """Drop-in replacement for RegexLexer that does profiling of its regexes."""

    _prof_data = []
    _prof_sort_index = 4  # defaults to time per call

    def get_tokens_unprocessed(self, text, stack=('root',)):
        # this needs to be a stack, since using(this) will produce nested calls
        self.__class__._prof_data.append({})
        for token in RegexLexer.get_tokens_unprocessed(self, text, stack):
            yield token
        profile = self.__class__._prof_data.pop()
        # One row per (state, regex): truncated pattern, ncalls,
        # total ms and ms per call.
        rows = [(state, repr(regex).strip('u\'').replace('\\\\', '\\')[:65],
                 calls, 1000 * total, 1000 * total / calls)
                for (state, regex), (calls, total) in profile.items()]
        rows.sort(key=lambda row: row[self._prof_sort_index], reverse=True)
        sum_total = sum(row[3] for row in rows)
        print()
        print('Profiling result for %s lexing %d chars in %.3f ms' %
              (self.__class__.__name__, len(text), sum_total))
        print('=' * 110)
        print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
        print('-' * 110)
        for row in rows:
            print('%-20s %-65s %5d %8.4f %8.4f' % row)
        print('=' * 110)
| apache-2.0 |
kvar/ansible | lib/ansible/modules/network/f5/bigip_monitor_dns.py | 23 | 35117 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_monitor_dns
short_description: Manage DNS monitors on a BIG-IP
description:
- Manages DNS monitors on a BIG-IP.
version_added: 2.7
options:
name:
description:
- Specifies the name of the monitor.
type: str
required: True
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(dns)
parent on the C(Common) partition.
type: str
default: /Common/dns
description:
description:
- The description of the monitor.
type: str
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run.
- This value B(must) be less than the C(timeout) value.
- When creating a new monitor, if this parameter is not provided, the
default C(5) will be used.
type: int
up_interval:
description:
- Specifies the interval for the system to use to perform the health check
when a resource is up.
- When C(0), specifies that the system uses the interval specified in
C(interval) to check the health of the resource.
- When any other number, enables specification of a different interval to
use when checking the health of a resource that is up.
- When creating a new monitor, if this parameter is not provided, the
default C(0) will be used.
type: int
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request.
- If the target responds within the set time period, it is considered up.
- If the target does not respond within the set time period, it is considered down.
- You can change this number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second.
- If this parameter is not provided when creating a new monitor, then the default
value will be C(16).
type: int
transparent:
description:
- Specifies whether the monitor operates in transparent mode.
- Monitors in transparent mode can monitor pool members through firewalls.
- When creating a new monitor, if this parameter is not provided, then the default
value will be C(no).
type: bool
reverse:
description:
- Specifies whether the monitor operates in reverse mode.
- When the monitor is in reverse mode, a successful receive string match
marks the monitored object down instead of up. You can use the
this mode only if you configure the C(receive) option.
- This parameter is not compatible with the C(time_until_up) parameter. If
C(time_until_up) is specified, it must be C(0). Or, if it already exists, it
must be C(0).
type: bool
receive:
description:
- Specifies the IP address that the monitor uses from the resource record sections
of the DNS response.
- The IP address should be specified in the dotted-decimal notation or IPv6 notation.
type: str
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up.
- A value of 0 will cause a node to be marked up immediately after a valid
response is received from the node.
- If this parameter is not provided when creating a new monitor, then the default
value will be C(0).
type: int
manual_resume:
description:
- Specifies whether the system automatically changes the status of a resource
to B(enabled) at the next successful monitor check.
- If you set this option to C(yes), you must manually re-enable the resource
before the system can use it for load balancing connections.
- When creating a new monitor, if this parameter is not specified, the default
value is C(no).
- When C(yes), specifies that you must manually re-enable the resource after an
unsuccessful monitor check.
- When C(no), specifies that the system automatically changes the status of a
resource to B(enabled) at the next successful monitor check.
type: bool
ip:
description:
- IP address part of the IP/port definition.
- If this parameter is not provided when creating a new monitor, then the
default value will be C(*).
type: str
port:
description:
- Port address part of the IP/port definition.
- If this parameter is not provided when creating a new monitor, then the default
value will be C(*).
- Note that if specifying an IP address, a value between 1 and 65535 must be specified.
type: str
query_name:
description:
- Specifies a query name for the monitor to use in a DNS query.
type: str
query_type:
description:
- Specifies the type of DNS query that the monitor sends.
- When creating a new monitor, if this parameter is not specified, the default
value is C(a).
- When C(a), specifies that the monitor will send a DNS query of type A.
- When C(aaaa), specifies that the monitor will send a DNS query of type AAAA.
type: str
choices:
- a
- aaaa
answer_section_contains:
description:
      - Specifies what the answer section of the DNS response must contain for the monitor to mark the node up.
- When creating a new monitor, if this value is not specified, the default
value is C(query-type).
- When C(query-type), specifies that the response should contain at least one
answer of which the resource record type matches the query type.
- When C(any-type), specifies that the DNS message should contain at least one answer.
- When C(anything), specifies that an empty answer is enough to mark the status of
the node up.
type: str
choices:
- any-type
- anything
- query-type
accept_rcode:
description:
- Specifies the RCODE required in the response for an up status.
- When creating a new monitor, if this parameter is not specified, the default
value is C(no-error).
- When C(no-error), specifies that the status of the node will be marked up if
the received DNS message has no error.
- When C(anything), specifies that the status of the node will be marked up
irrespective of the RCODE in the DNS message received.
- If this parameter is set to C(anything), it will disregard the C(receive)
string, and nullify it if the monitor is being updated.
type: str
choices:
- no-error
- anything
adaptive:
description:
- Specifies whether adaptive response time monitoring is enabled for this monitor.
- When C(yes), the monitor determines the state of a service based on how divergent
from the mean latency a monitor probe for that service is allowed to be.
Also, values for the C(allowed_divergence), C(adaptive_limit), and
        C(sampling_timespan) will be enforced.
- When C(disabled), the monitor determines the state of a service based on the
C(interval), C(up_interval), C(time_until_up), and C(timeout) monitor settings.
type: bool
allowed_divergence_type:
description:
- When specifying a new monitor, if C(adaptive) is C(yes), the default is
C(relative)
- When C(absolute), the number of milliseconds the latency of a monitor probe
can exceed the mean latency of a monitor probe for the service being probed.
In typical cases, if the monitor detects three probes in a row that miss the
latency value you set, the pool member or node is marked down.
- When C(relative), the percentage of deviation the latency of a monitor probe
can exceed the mean latency of a monitor probe for the service being probed.
type: str
choices:
- relative
- absolute
allowed_divergence_value:
description:
- When specifying a new monitor, if C(adaptive) is C(yes), and C(type) is
C(relative), the default is C(25) percent.
type: int
adaptive_limit:
description:
- Specifies the absolute number of milliseconds that may not be exceeded by a monitor
probe, regardless of C(allowed_divergence) setting, for a probe to be
considered successful.
- This value applies regardless of the value of the C(allowed_divergence) setting.
- While this value can be configured when C(adaptive) is C(no), it will not take
effect on the system until C(adaptive) is C(yes).
type: int
sampling_timespan:
description:
- Specifies the length, in seconds, of the probe history window that the system
uses to calculate the mean latency and standard deviation of a monitor probe.
- While this value can be configured when C(adaptive) is C(no), it will not take
effect on the system until C(adaptive) is C(yes).
type: int
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a DNS monitor
bigip_monitor_dns:
name: DNS-UDP-V6
interval: 2
query_name: localhost
query_type: aaaa
up_interval: 5
adaptive: no
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: str
sample: http
ip:
description: The new IP of IP/port definition.
returned: changed
type: str
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
adaptive:
description: Whether adaptive is enabled or not.
returned: changed
type: bool
sample: yes
accept_rcode:
description: RCODE required in the response for an up status.
returned: changed
type: str
sample: no-error
allowed_divergence_type:
description: Type of divergence used for adaptive response time monitoring.
returned: changed
type: str
sample: absolute
allowed_divergence_value:
description:
- Value of the type of divergence used for adaptive response time monitoring.
- May be C(percent) or C(ms) depending on whether C(relative) or C(absolute).
returned: changed
type: int
sample: 25
description:
description: The description of the monitor.
returned: changed
type: str
sample: Important Monitor
adaptive_limit:
description: Absolute number of milliseconds that may not be exceeded by a monitor probe.
returned: changed
type: int
sample: 200
sampling_timespan:
  description: Length, in seconds, of the probe history window used to compute mean latency and standard deviation.
returned: changed
type: int
sample: 200
answer_section_contains:
description: Type of DNS query that the monitor sends.
returned: changed
type: str
sample: query-type
manual_resume:
description:
- Whether the system automatically changes the status of a resource to enabled at the
next successful monitor check.
returned: changed
type: str
  sample: yes
up_interval:
description: Interval for the system to use to perform the health check when a resource is up.
returned: changed
type: int
sample: 0
query_name:
description: Query name for the monitor to use in a DNS query.
returned: changed
type: str
sample: foo
query_type:
description: Type of DNS query that the monitor sends. Either C(a) or C(aaaa).
returned: changed
type: str
sample: aaaa
receive:
description: IP address that the monitor uses from the resource record sections of the DNS response.
returned: changed
type: str
sample: 2.3.2.4
reverse:
description: Whether the monitor operates in reverse mode.
returned: changed
type: bool
sample: yes
port:
description:
- Alias port or service for the monitor to check, on behalf of the pools or pool
members with which the monitor is associated.
returned: changed
type: str
sample: 80
transparent:
description: Whether the monitor operates in transparent mode.
returned: changed
type: bool
sample: no
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.ipaddress import validate_ip_v6_address
from library.module_utils.network.f5.ipaddress import validate_ip_address
from library.module_utils.network.f5.compare import cmp_str_with_none
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.ipaddress import validate_ip_v6_address
from ansible.module_utils.network.f5.ipaddress import validate_ip_address
from ansible.module_utils.network.f5.compare import cmp_str_with_none
class Parameters(AnsibleF5Parameters):
    """Shared parameter handling for the DNS monitor module.

    Maps between the module's snake_case option names and the BIG-IP
    REST API's camelCase attribute names, and normalizes/validates
    values flowing in both directions.
    """

    # REST API attribute name -> module option name.
    api_map = {
        'acceptRcode': 'accept_rcode',
        'adaptiveDivergenceType': 'allowed_divergence_type',
        'adaptiveDivergenceValue': 'allowed_divergence_value',
        'adaptiveLimit': 'adaptive_limit',
        'adaptiveSamplingTimespan': 'sampling_timespan',
        'answerContains': 'answer_section_contains',
        'manualResume': 'manual_resume',
        'timeUntilUp': 'time_until_up',
        'upInterval': 'up_interval',
        'qname': 'query_name',
        'qtype': 'query_type',
        'recv': 'receive',
        'defaultsFrom': 'parent',
    }

    # Attributes sent to the REST API on create/update.
    # NOTE: a duplicate 'destination' entry was removed; each attribute
    # needs to appear only once.
    api_attributes = [
        'adaptive',
        'acceptRcode',
        'adaptiveDivergenceType',
        'adaptiveDivergenceValue',
        'adaptiveLimit',
        'adaptiveSamplingTimespan',
        'answerContains',
        'defaultsFrom',
        'description',
        'destination',
        'interval',
        'manualResume',
        'qname',
        'qtype',
        'recv',
        'reverse',
        'timeout',
        'timeUntilUp',
        'transparent',
        'upInterval',
    ]

    # Attributes reported back to the user in the module result.
    returnables = [
        'adaptive',
        'accept_rcode',
        'allowed_divergence_type',
        'allowed_divergence_value',
        'description',
        'adaptive_limit',
        'sampling_timespan',
        'answer_section_contains',
        'manual_resume',
        'time_until_up',
        'up_interval',
        'query_name',
        'query_type',
        'receive',
        'reverse',
        'timeout',
        'interval',
        'transparent',
        'parent',
        'ip',
        'port',
    ]

    # Attributes considered when diffing desired vs. current state.
    updatables = [
        'adaptive',
        'accept_rcode',
        'allowed_divergence_type',
        'allowed_divergence_value',
        'adaptive_limit',
        'sampling_timespan',
        'answer_section_contains',
        'description',
        'manual_resume',
        'time_until_up',
        'up_interval',
        'query_name',
        'query_type',
        'receive',
        'reverse',
        'timeout',
        'transparent',
        'parent',
        'destination',
        'interval',
    ]

    @property
    def type(self):
        """Monitor type is fixed for this module."""
        return 'dns'

    @property
    def destination(self):
        """Combined 'ip:port' form used by the BIG-IP API."""
        if self.ip is None and self.port is None:
            return None
        destination = '{0}:{1}'.format(self.ip, self.port)
        return destination

    @destination.setter
    def destination(self, value):
        # Split an API-side 'ip:port' back into the separate values.
        ip, port = value.split(':')
        self._values['ip'] = ip
        self._values['port'] = port

    @property
    def interval(self):
        """Monitor interval in seconds; must be within 1..86400.

        Raises:
            F5ModuleError: if the value is outside the allowed range.
        """
        if self._values['interval'] is None:
            return None
        # Per BZ617284, the BIG-IP UI does not raise a warning about this.
        # So I do.
        # BUGFIX: the previous check used the chained comparison
        # `1 > value > 86400`, which can never be true, so out-of-range
        # intervals were silently accepted. Validate the range explicitly.
        value = int(self._values['interval'])
        if value < 1 or value > 86400:
            raise F5ModuleError(
                "Interval value must be between 1 and 86400"
            )
        return value

    @property
    def timeout(self):
        if self._values['timeout'] is None:
            return None
        return int(self._values['timeout'])

    @property
    def ip(self):
        """Alias IP address; '*' (or 0.0.0.0) means 'any'."""
        if self._values['ip'] is None:
            return None
        if self._values['ip'] in ['*', '0.0.0.0']:
            return '*'
        elif is_valid_ip(self._values['ip']):
            return self._values['ip']
        else:
            raise F5ModuleError(
                "The provided 'ip' parameter is not an IP address."
            )

    @property
    def receive(self):
        """Expected answer address; must be a valid IPv4/IPv6 address or ''."""
        if self._values['receive'] is None:
            return None
        if self._values['receive'] == '':
            # Empty string explicitly clears the receive string on-device.
            return ''
        if is_valid_ip(self._values['receive']):
            return self._values['receive']
        raise F5ModuleError(
            "The specified 'receive' parameter must be either an IPv4 or v6 address."
        )

    @property
    def port(self):
        """Alias port; '*' means 'any', otherwise an integer."""
        if self._values['port'] is None:
            return None
        elif self._values['port'] == '*':
            return '*'
        return int(self._values['port'])

    @property
    def time_until_up(self):
        if self._values['time_until_up'] is None:
            return None
        return int(self._values['time_until_up'])

    @property
    def parent(self):
        """Fully-qualified parent template name (e.g. /Common/dns)."""
        if self._values['parent'] is None:
            return None
        result = fq_name(self.partition, self._values['parent'])
        return result
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""

    @property
    def description(self):
        # The API reports a literal 'none' for an unset description;
        # normalize that (and a missing value) to Python None.
        value = self._values['description']
        if value is None or value == 'none':
            return None
        return value
class ModuleParameters(Parameters):
    """Parameters as supplied by the user, translated to API values."""

    @staticmethod
    def _flag(value):
        # Map a module boolean onto the 'enabled'/'disabled' strings the
        # BIG-IP API expects; None means "not specified".
        if value is None:
            return None
        return 'enabled' if value is True else 'disabled'

    @property
    def description(self):
        value = self._values['description']
        if value is None:
            return None
        # Both 'none' and '' mean "clear the description".
        if value in ('none', ''):
            return ''
        return value

    @property
    def manual_resume(self):
        return self._flag(self._values['manual_resume'])

    @property
    def reverse(self):
        return self._flag(self._values['reverse'])

    @property
    def transparent(self):
        return self._flag(self._values['transparent'])

    @property
    def adaptive(self):
        return self._flag(self._values['adaptive'])
class Changes(Parameters):
    def to_return(self):
        """Return a dict of reportable attribute values.

        Collection is best-effort: if any property raises mid-way, the
        exception is deliberately swallowed and whatever was gathered so
        far (possibly unfiltered) is returned.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    # Changes in the form sent to the BIG-IP REST API; this module needs
    # no extra translation beyond what Changes/Parameters provide.
    pass
class ReportableChanges(Changes):
    """Changes rendered back into the yes/no form users supplied."""

    @property
    def manual_resume(self):
        value = self._values['manual_resume']
        return flatten_boolean(value)

    @property
    def reverse(self):
        value = self._values['reverse']
        return flatten_boolean(value)

    @property
    def transparent(self):
        value = self._values['transparent']
        return flatten_boolean(value)

    @property
    def adaptive(self):
        value = self._values['adaptive']
        return flatten_boolean(value)
class Difference(object):
    """Computes, per parameter, the value to push when want differs from have."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        # Prefer a dedicated comparison property for the parameter;
        # fall back to plain inequality via __default.
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            return self.__default(param)

    @property
    def parent(self):
        # BIG-IP does not allow re-parenting an existing monitor.
        if self.want.parent != self.have.parent:
            raise F5ModuleError(
                "The parent monitor cannot be changed"
            )

    @property
    def destination(self):
        if self.want.ip is None and self.want.port is None:
            return None
        # Fill in whichever half of ip:port the user omitted from the
        # device's current value before comparing.
        if self.want.port is None:
            self.want.update({'port': self.have.port})
        if self.want.ip is None:
            self.want.update({'ip': self.have.ip})
        if self.want.port in [None, '*'] and self.want.ip != '*':
            raise F5ModuleError(
                "Specifying an IP address requires that a port number be specified"
            )
        if self.want.destination != self.have.destination:
            return self.want.destination

    @property
    def interval(self):
        # Enforce interval < timeout, sourcing each side from want when
        # provided, otherwise from the device's current value.
        if self.want.timeout is not None and self.want.interval is not None:
            if self.want.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.timeout is not None:
            if self.have.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.interval is not None:
            if self.want.interval >= self.have.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        if self.want.interval != self.have.interval:
            return self.want.interval

    def __default(self, param):
        # Generic comparison: report want's value when it differs, or when
        # have has no such attribute at all.
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            return attr1

    @property
    def description(self):
        # Treats None and the API's 'none' placeholder as equivalent.
        return cmp_str_with_none(self.want.description, self.have.description)
class ModuleManager(object):
    """Drives the module: diffs desired vs. device state and issues REST calls."""

    def __init__(self, *args, **kwargs):
        # 'module' is the AnsibleModule instance carrying user params and
        # check-mode state.
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def _set_changed_options(self):
        """Seed self.changes with every option the user supplied (create path)."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _update_changed_options(self):
        """Diff want vs. have; populate self.changes. Returns True if changed."""
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                # A dict result lets a comparison report several changed
                # keys at once.
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def should_update(self):
        """Return True when the device state differs from the desired state."""
        result = self._update_changed_options()
        if result:
            return True
        return False

    def exec_module(self):
        """Entry point: apply the requested state and return the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        """Forward any queued deprecation warnings to Ansible."""
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def present(self):
        """Ensure the monitor exists; update it if it already does."""
        if self.exists():
            return self.update()
        else:
            return self.create()

    def exists(self):
        """Return True when the monitor is present on the device."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/dns/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            # Non-JSON response is treated as "not found".
            return False
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True

    def _address_type_matches_query_type(self, type, validator):
        # True when the effective query type is `type` but the effective
        # receive address fails to match it (per `validator`), on either
        # the desired or the current configuration.
        if self.want.query_type == type and self.have.query_type == type:
            if self.want.receive is not None and validator(self.want.receive):
                return True
            if self.have.receive is not None and validator(self.have.receive):
                return True

    def update(self):
        """Update an existing monitor. Returns True when a change was made."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        # Cross-field validation mirrors the checks in create(), but also
        # considers the values already on the device.
        if self.want.reverse == 'enabled':
            if not self.want.receive and not self.have.receive:
                raise F5ModuleError(
                    "A 'receive' string must be specified when setting 'reverse'."
                )
            if self.want.time_until_up != 0 and self.have.time_until_up != 0:
                raise F5ModuleError(
                    "Monitors with the 'reverse' attribute are not currently compatible with 'time_until_up'."
                )
        if self._address_type_matches_query_type('a', validate_ip_v6_address):
            raise F5ModuleError(
                "Monitor has a IPv6 address. Only a 'query_type' of 'aaaa' is supported for IPv6."
            )
        elif self._address_type_matches_query_type('aaaa', validate_ip_address):
            raise F5ModuleError(
                "Monitor has a IPv4 address. Only a 'query_type' of 'a' is supported for IPv4."
            )
        if self.want.accept_rcode == 'anything':
            if self.want.receive is not None and is_valid_ip(self.want.receive) and self.have.receive is not None:
                raise F5ModuleError(
                    "No 'receive' string may be specified, or exist, when 'accept_rcode' is 'anything'."
                )
            elif self.want.receive is None and self.have.receive is not None:
                # Clear the on-device receive string.
                self.want.update({'receive': ''})
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def remove(self):
        """Delete the monitor; verify the deletion actually took effect."""
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        return True

    def create(self):
        """Create a new monitor after validating cross-field constraints."""
        self._set_changed_options()
        if self.want.reverse == 'enabled':
            if self.want.time_until_up != 0:
                raise F5ModuleError(
                    "Monitors with the 'reverse' attribute are not currently compatible with 'time_until_up'."
                )
            if not self.want.receive:
                raise F5ModuleError(
                    "A 'receive' string must be specified when setting 'reverse'."
                )
        # The receive address family must match the query type.
        if self.want.receive is not None and validate_ip_v6_address(self.want.receive) and self.want.query_type == 'a':
            raise F5ModuleError(
                "Monitor has a IPv6 address. Only a 'query_type' of 'aaaa' is supported for IPv6."
            )
        elif self.want.receive is not None and validate_ip_address(self.want.receive) and self.want.query_type == 'aaaa':
            raise F5ModuleError(
                "Monitor has a IPv4 address. Only a 'query_type' of 'a' is supported for IPv4."
            )
        if self.want.accept_rcode == 'anything':
            if self.want.receive is not None and is_valid_ip(self.want.receive):
                raise F5ModuleError(
                    "No 'receive' string may be specified, or exist, when 'accept_rcode' is 'anything'."
                )
            elif self.want.receive is None:
                self.want.update({'receive': ''})
        if self.want.query_name is None:
            raise F5ModuleError(
                "'query_name' is required when creating a new DNS monitor."
            )
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True

    def create_on_device(self):
        """POST the new monitor definition to the device."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/dns/".format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def update_on_device(self):
        """PATCH only the changed attributes on the existing monitor."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/dns/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def absent(self):
        """Ensure the monitor is removed. Returns True when a change was made."""
        if self.exists():
            return self.remove()
        return False

    def remove_from_device(self):
        """DELETE the monitor from the device."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/dns/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True

    def read_current_from_device(self):
        """GET the monitor's current configuration as ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/dns/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        # Module-specific options; the common F5 connection options are
        # merged in below.
        spec = dict(
            name=dict(required=True),
            parent=dict(default='/Common/dns'),
            receive=dict(),
            ip=dict(),
            description=dict(),
            port=dict(),
            interval=dict(type='int'),
            timeout=dict(type='int'),
            manual_resume=dict(type='bool'),
            reverse=dict(type='bool'),
            transparent=dict(type='bool'),
            time_until_up=dict(type='int'),
            up_interval=dict(type='int'),
            accept_rcode=dict(choices=['no-error', 'anything']),
            adaptive=dict(type='bool'),
            sampling_timespan=dict(type='int'),
            adaptive_limit=dict(type='int'),
            answer_section_contains=dict(
                choices=['any-type', 'anything', 'query-type']
            ),
            query_name=dict(),
            query_type=dict(choices=['a', 'aaaa']),
            allowed_divergence_type=dict(choices=['relative', 'absolute']),
            allowed_divergence_value=dict(type='int'),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        self.argument_spec = dict(f5_argument_spec)
        self.argument_spec.update(spec)
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
    )
    try:
        results = ModuleManager(module=module).exec_module()
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))
    else:
        module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
40223245/40223245W11 | static/Brython3.1.0-20150301-090019/Lib/unittest/loader.py | 739 | 13883 | """Loading unittests."""
import os
import re
import sys
import traceback
import types
import functools
from fnmatch import fnmatch
from . import case, suite, util
__unittest = True
# what about .pyc or .pyo (etc)
# we would need to avoid loading the same tests multiple times
# from '.py', '.pyc' *and* '.pyo'
VALID_MODULE_NAME = re.compile(r'[_a-z]\w*\.py$', re.IGNORECASE)
def _make_failed_import_test(name, suiteClass):
    """Return a suite whose single test re-raises the import failure for *name*."""
    detail = traceback.format_exc()
    message = 'Failed to import test module: %s\n%s' % (name, detail)
    error = ImportError(message)
    return _make_failed_test('ModuleImportFailure', name, error, suiteClass)
def _make_failed_load_tests(name, exception, suiteClass):
    """Return a suite whose single test re-raises *exception* from load_tests."""
    classname = 'LoadTestsFailure'
    return _make_failed_test(classname, name, exception, suiteClass)
def _make_failed_test(classname, methodname, exception, suiteClass):
    """Synthesize a TestCase subclass whose only test raises *exception* when run."""
    def _raiser(self):
        # Re-raise the captured exception at test-execution time.
        raise exception
    TestClass = type(classname, (case.TestCase,), {methodname: _raiser})
    return suiteClass((TestClass(methodname),))
def _jython_aware_splitext(path):
if path.lower().endswith('$py.class'):
return path[:-9]
return os.path.splitext(path)[0]
class TestLoader(object):
    """
    This class is responsible for loading tests according to various criteria
    and returning them wrapped in a TestSuite
    """
    # Class-level defaults; instances (or subclasses) may override them.
    testMethodPrefix = 'test'
    sortTestMethodsUsing = staticmethod(util.three_way_cmp)
    suiteClass = suite.TestSuite
    _top_level_dir = None
    def loadTestsFromTestCase(self, testCaseClass):
        """Return a suite of all tests cases contained in testCaseClass"""
        if issubclass(testCaseClass, suite.TestSuite):
            raise TypeError("Test cases should not be derived from TestSuite." \
                            " Maybe you meant to derive from TestCase?")
        testCaseNames = self.getTestCaseNames(testCaseClass)
        if not testCaseNames and hasattr(testCaseClass, 'runTest'):
            # No 'test*' methods found: fall back to the single runTest method.
            testCaseNames = ['runTest']
        loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))
        return loaded_suite
    def loadTestsFromModule(self, module, use_load_tests=True):
        """Return a suite of all tests cases contained in the given module"""
        tests = []
        for name in dir(module):
            obj = getattr(module, name)
            if isinstance(obj, type) and issubclass(obj, case.TestCase):
                tests.append(self.loadTestsFromTestCase(obj))
        load_tests = getattr(module, 'load_tests', None)
        tests = self.suiteClass(tests)
        if use_load_tests and load_tests is not None:
            try:
                # load_tests protocol: the module builds its own suite.
                return load_tests(self, tests, None)
            except Exception as e:
                return _make_failed_load_tests(module.__name__, e,
                                               self.suiteClass)
        return tests
    def loadTestsFromName(self, name, module=None):
        """Return a suite of all tests cases given a string specifier.
        The name may resolve either to a module, a test case class, a
        test method within a test case class, or a callable object which
        returns a TestCase or TestSuite instance.
        The method optionally resolves the names relative to a given module.
        """
        parts = name.split('.')
        if module is None:
            parts_copy = parts[:]
            # Import the longest importable prefix of the dotted name.
            while parts_copy:
                try:
                    module = __import__('.'.join(parts_copy))
                    break
                except ImportError:
                    del parts_copy[-1]
                    if not parts_copy:
                        raise
            # __import__ returned the top-level package; skip its name when
            # traversing attributes below.
            parts = parts[1:]
        obj = module
        for part in parts:
            parent, obj = obj, getattr(obj, part)
        if isinstance(obj, types.ModuleType):
            return self.loadTestsFromModule(obj)
        elif isinstance(obj, type) and issubclass(obj, case.TestCase):
            return self.loadTestsFromTestCase(obj)
        elif (isinstance(obj, types.FunctionType) and
              isinstance(parent, type) and
              issubclass(parent, case.TestCase)):
            name = parts[-1]
            inst = parent(name)
            # static methods follow a different path
            if not isinstance(getattr(inst, name), types.FunctionType):
                return self.suiteClass([inst])
        elif isinstance(obj, suite.TestSuite):
            return obj
        if callable(obj):
            test = obj()
            if isinstance(test, suite.TestSuite):
                return test
            elif isinstance(test, case.TestCase):
                return self.suiteClass([test])
            else:
                raise TypeError("calling %s returned %s, not a test" %
                                (obj, test))
        else:
            raise TypeError("don't know how to make test from: %s" % obj)
    def loadTestsFromNames(self, names, module=None):
        """Return a suite of all tests cases found using the given sequence
        of string specifiers. See 'loadTestsFromName()'.
        """
        suites = [self.loadTestsFromName(name, module) for name in names]
        return self.suiteClass(suites)
    def getTestCaseNames(self, testCaseClass):
        """Return a sorted sequence of method names found within testCaseClass
        """
        def isTestMethod(attrname, testCaseClass=testCaseClass,
                         prefix=self.testMethodPrefix):
            return attrname.startswith(prefix) and \
                callable(getattr(testCaseClass, attrname))
        testFnNames = list(filter(isTestMethod, dir(testCaseClass)))
        if self.sortTestMethodsUsing:
            testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))
        return testFnNames
    def discover(self, start_dir, pattern='test*.py', top_level_dir=None):
        """Find and return all test modules from the specified start
        directory, recursing into subdirectories to find them and return all
        tests found within them. Only test files that match the pattern will
        be loaded. (Using shell style pattern matching.)
        All test modules must be importable from the top level of the project.
        If the start directory is not the top level directory then the top
        level directory must be specified separately.
        If a test package name (directory with '__init__.py') matches the
        pattern then the package will be checked for a 'load_tests' function. If
        this exists then it will be called with loader, tests, pattern.
        If load_tests exists then discovery does *not* recurse into the package,
        load_tests is responsible for loading all tests in the package.
        The pattern is deliberately not stored as a loader attribute so that
        packages can continue discovery themselves. top_level_dir is stored so
        load_tests does not need to pass this argument in to loader.discover().
        """
        set_implicit_top = False
        if top_level_dir is None and self._top_level_dir is not None:
            # make top_level_dir optional if called from load_tests in a package
            top_level_dir = self._top_level_dir
        elif top_level_dir is None:
            set_implicit_top = True
            top_level_dir = start_dir
        top_level_dir = os.path.abspath(top_level_dir)
        if not top_level_dir in sys.path:
            # all test modules must be importable from the top level directory
            # should we *unconditionally* put the start directory in first
            # in sys.path to minimise likelihood of conflicts between installed
            # modules and development versions?
            sys.path.insert(0, top_level_dir)
        self._top_level_dir = top_level_dir
        is_not_importable = False
        if os.path.isdir(os.path.abspath(start_dir)):
            start_dir = os.path.abspath(start_dir)
            if start_dir != top_level_dir:
                is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
        else:
            # support for discovery from dotted module names
            try:
                __import__(start_dir)
            except ImportError:
                is_not_importable = True
            else:
                the_module = sys.modules[start_dir]
                top_part = start_dir.split('.')[0]
                start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
                if set_implicit_top:
                    # Replace the provisional top level with the directory that
                    # actually makes the imported package importable.
                    self._top_level_dir = self._get_directory_containing_module(top_part)
                    sys.path.remove(top_level_dir)
        if is_not_importable:
            raise ImportError('Start directory is not importable: %r' % start_dir)
        tests = list(self._find_tests(start_dir, pattern))
        return self.suiteClass(tests)
    def _get_directory_containing_module(self, module_name):
        """Return the directory from which module_name is importable."""
        module = sys.modules[module_name]
        full_path = os.path.abspath(module.__file__)
        if os.path.basename(full_path).lower().startswith('__init__.py'):
            return os.path.dirname(os.path.dirname(full_path))
        else:
            # here we have been given a module rather than a package - so
            # all we can do is search the *same* directory the module is in
            # should an exception be raised instead
            return os.path.dirname(full_path)
    def _get_name_from_path(self, path):
        """Translate a filesystem path into a dotted module name."""
        path = _jython_aware_splitext(os.path.normpath(path))
        _relpath = os.path.relpath(path, self._top_level_dir)
        assert not os.path.isabs(_relpath), "Path must be within the project"
        assert not _relpath.startswith('..'), "Path must be within the project"
        name = _relpath.replace(os.path.sep, '.')
        return name
    def _get_module_from_name(self, name):
        """Import and return the module named by the dotted string *name*."""
        __import__(name)
        return sys.modules[name]
    def _match_path(self, path, full_path, pattern):
        # override this method to use alternative matching strategy
        return fnmatch(path, pattern)
    def _find_tests(self, start_dir, pattern):
        """Used by discovery. Yields test suites it loads."""
        paths = os.listdir(start_dir)
        for path in paths:
            full_path = os.path.join(start_dir, path)
            if os.path.isfile(full_path):
                if not VALID_MODULE_NAME.match(path):
                    # valid Python identifiers only
                    continue
                if not self._match_path(path, full_path, pattern):
                    continue
                # if the test file matches, load it
                name = self._get_name_from_path(full_path)
                try:
                    module = self._get_module_from_name(name)
                except:
                    # Any import-time failure becomes a synthetic failing test.
                    yield _make_failed_import_test(name, self.suiteClass)
                else:
                    mod_file = os.path.abspath(getattr(module, '__file__', full_path))
                    realpath = _jython_aware_splitext(os.path.realpath(mod_file))
                    fullpath_noext = _jython_aware_splitext(os.path.realpath(full_path))
                    if realpath.lower() != fullpath_noext.lower():
                        # The import resolved to a different file than the one
                        # discovered on disk - refuse to silently shadow it.
                        module_dir = os.path.dirname(realpath)
                        mod_name = _jython_aware_splitext(os.path.basename(full_path))
                        expected_dir = os.path.dirname(full_path)
                        msg = ("%r module incorrectly imported from %r. Expected %r. "
                               "Is this module globally installed?")
                        raise ImportError(msg % (mod_name, module_dir, expected_dir))
                    yield self.loadTestsFromModule(module)
            elif os.path.isdir(full_path):
                if not os.path.isfile(os.path.join(full_path, '__init__.py')):
                    continue
                load_tests = None
                tests = None
                if fnmatch(path, pattern):
                    # only check load_tests if the package directory itself matches the filter
                    name = self._get_name_from_path(full_path)
                    package = self._get_module_from_name(name)
                    load_tests = getattr(package, 'load_tests', None)
                    tests = self.loadTestsFromModule(package, use_load_tests=False)
                if load_tests is None:
                    if tests is not None:
                        # tests loaded from package file
                        yield tests
                    # recurse into the package
                    for test in self._find_tests(full_path, pattern):
                        yield test
                else:
                    # load_tests takes over: no recursion into this package.
                    try:
                        yield load_tests(self, tests, pattern)
                    except Exception as e:
                        yield _make_failed_load_tests(package.__name__, e,
                                                      self.suiteClass)
# Shared module-level loader instance used by unittest.main and friends.
defaultTestLoader = TestLoader()
def _makeLoader(prefix, sortUsing, suiteClass=None):
    """Return a TestLoader configured with the given prefix, ordering and suite class."""
    loader = TestLoader()
    loader.testMethodPrefix = prefix
    loader.sortTestMethodsUsing = sortUsing
    if suiteClass:
        loader.suiteClass = suiteClass
    return loader
def getTestCaseNames(testCaseClass, prefix, sortUsing=util.three_way_cmp):
    """Compatibility helper: list test method names via a throwaway loader."""
    loader = _makeLoader(prefix, sortUsing)
    return loader.getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=util.three_way_cmp,
              suiteClass=suite.TestSuite):
    """Compatibility helper: build a suite for testCaseClass via a throwaway loader."""
    loader = _makeLoader(prefix, sortUsing, suiteClass)
    return loader.loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=util.three_way_cmp,
                  suiteClass=suite.TestSuite):
    """Compatibility helper: load a module's tests via a throwaway loader."""
    loader = _makeLoader(prefix, sortUsing, suiteClass)
    return loader.loadTestsFromModule(module)
| gpl-3.0 |
salfab/CouchPotatoServer | libs/pytwitter/__init__.py | 104 | 175429 | #!/usr/bin/env python
#
# vim: sw=2 ts=2 sts=2
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a Python interface to the Twitter API'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '1.0.1'
import calendar
import datetime
import httplib
import os
import rfc822
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
import gzip
import StringIO
try:
# Python >= 2.6
import json as simplejson
except ImportError:
try:
# Python < 2.6
import simplejson
except ImportError:
try:
# Google App Engine
from django.utils import simplejson
except ImportError:
raise ImportError, "Unable to load a json library"
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl, parse_qs
except ImportError:
from cgi import parse_qsl, parse_qs
try:
from hashlib import md5
except ImportError:
from md5 import md5
import oauth2 as oauth
# Maximum length of a tweet, in characters.
CHARACTER_LIMIT = 140
# A singleton representing a lazily instantiated FileCache.
DEFAULT_CACHE = object()
# Twitter OAuth 1.0a endpoint URLs.
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
class TwitterError(Exception):
  '''Base class for all errors raised by this Twitter library.'''

  @property
  def message(self):
    '''The human-readable message: the first positional constructor argument.'''
    return self.args[0]
class Status(object):
  '''A class representing the Status structure used by the twitter API.
  The Status structure exposes the following properties:
    status.created_at
    status.created_at_in_seconds # read only
    status.favorited
    status.favorite_count
    status.in_reply_to_screen_name
    status.in_reply_to_user_id
    status.in_reply_to_status_id
    status.truncated
    status.source
    status.id
    status.text
    status.location
    status.relative_created_at # read only
    status.user
    status.urls
    status.user_mentions
    status.hashtags
    status.geo
    status.place
    status.coordinates
    status.contributors
  '''
  def __init__(self,
               created_at = None,
               favorited = None,
               favorite_count = None,
               id = None,
               text = None,
               location = None,
               user = None,
               in_reply_to_screen_name = None,
               in_reply_to_user_id = None,
               in_reply_to_status_id = None,
               truncated = None,
               source = None,
               now = None,
               urls = None,
               user_mentions = None,
               hashtags = None,
               media = None,
               geo = None,
               place = None,
               coordinates = None,
               contributors = None,
               retweeted = None,
               retweeted_status = None,
               current_user_retweet = None,
               retweet_count = None,
               possibly_sensitive = None,
               scopes = None,
               withheld_copyright = None,
               withheld_in_countries = None,
               withheld_scope = None):
    '''An object to hold a Twitter status message.
    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.
    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
    Args:
      created_at:
        The time this status message was posted. [Optional]
      favorited:
        Whether this is a favorite of the authenticated user. [Optional]
      favorite_count:
        Number of times this status message has been favorited. [Optional]
      id:
        The unique id of this status message. [Optional]
      text:
        The text of this status message. [Optional]
      location:
        the geolocation string associated with this message. [Optional]
      relative_created_at:
        A human readable string representing the posting time. [Optional]
      user:
        A twitter.User instance representing the person posting the
        message. [Optional]
      now:
        The current time, if the client chooses to set it.
        Defaults to the wall clock time. [Optional]
      urls:
      user_mentions:
      hashtags:
      geo:
      place:
      coordinates:
      contributors:
      retweeted:
      retweeted_status:
      current_user_retweet:
      retweet_count:
      possibly_sensitive:
      scopes:
      withheld_copyright:
      withheld_in_countries:
      withheld_scope:
    '''
    self.created_at = created_at
    self.favorited = favorited
    self.favorite_count = favorite_count
    self.id = id
    self.text = text
    self.location = location
    self.user = user
    self.now = now
    self.in_reply_to_screen_name = in_reply_to_screen_name
    self.in_reply_to_user_id = in_reply_to_user_id
    self.in_reply_to_status_id = in_reply_to_status_id
    self.truncated = truncated
    self.retweeted = retweeted
    self.source = source
    self.urls = urls
    self.user_mentions = user_mentions
    self.hashtags = hashtags
    self.media = media
    self.geo = geo
    self.place = place
    self.coordinates = coordinates
    self.contributors = contributors
    self.retweeted_status = retweeted_status
    self.current_user_retweet = current_user_retweet
    self.retweet_count = retweet_count
    self.possibly_sensitive = possibly_sensitive
    self.scopes = scopes
    self.withheld_copyright = withheld_copyright
    self.withheld_in_countries = withheld_in_countries
    self.withheld_scope = withheld_scope
  # --- Accessor boilerplate: each attribute is exposed via a Get/Set pair
  # --- wrapped as a property (pre-decorator idiom of this codebase).
  def GetCreatedAt(self):
    '''Get the time this status message was posted.
    Returns:
      The time this status message was posted
    '''
    return self._created_at
  def SetCreatedAt(self, created_at):
    '''Set the time this status message was posted.
    Args:
      created_at:
        The time this status message was created
    '''
    self._created_at = created_at
  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc = 'The time this status message was posted.')
  def GetCreatedAtInSeconds(self):
    '''Get the time this status message was posted, in seconds since the epoch.
    Returns:
      The time this status message was posted, in seconds since the epoch.
    '''
    return calendar.timegm(rfc822.parsedate(self.created_at))
  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc = "The time this status message was "
                                         "posted, in seconds since the epoch")
  def GetFavorited(self):
    '''Get the favorited setting of this status message.
    Returns:
      True if this status message is favorited; False otherwise
    '''
    return self._favorited
  def SetFavorited(self, favorited):
    '''Set the favorited state of this status message.
    Args:
      favorited:
        boolean True/False favorited state of this status message
    '''
    self._favorited = favorited
  favorited = property(GetFavorited, SetFavorited,
                       doc = 'The favorited state of this status message.')
  def GetFavoriteCount(self):
    '''Get the favorite count of this status message.
    Returns:
      number of times this status message has been favorited
    '''
    return self._favorite_count
  def SetFavoriteCount(self, favorite_count):
    '''Set the favorited state of this status message.
    Args:
      favorite_count:
        int number of favorites for this status message
    '''
    self._favorite_count = favorite_count
  favorite_count = property(GetFavoriteCount, SetFavoriteCount,
                            doc = 'The number of favorites for this status message.')
  def GetId(self):
    '''Get the unique id of this status message.
    Returns:
      The unique id of this status message
    '''
    return self._id
  def SetId(self, id):
    '''Set the unique id of this status message.
    Args:
      id:
        The unique id of this status message
    '''
    self._id = id
  id = property(GetId, SetId,
                doc = 'The unique id of this status message.')
  def GetInReplyToScreenName(self):
    return self._in_reply_to_screen_name
  def SetInReplyToScreenName(self, in_reply_to_screen_name):
    self._in_reply_to_screen_name = in_reply_to_screen_name
  in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
                                     doc = '')
  def GetInReplyToUserId(self):
    return self._in_reply_to_user_id
  def SetInReplyToUserId(self, in_reply_to_user_id):
    self._in_reply_to_user_id = in_reply_to_user_id
  in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
                                 doc = '')
  def GetInReplyToStatusId(self):
    return self._in_reply_to_status_id
  def SetInReplyToStatusId(self, in_reply_to_status_id):
    self._in_reply_to_status_id = in_reply_to_status_id
  in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
                                   doc = '')
  def GetTruncated(self):
    return self._truncated
  def SetTruncated(self, truncated):
    self._truncated = truncated
  truncated = property(GetTruncated, SetTruncated,
                       doc = '')
  def GetRetweeted(self):
    return self._retweeted
  def SetRetweeted(self, retweeted):
    self._retweeted = retweeted
  retweeted = property(GetRetweeted, SetRetweeted,
                       doc = '')
  def GetSource(self):
    return self._source
  def SetSource(self, source):
    self._source = source
  source = property(GetSource, SetSource,
                    doc = '')
  def GetText(self):
    '''Get the text of this status message.
    Returns:
      The text of this status message.
    '''
    return self._text
  def SetText(self, text):
    '''Set the text of this status message.
    Args:
      text:
        The text of this status message
    '''
    self._text = text
  text = property(GetText, SetText,
                  doc = 'The text of this status message')
  def GetLocation(self):
    '''Get the geolocation associated with this status message
    Returns:
      The geolocation string of this status message.
    '''
    return self._location
  def SetLocation(self, location):
    '''Set the geolocation associated with this status message
    Args:
      location:
        The geolocation string of this status message
    '''
    self._location = location
  location = property(GetLocation, SetLocation,
                      doc = 'The geolocation string of this status message')
  def GetRelativeCreatedAt(self):
    '''Get a human readable string representing the posting time
    Returns:
      A human readable string representing the posting time
    '''
    # fudge widens each time bucket slightly so boundary values still read
    # naturally (e.g. 70 seconds is reported as "about a minute ago").
    fudge = 1.25
    # NOTE: long() is the Python 2 integer type; this module targets Python 2.
    delta = long(self.now) - long(self.created_at_in_seconds)
    if delta < (1 * fudge):
      return 'about a second ago'
    elif delta < (60 * (1 / fudge)):
      return 'about %d seconds ago' % (delta)
    elif delta < (60 * fudge):
      return 'about a minute ago'
    elif delta < (60 * 60 * (1 / fudge)):
      return 'about %d minutes ago' % (delta / 60)
    elif delta < (60 * 60 * fudge) or delta / (60 * 60) == 1:
      return 'about an hour ago'
    elif delta < (60 * 60 * 24 * (1 / fudge)):
      return 'about %d hours ago' % (delta / (60 * 60))
    elif delta < (60 * 60 * 24 * fudge) or delta / (60 * 60 * 24) == 1:
      return 'about a day ago'
    else:
      return 'about %d days ago' % (delta / (60 * 60 * 24))
  relative_created_at = property(GetRelativeCreatedAt,
                                 doc = 'Get a human readable string representing '
                                       'the posting time')
  def GetUser(self):
    '''Get a twitter.User representing the entity posting this status message.
    Returns:
      A twitter.User representing the entity posting this status message
    '''
    return self._user
  def SetUser(self, user):
    '''Set a twitter.User representing the entity posting this status message.
    Args:
      user:
        A twitter.User representing the entity posting this status message
    '''
    self._user = user
  user = property(GetUser, SetUser,
                  doc = 'A twitter.User representing the entity posting this '
                        'status message')
  def GetNow(self):
    '''Get the wallclock time for this status message.
    Used to calculate relative_created_at. Defaults to the time
    the object was instantiated.
    Returns:
      Whatever the status instance believes the current time to be,
      in seconds since the epoch.
    '''
    # Lazily initialized so construction does not force a clock read.
    if self._now is None:
      self._now = time.time()
    return self._now
  def SetNow(self, now):
    '''Set the wallclock time for this status message.
    Used to calculate relative_created_at. Defaults to the time
    the object was instantiated.
    Args:
      now:
        The wallclock time for this instance.
    '''
    self._now = now
  now = property(GetNow, SetNow,
                 doc = 'The wallclock time for this status instance.')
  def GetGeo(self):
    return self._geo
  def SetGeo(self, geo):
    self._geo = geo
  geo = property(GetGeo, SetGeo,
                 doc = '')
  def GetPlace(self):
    return self._place
  def SetPlace(self, place):
    self._place = place
  place = property(GetPlace, SetPlace,
                   doc = '')
  def GetCoordinates(self):
    return self._coordinates
  def SetCoordinates(self, coordinates):
    self._coordinates = coordinates
  coordinates = property(GetCoordinates, SetCoordinates,
                         doc = '')
  def GetContributors(self):
    return self._contributors
  def SetContributors(self, contributors):
    self._contributors = contributors
  contributors = property(GetContributors, SetContributors,
                          doc = '')
  def GetRetweeted_status(self):
    return self._retweeted_status
  def SetRetweeted_status(self, retweeted_status):
    self._retweeted_status = retweeted_status
  retweeted_status = property(GetRetweeted_status, SetRetweeted_status,
                              doc = '')
  def GetRetweetCount(self):
    return self._retweet_count
  def SetRetweetCount(self, retweet_count):
    self._retweet_count = retweet_count
  retweet_count = property(GetRetweetCount, SetRetweetCount,
                           doc = '')
  def GetCurrent_user_retweet(self):
    return self._current_user_retweet
  def SetCurrent_user_retweet(self, current_user_retweet):
    self._current_user_retweet = current_user_retweet
  current_user_retweet = property(GetCurrent_user_retweet, SetCurrent_user_retweet,
                                  doc = '')
  def GetPossibly_sensitive(self):
    return self._possibly_sensitive
  def SetPossibly_sensitive(self, possibly_sensitive):
    self._possibly_sensitive = possibly_sensitive
  possibly_sensitive = property(GetPossibly_sensitive, SetPossibly_sensitive,
                                doc = '')
  def GetScopes(self):
    return self._scopes
  def SetScopes(self, scopes):
    self._scopes = scopes
  scopes = property(GetScopes, SetScopes, doc = '')
  def GetWithheld_copyright(self):
    return self._withheld_copyright
  def SetWithheld_copyright(self, withheld_copyright):
    self._withheld_copyright = withheld_copyright
  withheld_copyright = property(GetWithheld_copyright, SetWithheld_copyright,
                                doc = '')
  def GetWithheld_in_countries(self):
    return self._withheld_in_countries
  def SetWithheld_in_countries(self, withheld_in_countries):
    self._withheld_in_countries = withheld_in_countries
  withheld_in_countries = property(GetWithheld_in_countries, SetWithheld_in_countries,
                                   doc = '')
  def GetWithheld_scope(self):
    return self._withheld_scope
  def SetWithheld_scope(self, withheld_scope):
    self._withheld_scope = withheld_scope
  withheld_scope = property(GetWithheld_scope, SetWithheld_scope,
                            doc = '')
  def __ne__(self, other):
    return not self.__eq__(other)
  def __eq__(self, other):
    # A missing attribute on either side means "not equal", not an error.
    try:
      return other and \
             self.created_at == other.created_at and \
             self.id == other.id and \
             self.text == other.text and \
             self.location == other.location and \
             self.user == other.user and \
             self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
             self.in_reply_to_user_id == other.in_reply_to_user_id and \
             self.in_reply_to_status_id == other.in_reply_to_status_id and \
             self.truncated == other.truncated and \
             self.retweeted == other.retweeted and \
             self.favorited == other.favorited and \
             self.favorite_count == other.favorite_count and \
             self.source == other.source and \
             self.geo == other.geo and \
             self.place == other.place and \
             self.coordinates == other.coordinates and \
             self.contributors == other.contributors and \
             self.retweeted_status == other.retweeted_status and \
             self.retweet_count == other.retweet_count and \
             self.current_user_retweet == other.current_user_retweet and \
             self.possibly_sensitive == other.possibly_sensitive and \
             self.scopes == other.scopes and \
             self.withheld_copyright == other.withheld_copyright and \
             self.withheld_in_countries == other.withheld_in_countries and \
             self.withheld_scope == other.withheld_scope
    except AttributeError:
      return False
  def __str__(self):
    '''A string representation of this twitter.Status instance.
    The return value is the same as the JSON string representation.
    Returns:
      A string representation of this twitter.Status instance.
    '''
    return self.AsJsonString()
  def AsJsonString(self):
    '''A JSON string representation of this twitter.Status instance.
    Returns:
      A JSON string representation of this twitter.Status instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys = True)
  def AsDict(self):
    '''A dict representation of this twitter.Status instance.
    The return value uses the same key names as the JSON representation.
    Return:
      A dict representing this twitter.Status instance
    '''
    # Only set (truthy / non-None) fields are emitted, mirroring the sparse
    # JSON the API produces.
    data = {}
    if self.created_at:
      data['created_at'] = self.created_at
    if self.favorited:
      data['favorited'] = self.favorited
    if self.favorite_count:
      data['favorite_count'] = self.favorite_count
    if self.id:
      data['id'] = self.id
    if self.text:
      data['text'] = self.text
    if self.location:
      data['location'] = self.location
    if self.user:
      data['user'] = self.user.AsDict()
    if self.in_reply_to_screen_name:
      data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
    if self.in_reply_to_user_id:
      data['in_reply_to_user_id'] = self.in_reply_to_user_id
    if self.in_reply_to_status_id:
      data['in_reply_to_status_id'] = self.in_reply_to_status_id
    if self.truncated is not None:
      data['truncated'] = self.truncated
    if self.retweeted is not None:
      data['retweeted'] = self.retweeted
    if self.favorited is not None:
      data['favorited'] = self.favorited
    if self.source:
      data['source'] = self.source
    if self.geo:
      data['geo'] = self.geo
    if self.place:
      data['place'] = self.place
    if self.coordinates:
      data['coordinates'] = self.coordinates
    if self.contributors:
      data['contributors'] = self.contributors
    if self.hashtags:
      data['hashtags'] = [h.text for h in self.hashtags]
    if self.retweeted_status:
      data['retweeted_status'] = self.retweeted_status.AsDict()
    if self.retweet_count:
      data['retweet_count'] = self.retweet_count
    if self.urls:
      data['urls'] = dict([(url.url, url.expanded_url) for url in self.urls])
    if self.user_mentions:
      data['user_mentions'] = [um.AsDict() for um in self.user_mentions]
    if self.current_user_retweet:
      data['current_user_retweet'] = self.current_user_retweet
    if self.possibly_sensitive:
      data['possibly_sensitive'] = self.possibly_sensitive
    if self.scopes:
      data['scopes'] = self.scopes
    if self.withheld_copyright:
      data['withheld_copyright'] = self.withheld_copyright
    if self.withheld_in_countries:
      data['withheld_in_countries'] = self.withheld_in_countries
    if self.withheld_scope:
      data['withheld_scope'] = self.withheld_scope
    return data
  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.
    Args:
      data: A JSON dict, as converted from the JSON in the twitter API
    Returns:
      A twitter.Status instance
    '''
    if 'user' in data:
      user = User.NewFromJsonDict(data['user'])
    else:
      user = None
    if 'retweeted_status' in data:
      retweeted_status = Status.NewFromJsonDict(data['retweeted_status'])
    else:
      retweeted_status = None
    if 'current_user_retweet' in data:
      current_user_retweet = data['current_user_retweet']['id']
    else:
      current_user_retweet = None
    urls = None
    user_mentions = None
    hashtags = None
    media = None
    if 'entities' in data:
      if 'urls' in data['entities']:
        urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
      if 'user_mentions' in data['entities']:
        user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
      if 'hashtags' in data['entities']:
        hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
      if 'media' in data['entities']:
        media = data['entities']['media']
    else:
      # No 'entities' payload at all: normalize media to an empty list.
      media = []
    return Status(created_at = data.get('created_at', None),
                  favorited = data.get('favorited', None),
                  favorite_count = data.get('favorite_count', None),
                  id = data.get('id', None),
                  text = data.get('text', None),
                  location = data.get('location', None),
                  in_reply_to_screen_name = data.get('in_reply_to_screen_name', None),
                  in_reply_to_user_id = data.get('in_reply_to_user_id', None),
                  in_reply_to_status_id = data.get('in_reply_to_status_id', None),
                  truncated = data.get('truncated', None),
                  retweeted = data.get('retweeted', None),
                  source = data.get('source', None),
                  user = user,
                  urls = urls,
                  user_mentions = user_mentions,
                  hashtags = hashtags,
                  media = media,
                  geo = data.get('geo', None),
                  place = data.get('place', None),
                  coordinates = data.get('coordinates', None),
                  contributors = data.get('contributors', None),
                  retweeted_status = retweeted_status,
                  current_user_retweet = current_user_retweet,
                  retweet_count = data.get('retweet_count', None),
                  possibly_sensitive = data.get('possibly_sensitive', None),
                  scopes = data.get('scopes', None),
                  withheld_copyright = data.get('withheld_copyright', None),
                  withheld_in_countries = data.get('withheld_in_countries', None),
                  withheld_scope = data.get('withheld_scope', None))
class User(object):
  '''A class representing the User structure used by the twitter API.

  The User structure exposes the following properties:

    user.id
    user.name
    user.screen_name
    user.location
    user.description
    user.profile_image_url
    user.profile_background_tile
    user.profile_background_image_url
    user.profile_sidebar_fill_color
    user.profile_background_color
    user.profile_link_color
    user.profile_text_color
    user.protected
    user.utc_offset
    user.time_zone
    user.url
    user.status
    user.statuses_count
    user.followers_count
    user.friends_count
    user.favourites_count
    user.geo_enabled
    user.verified
    user.lang
    user.notifications
    user.contributors_enabled
    user.created_at
    user.listed_count
  '''

  def __init__(self,
               id=None,
               name=None,
               screen_name=None,
               location=None,
               description=None,
               profile_image_url=None,
               profile_background_tile=None,
               profile_background_image_url=None,
               profile_sidebar_fill_color=None,
               profile_background_color=None,
               profile_link_color=None,
               profile_text_color=None,
               protected=None,
               utc_offset=None,
               time_zone=None,
               followers_count=None,
               friends_count=None,
               statuses_count=None,
               favourites_count=None,
               url=None,
               status=None,
               geo_enabled=None,
               verified=None,
               lang=None,
               notifications=None,
               contributors_enabled=None,
               created_at=None,
               listed_count=None):
    '''An object to hold a Twitter user.

    All arguments are optional and default to None; each one is stored
    through the matching property below.
    '''
    self.id = id
    self.name = name
    self.screen_name = screen_name
    self.location = location
    self.description = description
    self.profile_image_url = profile_image_url
    self.profile_background_tile = profile_background_tile
    self.profile_background_image_url = profile_background_image_url
    self.profile_sidebar_fill_color = profile_sidebar_fill_color
    self.profile_background_color = profile_background_color
    self.profile_link_color = profile_link_color
    self.profile_text_color = profile_text_color
    self.protected = protected
    self.utc_offset = utc_offset
    self.time_zone = time_zone
    self.followers_count = followers_count
    self.friends_count = friends_count
    self.statuses_count = statuses_count
    self.favourites_count = favourites_count
    self.url = url
    self.status = status
    self.geo_enabled = geo_enabled
    self.verified = verified
    self.lang = lang
    self.notifications = notifications
    self.contributors_enabled = contributors_enabled
    self.created_at = created_at
    self.listed_count = listed_count

  def GetId(self):
    '''Get the unique id of this user.'''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this user.'''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this user.')

  def GetName(self):
    '''Get the real name of this user.'''
    return self._name

  def SetName(self, name):
    '''Set the real name of this user.'''
    self._name = name

  name = property(GetName, SetName,
                  doc='The real name of this user.')

  def GetScreenName(self):
    '''Get the short twitter name of this user.'''
    return self._screen_name

  def SetScreenName(self, screen_name):
    '''Set the short twitter name of this user.'''
    self._screen_name = screen_name

  screen_name = property(GetScreenName, SetScreenName,
                         doc='The short twitter name of this user.')

  def GetLocation(self):
    '''Get the geographic location of this user.'''
    return self._location

  def SetLocation(self, location):
    '''Set the geographic location of this user.'''
    self._location = location

  location = property(GetLocation, SetLocation,
                      doc='The geographic location of this user.')

  def GetDescription(self):
    '''Get the short text description of this user.'''
    return self._description

  def SetDescription(self, description):
    '''Set the short text description of this user.'''
    self._description = description

  description = property(GetDescription, SetDescription,
                         doc='The short text description of this user.')

  def GetUrl(self):
    '''Get the homepage url of this user.'''
    return self._url

  def SetUrl(self, url):
    '''Set the homepage url of this user.'''
    self._url = url

  url = property(GetUrl, SetUrl,
                 doc='The homepage url of this user.')

  def GetProfileImageUrl(self):
    '''Get the url of the thumbnail of this user.'''
    return self._profile_image_url

  def SetProfileImageUrl(self, profile_image_url):
    '''Set the url of the thumbnail of this user.'''
    self._profile_image_url = profile_image_url

  profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
                               doc='The url of the thumbnail of this user.')

  def GetProfileBackgroundTile(self):
    '''Boolean for whether to tile the profile background image.

    Returns:
      True if the background is to be tiled, False if not, None if unset.
    '''
    return self._profile_background_tile

  def SetProfileBackgroundTile(self, profile_background_tile):
    '''Set the boolean flag for whether to tile the profile background image.'''
    self._profile_background_tile = profile_background_tile

  profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
                                     doc='Boolean for whether to tile the background image.')

  def GetProfileBackgroundImageUrl(self):
    '''Get the url of the profile background image of this user.'''
    return self._profile_background_image_url

  def SetProfileBackgroundImageUrl(self, profile_background_image_url):
    '''Set the url of the profile background image of this user.'''
    self._profile_background_image_url = profile_background_image_url

  profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
                                          doc='The url of the profile background of this user.')

  def GetProfileSidebarFillColor(self):
    '''Get the profile sidebar fill color of this user.'''
    return self._profile_sidebar_fill_color

  def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
    '''Set the profile sidebar fill color of this user.'''
    self._profile_sidebar_fill_color = profile_sidebar_fill_color

  profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor,
                                        doc='The profile sidebar fill color of this user.')

  def GetProfileBackgroundColor(self):
    '''Get the profile background color of this user.'''
    return self._profile_background_color

  def SetProfileBackgroundColor(self, profile_background_color):
    '''Set the profile background color of this user.'''
    self._profile_background_color = profile_background_color

  profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor,
                                      doc='The profile background color of this user.')

  def GetProfileLinkColor(self):
    '''Get the profile link color of this user.'''
    return self._profile_link_color

  def SetProfileLinkColor(self, profile_link_color):
    '''Set the profile link color of this user.'''
    self._profile_link_color = profile_link_color

  profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor,
                                doc='The profile link color of this user.')

  def GetProfileTextColor(self):
    '''Get the profile text color of this user.'''
    return self._profile_text_color

  def SetProfileTextColor(self, profile_text_color):
    '''Set the profile text color of this user.'''
    self._profile_text_color = profile_text_color

  profile_text_color = property(GetProfileTextColor, SetProfileTextColor,
                                doc='The profile text color of this user.')

  def GetProtected(self):
    '''Get the protected flag of this user.'''
    return self._protected

  def SetProtected(self, protected):
    '''Set the protected flag of this user.'''
    self._protected = protected

  protected = property(GetProtected, SetProtected,
                       doc='The protected flag of this user.')

  def GetUtcOffset(self):
    '''Get the UTC offset (in seconds) of this user.'''
    return self._utc_offset

  def SetUtcOffset(self, utc_offset):
    '''Set the UTC offset (in seconds) of this user.'''
    self._utc_offset = utc_offset

  utc_offset = property(GetUtcOffset, SetUtcOffset,
                        doc='The UTC offset of this user.')

  def GetTimeZone(self):
    '''Returns the current time zone string for the user.

    Returns:
      The descriptive time zone string for the user.
    '''
    return self._time_zone

  def SetTimeZone(self, time_zone):
    '''Sets the user's time zone string.'''
    self._time_zone = time_zone

  time_zone = property(GetTimeZone, SetTimeZone,
                       doc='The descriptive time zone string of this user.')

  def GetStatus(self):
    '''Get the latest twitter.Status of this user.'''
    return self._status

  def SetStatus(self, status):
    '''Set the latest twitter.Status of this user.'''
    self._status = status

  status = property(GetStatus, SetStatus,
                    doc='The latest twitter.Status of this user.')

  def GetFriendsCount(self):
    '''Get the number of users this user has befriended.'''
    return self._friends_count

  def SetFriendsCount(self, count):
    '''Set the number of users this user has befriended.'''
    self._friends_count = count

  friends_count = property(GetFriendsCount, SetFriendsCount,
                           doc='The number of friends for this user.')

  def GetListedCount(self):
    '''Get the number of lists this user belongs to.'''
    return self._listed_count

  def SetListedCount(self, count):
    '''Set the number of lists this user belongs to.'''
    self._listed_count = count

  listed_count = property(GetListedCount, SetListedCount,
                          doc='The number of lists this user belongs to.')

  def GetFollowersCount(self):
    '''Get the number of users following this user.'''
    return self._followers_count

  def SetFollowersCount(self, count):
    '''Set the number of users following this user.'''
    self._followers_count = count

  followers_count = property(GetFollowersCount, SetFollowersCount,
                             doc='The number of users following this user.')

  def GetStatusesCount(self):
    '''Get the number of status updates for this user.'''
    return self._statuses_count

  def SetStatusesCount(self, count):
    '''Set the number of status updates for this user.'''
    self._statuses_count = count

  statuses_count = property(GetStatusesCount, SetStatusesCount,
                            doc='The number of updates for this user.')

  def GetFavouritesCount(self):
    '''Get the number of favourites for this user.'''
    return self._favourites_count

  def SetFavouritesCount(self, count):
    '''Set the number of favourites for this user.'''
    self._favourites_count = count

  favourites_count = property(GetFavouritesCount, SetFavouritesCount,
                              doc='The number of favourites for this user.')

  def GetGeoEnabled(self):
    '''Get the setting of geo_enabled for this user.

    Returns:
      True/False if Geo tagging is enabled
    '''
    return self._geo_enabled

  def SetGeoEnabled(self, geo_enabled):
    '''Set the value of geo_enabled for this user.'''
    self._geo_enabled = geo_enabled

  geo_enabled = property(GetGeoEnabled, SetGeoEnabled,
                         doc='The value of twitter.geo_enabled for this user.')

  def GetVerified(self):
    '''Get the setting of verified for this user.

    Returns:
      True/False if user is a verified account
    '''
    return self._verified

  def SetVerified(self, verified):
    '''Set twitter.verified for this user.'''
    self._verified = verified

  verified = property(GetVerified, SetVerified,
                      doc='The value of twitter.verified for this user.')

  def GetLang(self):
    '''Get the language code of the user.'''
    return self._lang

  def SetLang(self, lang):
    '''Set the language code for the user.'''
    self._lang = lang

  lang = property(GetLang, SetLang,
                  doc='The value of twitter.lang for this user.')

  def GetNotifications(self):
    '''Get the True/False notifications setting of the user.'''
    return self._notifications

  def SetNotifications(self, notifications):
    '''Set the True/False notifications setting for the user.'''
    self._notifications = notifications

  notifications = property(GetNotifications, SetNotifications,
                           doc='The value of twitter.notifications for this user.')

  def GetContributorsEnabled(self):
    '''Get the True/False contributors_enabled setting of the user.'''
    return self._contributors_enabled

  def SetContributorsEnabled(self, contributors_enabled):
    '''Set the True/False contributors_enabled setting for the user.'''
    self._contributors_enabled = contributors_enabled

  contributors_enabled = property(GetContributorsEnabled, SetContributorsEnabled,
                                  doc='The value of twitter.contributors_enabled for this user.')

  def GetCreatedAt(self):
    '''Get the created_at value of the user.'''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Set the created_at value for the user.'''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The value of twitter.created_at for this user.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    '''Field-by-field equality; any missing attribute on other means unequal.'''
    try:
      return other and \
             self.id == other.id and \
             self.name == other.name and \
             self.screen_name == other.screen_name and \
             self.location == other.location and \
             self.description == other.description and \
             self.profile_image_url == other.profile_image_url and \
             self.profile_background_tile == other.profile_background_tile and \
             self.profile_background_image_url == other.profile_background_image_url and \
             self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
             self.profile_background_color == other.profile_background_color and \
             self.profile_link_color == other.profile_link_color and \
             self.profile_text_color == other.profile_text_color and \
             self.protected == other.protected and \
             self.utc_offset == other.utc_offset and \
             self.time_zone == other.time_zone and \
             self.url == other.url and \
             self.statuses_count == other.statuses_count and \
             self.followers_count == other.followers_count and \
             self.favourites_count == other.favourites_count and \
             self.friends_count == other.friends_count and \
             self.status == other.status and \
             self.geo_enabled == other.geo_enabled and \
             self.verified == other.verified and \
             self.lang == other.lang and \
             self.notifications == other.notifications and \
             self.contributors_enabled == other.contributors_enabled and \
             self.created_at == other.created_at and \
             self.listed_count == other.listed_count
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.User instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.User instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.User instance.

    Returns:
      A JSON string representation of this twitter.User instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.User instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.User instance
    '''
    data = {}
    if self.id:
      data['id'] = self.id
    if self.name:
      data['name'] = self.name
    if self.screen_name:
      data['screen_name'] = self.screen_name
    if self.location:
      data['location'] = self.location
    if self.description:
      data['description'] = self.description
    if self.profile_image_url:
      data['profile_image_url'] = self.profile_image_url
    if self.profile_background_tile is not None:
      data['profile_background_tile'] = self.profile_background_tile
    if self.profile_background_image_url:
      # Bug fix: this value was previously stored under the
      # 'profile_sidebar_fill_color' key, clobbering that field and
      # omitting 'profile_background_image_url' entirely.
      data['profile_background_image_url'] = self.profile_background_image_url
    if self.profile_sidebar_fill_color:
      data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
    if self.profile_background_color:
      data['profile_background_color'] = self.profile_background_color
    if self.profile_link_color:
      data['profile_link_color'] = self.profile_link_color
    if self.profile_text_color:
      data['profile_text_color'] = self.profile_text_color
    if self.protected is not None:
      data['protected'] = self.protected
    if self.utc_offset is not None:
      # 'is not None' so an offset of 0 (UTC) is not silently dropped.
      data['utc_offset'] = self.utc_offset
    if self.time_zone:
      data['time_zone'] = self.time_zone
    if self.url:
      data['url'] = self.url
    if self.status:
      data['status'] = self.status.AsDict()
    if self.friends_count:
      data['friends_count'] = self.friends_count
    if self.followers_count:
      data['followers_count'] = self.followers_count
    if self.statuses_count:
      data['statuses_count'] = self.statuses_count
    if self.favourites_count:
      data['favourites_count'] = self.favourites_count
    if self.geo_enabled:
      data['geo_enabled'] = self.geo_enabled
    if self.verified:
      data['verified'] = self.verified
    if self.lang:
      data['lang'] = self.lang
    if self.notifications:
      data['notifications'] = self.notifications
    if self.contributors_enabled:
      data['contributors_enabled'] = self.contributors_enabled
    if self.created_at:
      data['created_at'] = self.created_at
    if self.listed_count:
      data['listed_count'] = self.listed_count
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.User instance
    '''
    if 'status' in data:
      status = Status.NewFromJsonDict(data['status'])
    else:
      status = None
    return User(id=data.get('id', None),
                name=data.get('name', None),
                screen_name=data.get('screen_name', None),
                location=data.get('location', None),
                description=data.get('description', None),
                statuses_count=data.get('statuses_count', None),
                followers_count=data.get('followers_count', None),
                favourites_count=data.get('favourites_count', None),
                friends_count=data.get('friends_count', None),
                # Prefer the https variant of the avatar url when present.
                profile_image_url=data.get('profile_image_url_https', data.get('profile_image_url', None)),
                profile_background_tile=data.get('profile_background_tile', None),
                profile_background_image_url=data.get('profile_background_image_url', None),
                profile_sidebar_fill_color=data.get('profile_sidebar_fill_color', None),
                profile_background_color=data.get('profile_background_color', None),
                profile_link_color=data.get('profile_link_color', None),
                profile_text_color=data.get('profile_text_color', None),
                protected=data.get('protected', None),
                utc_offset=data.get('utc_offset', None),
                time_zone=data.get('time_zone', None),
                url=data.get('url', None),
                status=status,
                geo_enabled=data.get('geo_enabled', None),
                verified=data.get('verified', None),
                lang=data.get('lang', None),
                notifications=data.get('notifications', None),
                contributors_enabled=data.get('contributors_enabled', None),
                created_at=data.get('created_at', None),
                listed_count=data.get('listed_count', None))
class List(object):
  '''A class representing the List structure used by the twitter API.

  The List structure exposes the following properties:

    list.id
    list.name
    list.slug
    list.description
    list.full_name
    list.mode
    list.uri
    list.member_count
    list.subscriber_count
    list.following
  '''

  def __init__(self,
               id=None,
               name=None,
               slug=None,
               description=None,
               full_name=None,
               mode=None,
               uri=None,
               member_count=None,
               subscriber_count=None,
               following=None,
               user=None):
    '''An object to hold a Twitter list; all arguments default to None.'''
    self.id = id
    self.name = name
    self.slug = slug
    self.description = description
    self.full_name = full_name
    self.mode = mode
    self.uri = uri
    self.member_count = member_count
    self.subscriber_count = subscriber_count
    self.following = following
    self.user = user

  def GetId(self):
    '''Return the unique id of this list.'''
    return self._id

  def SetId(self, id):
    '''Store the unique id of this list.'''
    self._id = id

  id = property(GetId, SetId, doc='The unique id of this list.')

  def GetName(self):
    '''Return the real name of this list.'''
    return self._name

  def SetName(self, name):
    '''Store the real name of this list.'''
    self._name = name

  name = property(GetName, SetName, doc='The real name of this list.')

  def GetSlug(self):
    '''Return the slug of this list.'''
    return self._slug

  def SetSlug(self, slug):
    '''Store the slug of this list.'''
    self._slug = slug

  slug = property(GetSlug, SetSlug, doc='The slug of this list.')

  def GetDescription(self):
    '''Return the description of this list.'''
    return self._description

  def SetDescription(self, description):
    '''Store the description of this list.'''
    self._description = description

  description = property(GetDescription, SetDescription,
                         doc='The description of this list.')

  def GetFull_name(self):
    '''Return the full_name of this list.'''
    return self._full_name

  def SetFull_name(self, full_name):
    '''Store the full_name of this list.'''
    self._full_name = full_name

  full_name = property(GetFull_name, SetFull_name,
                       doc='The full_name of this list.')

  def GetMode(self):
    '''Return the mode of this list.'''
    return self._mode

  def SetMode(self, mode):
    '''Store the mode of this list.'''
    self._mode = mode

  mode = property(GetMode, SetMode, doc='The mode of this list.')

  def GetUri(self):
    '''Return the uri of this list.'''
    return self._uri

  def SetUri(self, uri):
    '''Store the uri of this list.'''
    self._uri = uri

  uri = property(GetUri, SetUri, doc='The uri of this list.')

  def GetMember_count(self):
    '''Return the member_count of this list.'''
    return self._member_count

  def SetMember_count(self, member_count):
    '''Store the member_count of this list.'''
    self._member_count = member_count

  member_count = property(GetMember_count, SetMember_count,
                          doc='The member_count of this list.')

  def GetSubscriber_count(self):
    '''Return the subscriber_count of this list.'''
    return self._subscriber_count

  def SetSubscriber_count(self, subscriber_count):
    '''Store the subscriber_count of this list.'''
    self._subscriber_count = subscriber_count

  subscriber_count = property(GetSubscriber_count, SetSubscriber_count,
                              doc='The subscriber_count of this list.')

  def GetFollowing(self):
    '''Return the following status of this list.'''
    return self._following

  def SetFollowing(self, following):
    '''Store the following status of this list.'''
    self._following = following

  following = property(GetFollowing, SetFollowing,
                       doc='The following status of this list.')

  def GetUser(self):
    '''Return the owner of this list.'''
    return self._user

  def SetUser(self, user):
    '''Store the owner of this list.'''
    self._user = user

  user = property(GetUser, SetUser, doc='The owner of this list.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    '''Field-by-field equality; a missing attribute on other means unequal.'''
    try:
      if not other:
        return other
      attrs = ('id', 'name', 'slug', 'description', 'full_name', 'mode',
               'uri', 'member_count', 'subscriber_count', 'following',
               'user')
      return all(getattr(self, a) == getattr(other, a) for a in attrs)
    except AttributeError:
      return False

  def __str__(self):
    '''Return the JSON string representation of this twitter.List.'''
    return self.AsJsonString()

  def AsJsonString(self):
    '''Return a JSON string representing this twitter.List instance.'''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''Return a dict representing this twitter.List instance.

    The returned dict uses the same key names as the JSON representation.
    '''
    data = {}
    # String-ish fields are emitted only when truthy.
    for key in ('id', 'name', 'slug', 'description', 'full_name', 'mode',
                'uri'):
      value = getattr(self, key)
      if value:
        data[key] = value
    # Counts and flags may legitimately be 0 or False, so test against None.
    for key in ('member_count', 'subscriber_count', 'following'):
      value = getattr(self, key)
      if value is not None:
        data[key] = value
    if self.user is not None:
      data['user'] = self.user.AsDict()
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.List instance
    '''
    owner = User.NewFromJsonDict(data['user']) if 'user' in data else None
    kwargs = dict((key, data.get(key, None)) for key in
                  ('id', 'name', 'slug', 'description', 'full_name',
                   'mode', 'uri', 'member_count', 'subscriber_count',
                   'following'))
    return List(user=owner, **kwargs)
class DirectMessage(object):
  '''A class representing the DirectMessage structure used by the twitter API.

  The DirectMessage structure exposes the following properties:

    direct_message.id
    direct_message.created_at
    direct_message.created_at_in_seconds # read only
    direct_message.sender_id
    direct_message.sender_screen_name
    direct_message.recipient_id
    direct_message.recipient_screen_name
    direct_message.text
  '''

  def __init__(self,
               id=None,
               created_at=None,
               sender_id=None,
               sender_screen_name=None,
               recipient_id=None,
               recipient_screen_name=None,
               text=None):
    '''An object to hold a Twitter direct message.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.

    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

    Args:
      id:
        The unique id of this direct message. [Optional]
      created_at:
        The time this direct message was posted. [Optional]
      sender_id:
        The id of the twitter user that sent this message. [Optional]
      sender_screen_name:
        The name of the twitter user that sent this message. [Optional]
      recipient_id:
        The id of the twitter that received this message. [Optional]
      recipient_screen_name:
        The name of the twitter that received this message. [Optional]
      text:
        The text of this direct message. [Optional]
    '''
    self.id = id
    self.created_at = created_at
    self.sender_id = sender_id
    self.sender_screen_name = sender_screen_name
    self.recipient_id = recipient_id
    self.recipient_screen_name = recipient_screen_name
    self.text = text

  def GetId(self):
    '''Return the unique id of this direct message.'''
    return self._id

  def SetId(self, id):
    '''Store the unique id of this direct message.'''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this direct message.')

  def GetCreatedAt(self):
    '''Return the time this direct message was posted.'''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Store the time this direct message was posted.'''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this direct message was posted.')

  def GetCreatedAtInSeconds(self):
    '''Return the posting time as seconds since the epoch.'''
    return calendar.timegm(rfc822.parsedate(self.created_at))

  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc='The time this direct message was '
                                       'posted, in seconds since the epoch')

  def GetSenderId(self):
    '''Return the unique sender id of this direct message.'''
    return self._sender_id

  def SetSenderId(self, sender_id):
    '''Store the unique sender id of this direct message.'''
    self._sender_id = sender_id

  sender_id = property(GetSenderId, SetSenderId,
                       doc='The unique sender id of this direct message.')

  def GetSenderScreenName(self):
    '''Return the unique sender screen name of this direct message.'''
    return self._sender_screen_name

  def SetSenderScreenName(self, sender_screen_name):
    '''Store the unique sender screen name of this direct message.'''
    self._sender_screen_name = sender_screen_name

  sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
                                doc='The unique sender screen name of this direct message.')

  def GetRecipientId(self):
    '''Return the unique recipient id of this direct message.'''
    return self._recipient_id

  def SetRecipientId(self, recipient_id):
    '''Store the unique recipient id of this direct message.'''
    self._recipient_id = recipient_id

  recipient_id = property(GetRecipientId, SetRecipientId,
                          doc='The unique recipient id of this direct message.')

  def GetRecipientScreenName(self):
    '''Return the unique recipient screen name of this direct message.'''
    return self._recipient_screen_name

  def SetRecipientScreenName(self, recipient_screen_name):
    '''Store the unique recipient screen name of this direct message.'''
    self._recipient_screen_name = recipient_screen_name

  recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
                                   doc='The unique recipient screen name of this direct message.')

  def GetText(self):
    '''Return the text of this direct message.'''
    return self._text

  def SetText(self, text):
    '''Store the text of this direct message.'''
    self._text = text

  text = property(GetText, SetText,
                  doc='The text of this direct message')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    '''Field-by-field equality; a missing attribute on other means unequal.'''
    try:
      if not other:
        return other
      attrs = ('id', 'created_at', 'sender_id', 'sender_screen_name',
               'recipient_id', 'recipient_screen_name', 'text')
      return all(getattr(self, a) == getattr(other, a) for a in attrs)
    except AttributeError:
      return False

  def __str__(self):
    '''Return the JSON string representation of this twitter.DirectMessage.'''
    return self.AsJsonString()

  def AsJsonString(self):
    '''Return a JSON string representing this twitter.DirectMessage.'''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''Return a dict representing this twitter.DirectMessage instance.

    The returned dict uses the same key names as the JSON representation;
    fields holding falsy values are omitted.
    '''
    data = {}
    for key in ('id', 'created_at', 'sender_id', 'sender_screen_name',
                'recipient_id', 'recipient_screen_name', 'text'):
      value = getattr(self, key)
      if value:
        data[key] = value
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.DirectMessage instance
    '''
    kwargs = dict((key, data.get(key, None)) for key in
                  ('id', 'created_at', 'sender_id', 'sender_screen_name',
                   'recipient_id', 'recipient_screen_name', 'text'))
    return DirectMessage(**kwargs)
class Hashtag(object):
  '''A class representing a twitter hashtag.'''

  def __init__(self, text=None):
    # The hashtag text, without the leading '#'.
    self.text = text

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Hashtag instance
    '''
    return Hashtag(text=data.get('text'))
class Trend(object):
  '''A class representing a trending topic.'''

  def __init__(self, name=None, query=None, timestamp=None, url=None):
    self.name = name            # display name of the trend
    self.query = query          # search query string for the trend
    self.timestamp = timestamp  # time the trend was captured
    self.url = url              # twitter search URL for the trend

  def __str__(self):
    return 'Name: %s\nQuery: %s\nTimestamp: %s\nSearch URL: %s\n' % (self.name, self.query, self.timestamp, self.url)

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    '''Compare all four fields of the trend.'''
    try:
      return other and \
          self.name == other.name and \
          self.query == other.query and \
          self.timestamp == other.timestamp and \
          self.url == other.url  # Bug fix: previously compared self.url to itself,
                                 # so trends differing only by url compared equal.
    except AttributeError:
      return False

  @staticmethod
  def NewFromJsonDict(data, timestamp=None):
    '''Create a new instance based on a JSON dict

    Args:
      data:
        A JSON dict
      timestamp:
        Gets set as the timestamp property of the new object

    Returns:
      A twitter.Trend object
    '''
    return Trend(name=data.get('name', None),
                 query=data.get('query', None),
                 url=data.get('url', None),
                 timestamp=timestamp)
class Url(object):
  '''A class representing an URL contained in a tweet'''

  def __init__(self, url=None, expanded_url=None):
    self.url = url
    self.expanded_url = expanded_url

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data:
        A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Url instance
    '''
    short_form = data.get('url', None)
    long_form = data.get('expanded_url', None)
    return Url(url=short_form, expanded_url=long_form)
class Api(object):
'''A python interface into the Twitter API
By default, the Api caches results for 1 minute.
Example usage:
To create an instance of the twitter.Api class, with no authentication:
>>> import twitter
>>> api = twitter.Api()
To fetch the most recently posted public twitter status messages:
>>> statuses = api.GetPublicTimeline()
>>> print [s.user.name for s in statuses]
[u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
To fetch a single user's public status messages, where "user" is either
a Twitter "short name" or their user id.
>>> statuses = api.GetUserTimeline(user)
>>> print [s.text for s in statuses]
To use authentication, instantiate the twitter.Api class with a
consumer key and secret; and the oAuth key and secret:
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret')
To fetch your friends (after being authenticated):
>>> users = api.GetFriends()
>>> print [u.name for u in users]
To post a twitter status message (after being authenticated):
>>> status = api.PostUpdate('I love python-twitter!')
>>> print status.text
I love python-twitter!
There are many other methods, including:
>>> api.PostUpdates(status)
>>> api.PostDirectMessage(user, text)
>>> api.GetUser(user)
>>> api.GetReplies()
>>> api.GetUserTimeline(user)
>>> api.GetHomeTimeLine()
>>> api.GetStatus(id)
>>> api.DestroyStatus(id)
>>> api.GetFriends(user)
>>> api.GetFollowers()
>>> api.GetFeatured()
>>> api.GetDirectMessages()
>>> api.GetSentDirectMessages()
>>> api.PostDirectMessage(user, text)
>>> api.DestroyDirectMessage(id)
>>> api.DestroyFriendship(user)
>>> api.CreateFriendship(user)
>>> api.GetUserByEmail(email)
>>> api.VerifyCredentials()
'''
DEFAULT_CACHE_TIMEOUT = 60 # cache for 1 minute
_API_REALM = 'Twitter API'
  def __init__(self,
               consumer_key = None,
               consumer_secret = None,
               access_token_key = None,
               access_token_secret = None,
               input_encoding = None,
               request_headers = None,
               cache = DEFAULT_CACHE,
               shortner = None,
               base_url = None,
               use_gzip_compression = False,
               debugHTTP = False):
    '''Instantiate a new twitter.Api object.

    Args:
      consumer_key:
        Your Twitter user's consumer_key.
      consumer_secret:
        Your Twitter user's consumer_secret.
      access_token_key:
        The oAuth access token key value you retrieved
        from running get_access_token.py.
      access_token_secret:
        The oAuth access token's secret, also retrieved
        from the get_access_token.py run.
      input_encoding:
        The encoding used to encode input strings. [Optional]
      request_headers:
        A dictionary of additional HTTP request headers. [Optional]
      cache:
        The cache instance to use. Defaults to DEFAULT_CACHE.
        Use None to disable caching. [Optional]
      shortner:
        The shortner instance to use. Defaults to None.
        See shorten_url.py for an example shortner. [Optional]
      base_url:
        The base URL to use to contact the Twitter API.
        Defaults to https://api.twitter.com/1.1. [Optional]
      use_gzip_compression:
        Set to True to enable gzip compression for any call
        made to Twitter. Defaults to False. [Optional]
      debugHTTP:
        Set to True to enable debug output from urllib2 when performing
        any HTTP requests. Defaults to False. [Optional]

    Raises:
      TwitterError:
        If consumer_key is supplied without a complete oAuth
        access-token key/secret pair.
    '''
    self.SetCache(cache)
    # Held as an attribute (rather than used directly) so callers/tests can
    # substitute a stand-in for urllib2.
    self._urllib = urllib2
    self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
    self._input_encoding = input_encoding
    self._use_gzip = use_gzip_compression
    self._debugHTTP = debugHTTP
    self._oauth_consumer = None
    # Assumed length of a t.co-wrapped link when estimating status length;
    # see _calculate_status_length.
    self._shortlink_size = 19
    self._InitializeRequestHeaders(request_headers)
    self._InitializeUserAgent()
    self._InitializeDefaultParameters()
    if base_url is None:
      self.base_url = 'https://api.twitter.com/1.1'
    else:
      self.base_url = base_url
    # API v1.1 rejects unauthenticated calls, so fail fast when a consumer
    # key is given without the matching access-token pair.
    if consumer_key is not None and (access_token_key is None or
                                     access_token_secret is None):
      print >> sys.stderr, 'Twitter now requires an oAuth Access Token for API calls.'
      print >> sys.stderr, 'If your using this library from a command line utility, please'
      print >> sys.stderr, 'run the the included get_access_token.py tool to generate one.'
      raise TwitterError('Twitter requires oAuth Access Token for all API access')
    self.SetCredentials(consumer_key, consumer_secret, access_token_key, access_token_secret)
def SetCredentials(self,
consumer_key,
consumer_secret,
access_token_key = None,
access_token_secret = None):
'''Set the consumer_key and consumer_secret for this instance
Args:
consumer_key:
The consumer_key of the twitter account.
consumer_secret:
The consumer_secret for the twitter account.
access_token_key:
The oAuth access token key value you retrieved
from running get_access_token.py.
access_token_secret:
The oAuth access token's secret, also retrieved
from the get_access_token.py run.
'''
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._access_token_key = access_token_key
self._access_token_secret = access_token_secret
self._oauth_consumer = None
if consumer_key is not None and consumer_secret is not None and \
access_token_key is not None and access_token_secret is not None:
self._signature_method_plaintext = oauth.SignatureMethod_PLAINTEXT()
self._signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
self._oauth_token = oauth.Token(key = access_token_key, secret = access_token_secret)
self._oauth_consumer = oauth.Consumer(key = consumer_key, secret = consumer_secret)
def ClearCredentials(self):
'''Clear the any credentials for this instance
'''
self._consumer_key = None
self._consumer_secret = None
self._access_token_key = None
self._access_token_secret = None
self._oauth_consumer = None
def GetSearch(self,
term = None,
geocode = None,
since_id = None,
max_id = None,
until = None,
count = 15,
lang = None,
locale = None,
result_type = "mixed",
include_entities = None):
'''Return twitter search results for a given term.
Args:
term:
Term to search by. Optional if you include geocode.
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than (that is, older
than) or equal to the specified ID. [Optional]
until:
Returns tweets generated before the given date. Date should be
formatted as YYYY-MM-DD. [Optional]
geocode:
Geolocation information in the form (latitude, longitude, radius)
[Optional]
count:
Number of results to return. Default is 15 [Optional]
lang:
Language for results as ISO 639-1 code. Default is None (all languages)
[Optional]
locale:
Language of the search query. Currently only 'ja' is effective. This is
intended for language-specific consumers and the default should work in
the majority of cases.
result_type:
Type of result which should be returned. Default is "mixed". Other
valid options are "recent" and "popular". [Optional]
include_entities:
If True, each tweet will include a node called "entities,".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message containing
the term
'''
# Build request parameters
parameters = {}
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if until:
parameters['until'] = until
if lang:
parameters['lang'] = lang
if locale:
parameters['locale'] = locale
if term is None and geocode is None:
return []
if term is not None:
parameters['q'] = term
if geocode is not None:
parameters['geocode'] = ','.join(map(str, geocode))
if include_entities:
parameters['include_entities'] = 1
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if result_type in ["mixed", "popular", "recent"]:
parameters['result_type'] = result_type
# Make and send requests
url = '%s/search/tweets.json' % self.base_url
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
# Return built list of statuses
return [Status.NewFromJsonDict(x) for x in data['statuses']]
def GetUsersSearch(self,
term = None,
page = 1,
count = 20,
include_entities = None):
'''Return twitter user search results for a given term.
Args:
term:
Term to search by.
page:
Page of results to return. Default is 1
[Optional]
count:
Number of results to return. Default is 20
[Optional]
include_entities:
If True, each tweet will include a node called "entities,".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and hashtags.
[Optional]
Returns:
A sequence of twitter.User instances, one for each message containing
the term
'''
# Build request parameters
parameters = {}
if term is not None:
parameters['q'] = term
if include_entities:
parameters['include_entities'] = 1
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
# Make and send requests
url = '%s/users/search.json' % self.base_url
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [User.NewFromJsonDict(x) for x in data]
def GetTrendsCurrent(self, exclude = None):
'''Get the current top trending topics (global)
Args:
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 10 entries. Each entry contains a trend.
'''
return self.GetTrendsWoeid(id = 1, exclude = exclude)
def GetTrendsWoeid(self, id, exclude = None):
'''Return the top 10 trending topics for a specific WOEID, if trending
information is available for it.
Args:
woeid:
the Yahoo! Where On Earth ID for a location.
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 10 entries. Each entry contains a trend.
'''
url = '%s/trends/place.json' % (self.base_url)
parameters = {'id': id}
if exclude:
parameters['exclude'] = exclude
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
trends = []
timestamp = data[0]['as_of']
for trend in data[0]['trends']:
trends.append(Trend.NewFromJsonDict(trend, timestamp = timestamp))
return trends
def GetHomeTimeline(self,
count = None,
since_id = None,
max_id = None,
trim_user = False,
exclude_replies = False,
contributor_details = False,
include_entities = True):
'''
Fetch a collection of the most recent Tweets and retweets posted by the
authenticating user and the users they follow.
The home timeline is central to how most users interact with the Twitter
service.
The twitter.Api instance must be authenticated.
Args:
count:
Specifies the number of statuses to retrieve. May not be
greater than 200. Defaults to 20. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
When True, each tweet returned in a timeline will include a user
object including only the status authors numerical ID. Omit this
parameter to receive the complete user object. [Optional]
exclude_replies:
This parameter will prevent replies from appearing in the
returned timeline. Using exclude_replies with the count
parameter will mean you will receive up-to count tweets -
this is because the count parameter retrieves that many
tweets before filtering out retweets and replies.
[Optional]
contributor_details:
This parameter enhances the contributors element of the
status response to include the screen_name of the contributor.
By default only the user_id of the contributor is included.
[Optional]
include_entities:
The entities node will be disincluded when set to false.
This node offers a variety of metadata about the tweet in a
discreet structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message
'''
url = '%s/statuses/home_timeline.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
parameters = {}
if count is not None:
try:
if int(count) > 200:
raise TwitterError("'count' may not be greater than 200")
except ValueError:
raise TwitterError("'count' must be an integer")
parameters['count'] = count
if since_id:
try:
parameters['since_id'] = long(since_id)
except ValueError:
raise TwitterError("'since_id' must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except ValueError:
raise TwitterError("'max_id' must be an integer")
if trim_user:
parameters['trim_user'] = 1
if exclude_replies:
parameters['exclude_replies'] = 1
if contributor_details:
parameters['contributor_details'] = 1
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
def GetUserTimeline(self,
user_id = None,
screen_name = None,
since_id = None,
max_id = None,
count = None,
include_rts = None,
trim_user = None,
exclude_replies = None):
'''Fetch the sequence of public Status messages for a single user.
The twitter.Api instance must be authenticated if the user is private.
Args:
user_id:
Specifies the ID of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid user ID
is also a valid screen name. [Optional]
screen_name:
Specifies the screen name of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid screen
name is also a user ID. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than (that is, older
than) or equal to the specified ID. [Optional]
count:
Specifies the number of statuses to retrieve. May not be
greater than 200. [Optional]
include_rts:
If True, the timeline will contain native retweets (if they
exist) in addition to the standard stream of tweets. [Optional]
trim_user:
If True, statuses will only contain the numerical user ID only.
Otherwise a full user object will be returned for each status.
[Optional]
exclude_replies:
If True, this will prevent replies from appearing in the returned
timeline. Using exclude_replies with the count parameter will mean you
will receive up-to count tweets - this is because the count parameter
retrieves that many tweets before filtering out retweets and replies.
This parameter is only supported for JSON and XML responses. [Optional]
Returns:
A sequence of Status instances, one for each message up to count
'''
parameters = {}
url = '%s/statuses/user_timeline.json' % (self.base_url)
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = screen_name
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if include_rts:
parameters['include_rts'] = 1
if trim_user:
parameters['trim_user'] = 1
if exclude_replies:
parameters['exclude_replies'] = 1
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
def GetStatus(self,
id,
trim_user = False,
include_my_retweet = True,
include_entities = True):
'''Returns a single status message, specified by the id parameter.
The twitter.Api instance must be authenticated.
Args:
id:
The numeric ID of the status you are trying to retrieve.
trim_user:
When set to True, each tweet returned in a timeline will include
a user object including only the status authors numerical ID.
Omit this parameter to receive the complete user object.
[Optional]
include_my_retweet:
When set to True, any Tweets returned that have been retweeted by
the authenticating user will include an additional
current_user_retweet node, containing the ID of the source status
for the retweet. [Optional]
include_entities:
If False, the entities node will be disincluded.
This node offers a variety of metadata about the tweet in a
discreet structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A twitter.Status instance representing that status message
'''
url = '%s/statuses/show.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
parameters = {}
try:
parameters['id'] = long(id)
except ValueError:
raise TwitterError("'id' must be an integer.")
if trim_user:
parameters['trim_user'] = 1
if include_my_retweet:
parameters['include_my_retweet'] = 1
if not include_entities:
parameters['include_entities'] = 'none'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def DestroyStatus(self, id, trim_user = False):
'''Destroys the status specified by the required ID parameter.
The twitter.Api instance must be authenticated and the
authenticating user must be the author of the specified status.
Args:
id:
The numerical ID of the status you're trying to destroy.
Returns:
A twitter.Status instance representing the destroyed status message
'''
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
try:
post_data = {'id': long(id)}
except:
raise TwitterError("id must be an integer")
url = '%s/statuses/destroy/%s.json' % (self.base_url, id)
if trim_user:
post_data['trim_user'] = 1
json = self._FetchUrl(url, post_data = post_data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
@classmethod
def _calculate_status_length(cls, status, linksize = 19):
dummy_link_replacement = 'https://-%d-chars%s/' % (linksize, '-' * (linksize - 18))
shortened = ' '.join([x if not (x.startswith('http://') or
x.startswith('https://'))
else
dummy_link_replacement
for x in status.split(' ')])
return len(shortened)
def PostUpdate(self, status, in_reply_to_status_id = None, latitude = None, longitude = None, place_id = None, display_coordinates = False, trim_user = False):
'''Post a twitter status message from the authenticated user.
The twitter.Api instance must be authenticated.
https://dev.twitter.com/docs/api/1.1/post/statuses/update
Args:
status:
The message text to be posted.
Must be less than or equal to 140 characters.
in_reply_to_status_id:
The ID of an existing status that the status to be posted is
in reply to. This implicitly sets the in_reply_to_user_id
attribute of the resulting status to the user ID of the
message being replied to. Invalid/missing status IDs will be
ignored. [Optional]
latitude:
Latitude coordinate of the tweet in degrees. Will only work
in conjunction with longitude argument. Both longitude and
latitude will be ignored by twitter if the user has a false
geo_enabled setting. [Optional]
longitude:
Longitude coordinate of the tweet in degrees. Will only work
in conjunction with latitude argument. Both longitude and
latitude will be ignored by twitter if the user has a false
geo_enabled setting. [Optional]
place_id:
A place in the world. These IDs can be retrieved from
GET geo/reverse_geocode. [Optional]
display_coordinates:
Whether or not to put a pin on the exact coordinates a tweet
has been sent from. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A twitter.Status instance representing the message posted.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/update.json' % self.base_url
if isinstance(status, unicode) or self._input_encoding is None:
u_status = status
else:
u_status = unicode(status, self._input_encoding)
#if self._calculate_status_length(u_status, self._shortlink_size) > CHARACTER_LIMIT:
# raise TwitterError("Text must be less than or equal to %d characters. "
# "Consider using PostUpdates." % CHARACTER_LIMIT)
data = {'status': status}
if in_reply_to_status_id:
data['in_reply_to_status_id'] = in_reply_to_status_id
if latitude is not None and longitude is not None:
data['lat'] = str(latitude)
data['long'] = str(longitude)
if place_id is not None:
data['place_id'] = str(place_id)
if display_coordinates:
data['display_coordinates'] = 'true'
if trim_user:
data['trim_user'] = 'true'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation = None, **kwargs):
'''Post one or more twitter status messages from the authenticated user.
Unlike api.PostUpdate, this method will post multiple status updates
if the message is longer than 140 characters.
The twitter.Api instance must be authenticated.
Args:
status:
The message text to be posted.
May be longer than 140 characters.
continuation:
The character string, if any, to be appended to all but the
last message. Note that Twitter strips trailing '...' strings
from messages. Consider using the unicode \u2026 character
(horizontal ellipsis) instead. [Defaults to None]
**kwargs:
See api.PostUpdate for a list of accepted parameters.
Returns:
A of list twitter.Status instance representing the messages posted.
'''
results = list()
if continuation is None:
continuation = ''
line_length = CHARACTER_LIMIT - len(continuation)
lines = textwrap.wrap(status, line_length)
for line in lines[0:-1]:
results.append(self.PostUpdate(line + continuation, **kwargs))
results.append(self.PostUpdate(lines[-1], **kwargs))
return results
def PostRetweet(self, original_id, trim_user = False):
'''Retweet a tweet with the Retweet API.
The twitter.Api instance must be authenticated.
Args:
original_id:
The numerical id of the tweet that will be retweeted
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A twitter.Status instance representing the original tweet with retweet details embedded.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
try:
if int(original_id) <= 0:
raise TwitterError("'original_id' must be a positive number")
except ValueError:
raise TwitterError("'original_id' must be an integer")
url = '%s/statuses/retweet/%s.json' % (self.base_url, original_id)
data = {'id': original_id}
if trim_user:
data['trim_user'] = 'true'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def GetUserRetweets(self, count = None, since_id = None, max_id = None, trim_user = False):
'''Fetch the sequence of retweets made by the authenticated user.
The twitter.Api instance must be authenticated.
Args:
count:
The number of status messages to retrieve. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A sequence of twitter.Status instances, one for each message up to count
'''
return self.GetUserTimeline(since_id = since_id, count = count, max_id = max_id, trim_user = trim_user, exclude_replies = True, include_rts = True)
def GetReplies(self, since_id = None, count = None, max_id = None, trim_user = False):
'''Get a sequence of status messages representing the 20 most
recent replies (status updates prefixed with @twitterID) to the
authenticating user.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A sequence of twitter.Status instances, one for each reply to the user.
'''
return self.GetUserTimeline(since_id = since_id, count = count, max_id = max_id, trim_user = trim_user, exclude_replies = False, include_rts = False)
def GetRetweets(self, statusid, count = None, trim_user = False):
'''Returns up to 100 of the first retweets of the tweet identified
by statusid
Args:
statusid:
The ID of the tweet for which retweets should be searched for
count:
The number of status messages to retrieve. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A list of twitter.Status instances, which are retweets of statusid
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instsance must be authenticated.")
url = '%s/statuses/retweets/%s.json' % (self.base_url, statusid)
parameters = {}
if trim_user:
parameters['trim_user'] = 'true'
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(s) for s in data]
def GetRetweetsOfMe(self,
count = None,
since_id = None,
max_id = None,
trim_user = False,
include_entities = True,
include_user_entities = True):
'''Returns up to 100 of the most recent tweets of the user that have been
retweeted by others.
Args:
count:
The number of retweets to retrieve, up to 100. If omitted, 20 is
assumed.
since_id:
Returns results with an ID greater than (newer than) this ID.
max_id:
Returns results with an ID less than or equal to this ID.
trim_user:
When True, the user object for each tweet will only be an ID.
include_entities:
When True, the tweet entities will be included.
include_user_entities:
When True, the user entities will be included.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/retweets_of_me.json' % self.base_url
parameters = {}
if count is not None:
try:
if int(count) > 100:
raise TwitterError("'count' may not be greater than 100")
except ValueError:
raise TwitterError("'count' must be an integer")
if count:
parameters['count'] = count
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if trim_user:
parameters['trim_user'] = trim_user
if not include_entities:
parameters['include_entities'] = include_entities
if not include_user_entities:
parameters['include_user_entities'] = include_user_entities
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(s) for s in data]
def GetFriends(self, user_id = None, screen_name = None, cursor = -1, skip_status = False, include_user_entities = False):
'''Fetch the sequence of twitter.User instances, one for each friend.
The twitter.Api instance must be authenticated.
Args:
user_id:
The twitter id of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
cursor:
Should be set to -1 for the initial call and then is used to
control what result page Twitter returns [Optional(ish)]
skip_status:
If True the statuses will not be returned in the user items.
[Optional]
include_user_entities:
When True, the user entities will be included.
Returns:
A sequence of twitter.User instances, one for each friend
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/friends/list.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if skip_status:
parameters['skip_status'] = True
if include_user_entities:
parameters['include_user_entities'] = True
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [User.NewFromJsonDict(x) for x in data['users']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def GetFriendIDs(self, user_id = None, screen_name = None, cursor = -1, stringify_ids = False, count = None):
'''Returns a list of twitter user id's for every person
the specified user is following.
Args:
user_id:
The id of the user to retrieve the id list for
[Optional]
screen_name:
The screen_name of the user to retrieve the id list for
[Optional]
cursor:
Specifies the Twitter API Cursor location to start at.
Note: there are pagination limits.
[Optional]
stringify_ids:
if True then twitter will return the ids as strings instead of integers.
[Optional]
count:
The number of status messages to retrieve. [Optional]
Returns:
A list of integers, one for each user id.
'''
url = '%s/friends/ids.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if stringify_ids:
parameters['stringify_ids'] = True
if count is not None:
parameters['count'] = count
result = []
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [x for x in data['ids']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def GetFollowerIDs(self, user_id = None, screen_name = None, cursor = -1, stringify_ids = False, count = None, total_count = None):
  '''Returns a list of twitter user ids for every person
  that is following the specified user.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      The id of the user to retrieve the id list for. [Optional]
    screen_name:
      The screen_name of the user to retrieve the id list for. [Optional]
    cursor:
      Specifies the Twitter API Cursor location to start at.
      Note: there are pagination limits. [Optional]
    stringify_ids:
      if True then twitter will return the ids as strings instead of
      integers. [Optional]
    count:
      The number of user ids to retrieve per API request. By default
      Twitter will retrieve 5000 UIDs per call. [Optional]
    total_count:
      The total amount of UIDs to retrieve, useful to avoid rate limits
      on accounts with many followers. The data returned might contain
      more UIDs if total_count is not a multiple of count. [Optional]

  Returns:
    A list of user ids, one for each follower.
  '''
  url = '%s/followers/ids.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  parameters = {}
  if user_id is not None:
    parameters['user_id'] = user_id
  if screen_name is not None:
    parameters['screen_name'] = screen_name
  if stringify_ids:
    parameters['stringify_ids'] = True
  if count is not None:
    parameters['count'] = count
  result = []
  while True:
    # Clamp the per-request count once fewer than `count` ids remain.
    # Both checks guard against None: previously `total_count < count`
    # and `total_count -= len(...)` raised TypeError when total_count
    # was left at its default of None and the user had multiple pages
    # of followers.
    if total_count is not None and count is not None and total_count < count:
      parameters['count'] = total_count
    parameters['cursor'] = cursor
    json = self._FetchUrl(url, parameters = parameters)
    data = self._ParseAndCheckTwitter(json)
    result += data['ids']
    if 'next_cursor' not in data:
      break
    if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
      break
    cursor = data['next_cursor']
    if total_count is not None:
      total_count -= len(data['ids'])
      if total_count < 1:
        break
  return result
def GetFollowers(self, user_id = None, screen_name = None, cursor = -1, skip_status = False, include_user_entities = False):
  '''Fetch twitter.User instances, one for each follower.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      The twitter id of the user whose followers you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    screen_name:
      The twitter name of the user whose followers you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    cursor:
      Should be set to -1 for the initial call; used afterwards to
      control which result page Twitter returns. [Optional(ish)]
    skip_status:
      If True the statuses will not be returned in the user items.
      [Optional]
    include_user_entities:
      When True, the user entities will be included. [Optional]

  Returns:
    A list of twitter.User instances, one for each follower.
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/followers/list.json' % self.base_url
  query = {}
  if user_id is not None:
    query['user_id'] = user_id
  if screen_name is not None:
    query['screen_name'] = screen_name
  if skip_status:
    query['skip_status'] = True
  if include_user_entities:
    query['include_user_entities'] = True
  followers = []
  # Page through the cursored result set until exhausted.
  while True:
    query['cursor'] = cursor
    data = self._ParseAndCheckTwitter(self._FetchUrl(url, parameters = query))
    followers.extend(User.NewFromJsonDict(u) for u in data['users'])
    if 'next_cursor' not in data:
      break
    next_cursor = data['next_cursor']
    if next_cursor == 0 or next_cursor == data['previous_cursor']:
      break
    cursor = next_cursor
  return followers
def UsersLookup(self, user_id = None, screen_name = None, users = None, include_entities = True):
  '''Fetch extended information for the specified users.

  Users may be specified either as lists of either user_ids,
  screen_names, or twitter.User objects. The list of users that
  are queried is the union of all specified parameters.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      A list of user_ids to retrieve extended information. [Optional]
    screen_name:
      A list of screen_names to retrieve extended information. [Optional]
    users:
      A list of twitter.User objects to retrieve extended information.
      [Optional]
    include_entities:
      The entities node that may appear within embedded statuses will be
      excluded when set to False. [Optional]

  Returns:
    A list of twitter.User objects for the requested users
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  if not user_id and not screen_name and not users:
    raise TwitterError("Specify at least one of user_id, screen_name, or users.")
  url = '%s/users/lookup.json' % self.base_url
  parameters = {}
  uids = list()
  if user_id:
    uids.extend(user_id)
  if users:
    # twitter.User objects contribute their numeric ids; explicit
    # user_ids and User objects are merged into one comma-separated
    # 'user_id' parameter.
    uids.extend([u.id for u in users])
  if len(uids):
    parameters['user_id'] = ','.join(["%s" % u for u in uids])
  if screen_name:
    parameters['screen_name'] = ','.join(screen_name)
  if not include_entities:
    parameters['include_entities'] = 'false'
  json = self._FetchUrl(url, parameters = parameters)
  try:
    data = self._ParseAndCheckTwitter(json)
  except TwitterError, e:
    # Python 2 'except X, e' binding; e is immediately re-read from
    # sys.exc_info() — redundant here but kept as-is (looks like a
    # leftover from a 2/3 compatibility rewrite).
    _, e, _ = sys.exc_info()
    t = e.args[0]
    # A single error entry with code 34 is treated as "no such user(s)"
    # and mapped to an empty result instead of propagating.
    # NOTE(review): presumably code 34 is Twitter's "page does not
    # exist" error — confirm against the Twitter error-code table.
    if len(t) == 1 and ('code' in t[0]) and (t[0]['code'] == 34):
      data = []
    else:
      raise
  return [User.NewFromJsonDict(u) for u in data]
def GetUser(self, user_id = None, screen_name = None, include_entities = True):
  '''Fetch a single user.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      The id of the user to retrieve. [Optional]
    screen_name:
      The screen name of the user to return results for. Either a
      user_id or screen_name is required. [Optional]
    include_entities:
      If set to False, the 'entities' node will not be included.
      [Optional]

  Returns:
    A twitter.User instance representing that user.
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  url = '%s/users/show.json' % (self.base_url)
  query = {}
  if user_id:
    query['user_id'] = user_id
  elif screen_name:
    query['screen_name'] = screen_name
  else:
    raise TwitterError("Specify at least one of user_id or screen_name.")
  if not include_entities:
    query['include_entities'] = 'false'
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, parameters = query))
  return User.NewFromJsonDict(payload)
def GetDirectMessages(self, since_id = None, max_id = None, count = None, include_entities = True, skip_status = False):
  '''Returns a list of the direct messages sent to the authenticating user.

  The twitter.Api instance must be authenticated.

  Args:
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. If the limit of accessible Tweets has
      occurred since the since_id, the since_id will be forced to the
      oldest ID available. [Optional]
    max_id:
      Returns results with an ID less than (that is, older than) or
      equal to the specified ID. [Optional]
    count:
      Specifies the number of direct messages to try and retrieve, up
      to a maximum of 200. [Optional]
    include_entities:
      The entities node will not be included when set to False.
      [Optional]
    skip_status:
      When set to True statuses will not be included in the returned
      user objects. [Optional]

  Returns:
    A sequence of twitter.DirectMessage instances

  Raises:
    TwitterError: if unauthenticated or count is not an integer.
  '''
  url = '%s/direct_messages.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  parameters = {}
  if since_id:
    parameters['since_id'] = since_id
  if max_id:
    parameters['max_id'] = max_id
  if count:
    try:
      parameters['count'] = int(count)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:', which also swallowed
      # KeyboardInterrupt/SystemExit.
      raise TwitterError("count must be an integer")
  if not include_entities:
    parameters['include_entities'] = 'false'
  if skip_status:
    parameters['skip_status'] = 1
  json = self._FetchUrl(url, parameters = parameters)
  data = self._ParseAndCheckTwitter(json)
  return [DirectMessage.NewFromJsonDict(x) for x in data]
def GetSentDirectMessages(self, since_id = None, max_id = None, count = None, page = None, include_entities = True):
  '''Returns a list of the direct messages sent by the authenticating user.

  The twitter.Api instance must be authenticated.

  Args:
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. If the limit of accessible Tweets has
      occurred since the since_id, the since_id will be forced to the
      oldest ID available. [Optional]
    max_id:
      Returns results with an ID less than (that is, older than) or
      equal to the specified ID. [Optional]
    count:
      Specifies the number of direct messages to try and retrieve, up
      to a maximum of 200. [Optional]
    page:
      Specifies the page of results to retrieve.
      Note: there are pagination limits. [Optional]
    include_entities:
      The entities node will not be included when set to False.
      [Optional]

  Returns:
    A sequence of twitter.DirectMessage instances

  Raises:
    TwitterError: if unauthenticated or count is not an integer.
  '''
  url = '%s/direct_messages/sent.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  parameters = {}
  if since_id:
    parameters['since_id'] = since_id
  if page:
    parameters['page'] = page
  if max_id:
    parameters['max_id'] = max_id
  if count:
    try:
      parameters['count'] = int(count)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:', which also swallowed
      # KeyboardInterrupt/SystemExit.
      raise TwitterError("count must be an integer")
  if not include_entities:
    parameters['include_entities'] = 'false'
  json = self._FetchUrl(url, parameters = parameters)
  data = self._ParseAndCheckTwitter(json)
  return [DirectMessage.NewFromJsonDict(x) for x in data]
def PostDirectMessage(self, text, user_id = None, screen_name = None):
  '''Post a twitter direct message from the authenticated user.

  The twitter.Api instance must be authenticated; either user_id or
  screen_name must be specified.

  Args:
    text: The message text to be posted. Must be less than 140 characters.
    user_id:
      The ID of the user who should receive the direct message. [Optional]
    screen_name:
      The screen name of the user who should receive the direct message.
      [Optional]

  Returns:
    A twitter.DirectMessage instance representing the message posted.
  '''
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  url = '%s/direct_messages/new.json' % self.base_url
  post_args = {'text': text}
  if user_id:
    post_args['user_id'] = user_id
  elif screen_name:
    post_args['screen_name'] = screen_name
  else:
    raise TwitterError("Specify at least one of user_id or screen_name.")
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return DirectMessage.NewFromJsonDict(payload)
def DestroyDirectMessage(self, id, include_entities = True):
  '''Destroy the direct message specified by the required id parameter.

  The twitter.Api instance must be authenticated, and the authenticating
  user must be the recipient of the specified direct message.

  Args:
    id: The id of the direct message to be destroyed.
    include_entities:
      The entities node will not be included when set to False. [Optional]

  Returns:
    A twitter.DirectMessage instance representing the message destroyed.
  '''
  url = '%s/direct_messages/destroy.json' % self.base_url
  post_args = {'id': id}
  if not include_entities:
    post_args['include_entities'] = 'false'
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return DirectMessage.NewFromJsonDict(payload)
def CreateFriendship(self, user_id = None, screen_name = None, follow = True):
  '''Befriend the user specified by user_id or screen_name.

  The twitter.Api instance must be authenticated.

  Args:
    user_id: A user_id to follow. [Optional]
    screen_name: A screen_name to follow. [Optional]
    follow: Set to False to disable notifications for the target user.

  Returns:
    A twitter.User instance representing the befriended user.
  '''
  url = '%s/friendships/create.json' % (self.base_url)
  post_args = {}
  if user_id:
    post_args['user_id'] = user_id
  elif screen_name:
    post_args['screen_name'] = screen_name
  else:
    raise TwitterError("Specify at least one of user_id or screen_name.")
  # Twitter expects the string literals 'true'/'false' here, not booleans.
  post_args['follow'] = 'true' if follow else 'false'
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return User.NewFromJsonDict(payload)
def DestroyFriendship(self, user_id = None, screen_name = None):
  '''Discontinue friendship with the given user_id or screen_name.

  The twitter.Api instance must be authenticated.

  Args:
    user_id: A user_id to unfollow. [Optional]
    screen_name: A screen_name to unfollow. [Optional]

  Returns:
    A twitter.User instance representing the discontinued friend.
  '''
  url = '%s/friendships/destroy.json' % self.base_url
  post_args = {}
  if user_id:
    post_args['user_id'] = user_id
  elif screen_name:
    post_args['screen_name'] = screen_name
  else:
    raise TwitterError("Specify at least one of user_id or screen_name.")
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return User.NewFromJsonDict(payload)
def CreateFavorite(self, status = None, id = None, include_entities = True):
  '''Mark the given status (object or id) as a favorite.

  Returns the favorite status when successful. The twitter.Api instance
  must be authenticated.

  Args:
    id: The id of the twitter status to mark as a favorite. [Optional]
    status: The twitter.Status object to mark as a favorite. [Optional]
    include_entities: The entities node will be omitted when set to False.

  Returns:
    A twitter.Status instance representing the newly-marked favorite.
  '''
  url = '%s/favorites/create.json' % self.base_url
  post_args = {}
  # An explicit id wins over a Status object when both are supplied.
  if id:
    post_args['id'] = id
  elif status:
    post_args['id'] = status.id
  else:
    raise TwitterError("Specify id or status")
  if not include_entities:
    post_args['include_entities'] = 'false'
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return Status.NewFromJsonDict(payload)
def DestroyFavorite(self, status = None, id = None, include_entities = True):
  '''Un-favorite the given status (object or id).

  Returns the un-favorited status when successful. The twitter.Api
  instance must be authenticated.

  Args:
    id: The id of the twitter status to unmark as a favorite. [Optional]
    status: The twitter.Status object to unmark as a favorite. [Optional]
    include_entities: The entities node will be omitted when set to False.

  Returns:
    A twitter.Status instance representing the newly-unmarked favorite.
  '''
  url = '%s/favorites/destroy.json' % self.base_url
  post_args = {}
  # An explicit id wins over a Status object when both are supplied.
  if id:
    post_args['id'] = id
  elif status:
    post_args['id'] = status.id
  else:
    raise TwitterError("Specify id or status")
  if not include_entities:
    post_args['include_entities'] = 'false'
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return Status.NewFromJsonDict(payload)
def GetFavorites(self,
                 user_id = None,
                 screen_name = None,
                 count = None,
                 since_id = None,
                 max_id = None,
                 include_entities = True):
  '''Return a list of Status objects representing favorited tweets.

  By default, returns the (up to) 20 most recent tweets for the
  authenticated user.

  Args:
    user_id:
      The twitter id of the user whose favorites you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    screen_name:
      The twitter name of the user whose favorites you are fetching.
      If not specified, defaults to the authenticated user. [Optional]
    count:
      The number of tweets to try and retrieve. [Optional]
    since_id:
      Returns results with an ID greater than (more recent than) the
      specified ID. [Optional]
    max_id:
      Returns results with an ID less than (older than) or equal to the
      specified ID. [Optional]
    include_entities:
      When True the entities node is requested. [Optional]

  Returns:
    A list of twitter.Status instances.

  Raises:
    TwitterError: if since_id, max_id or count is not an integer.
  '''
  parameters = {}
  url = '%s/favorites/list.json' % self.base_url
  if user_id:
    parameters['user_id'] = user_id
  elif screen_name:
    # Bug fix: this previously assigned user_id (always falsy on this
    # branch), silently ignoring the screen_name argument.
    parameters['screen_name'] = screen_name
  if since_id:
    try:
      parameters['since_id'] = long(since_id)
    except (TypeError, ValueError):
      raise TwitterError("since_id must be an integer")
  if max_id:
    try:
      parameters['max_id'] = long(max_id)
    except (TypeError, ValueError):
      raise TwitterError("max_id must be an integer")
  if count:
    try:
      parameters['count'] = int(count)
    except (TypeError, ValueError):
      raise TwitterError("count must be an integer")
  if include_entities:
    parameters['include_entities'] = True
  json = self._FetchUrl(url, parameters = parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def GetMentions(self,
                count = None,
                since_id = None,
                max_id = None,
                trim_user = False,
                contributor_details = False,
                include_entities = True):
  '''Returns the 20 most recent mentions (status containing @screen_name)
  for the authenticating user.

  Args:
    count:
      Specifies the number of tweets to try and retrieve, up to a
      maximum of 200. [Optional]
    since_id:
      Returns results with an ID greater than (that is, more recent
      than) the specified ID. If the limit of accessible Tweets has
      occurred since the since_id, the since_id will be forced to the
      oldest ID available. [Optional]
    max_id:
      Returns only statuses with an ID less than (that is, older than)
      the specified ID. [Optional]
    trim_user:
      When True, each returned tweet includes a user object with only
      the author's numerical ID. [Optional]
    contributor_details:
      If True, the contributors element includes the contributor's
      screen_name in addition to the user_id. [Optional]
    include_entities:
      The entities node will be excluded when set to False. [Optional]

  Returns:
    A sequence of twitter.Status instances, one per mention of the user.

  Raises:
    TwitterError: if unauthenticated or a numeric argument is invalid.
  '''
  url = '%s/statuses/mentions_timeline.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  parameters = {}
  # Each bare 'except:' below was narrowed to the conversion errors
  # int()/long() can actually raise.
  if count:
    try:
      parameters['count'] = int(count)
    except (TypeError, ValueError):
      raise TwitterError("count must be an integer")
  if since_id:
    try:
      parameters['since_id'] = long(since_id)
    except (TypeError, ValueError):
      raise TwitterError("since_id must be an integer")
  if max_id:
    try:
      parameters['max_id'] = long(max_id)
    except (TypeError, ValueError):
      raise TwitterError("max_id must be an integer")
  if trim_user:
    parameters['trim_user'] = 1
  if contributor_details:
    parameters['contributor_details'] = 'true'
  if not include_entities:
    parameters['include_entities'] = 'false'
  json = self._FetchUrl(url, parameters = parameters)
  data = self._ParseAndCheckTwitter(json)
  return [Status.NewFromJsonDict(x) for x in data]
def CreateList(self, name, mode = None, description = None):
  '''Create a new list with the given name for the authenticated user.

  The twitter.Api instance must be authenticated.

  Args:
    name: New name for the list.
    mode: 'public' or 'private'. Defaults to 'public'. [Optional]
    description: Description of the list. [Optional]

  Returns:
    A twitter.List instance representing the new list.
  '''
  url = '%s/lists/create.json' % self.base_url
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  post_args = {'name': name}
  if mode is not None:
    post_args['mode'] = mode
  if description is not None:
    post_args['description'] = description
  payload = self._ParseAndCheckTwitter(self._FetchUrl(url, post_data = post_args))
  return List.NewFromJsonDict(payload)
def DestroyList(self,
                owner_screen_name = False,
                owner_id = False,
                list_id = None,
                slug = None):
  '''
  Destroys the list identified by list_id, or by slug together with
  owner_screen_name or owner_id.

  The twitter.Api instance must be authenticated.

  Args:
    owner_screen_name:
      The screen_name of the user who owns the list being requested by a slug.
    owner_id:
      The user ID of the user who owns the list being requested by a slug.
    list_id:
      The numerical id of the list.
    slug:
      You can identify a list by its slug instead of its numerical id. If you
      decide to do so, note that you'll also have to specify the list owner
      using the owner_id or owner_screen_name parameters.

  Returns:
    A twitter.List instance representing the removed list.

  Raises:
    TwitterError: if the list is not fully identified or an id is not
      an integer.
  '''
  url = '%s/lists/destroy.json' % self.base_url
  data = {}
  if list_id:
    try:
      data['list_id'] = long(list_id)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:'.
      raise TwitterError("list_id must be an integer")
  elif slug:
    data['slug'] = slug
    # A slug alone is ambiguous; the owner must also be identified.
    if owner_id:
      try:
        data['owner_id'] = long(owner_id)
      except (TypeError, ValueError):
        raise TwitterError("owner_id must be an integer")
    elif owner_screen_name:
      data['owner_screen_name'] = owner_screen_name
    else:
      raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  else:
    raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  json = self._FetchUrl(url, post_data = data)
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def CreateSubscription(self,
                       owner_screen_name = False,
                       owner_id = False,
                       list_id = None,
                       slug = None):
  '''Creates a subscription to a list by the authenticated user.

  The twitter.Api instance must be authenticated.

  Args:
    owner_screen_name:
      The screen_name of the user who owns the list being requested by a slug.
    owner_id:
      The user ID of the user who owns the list being requested by a slug.
    list_id:
      The numerical id of the list.
    slug:
      You can identify a list by its slug instead of its numerical id. If you
      decide to do so, note that you'll also have to specify the list owner
      using the owner_id or owner_screen_name parameters.

  Returns:
    A twitter.List instance representing the list subscribed to.

  Raises:
    TwitterError: if unauthenticated, the list is not fully identified,
      or an id is not an integer.
  '''
  url = '%s/lists/subscribers/create.json' % (self.base_url)
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  data = {}
  if list_id:
    try:
      data['list_id'] = long(list_id)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:'.
      raise TwitterError("list_id must be an integer")
  elif slug:
    data['slug'] = slug
    # A slug alone is ambiguous; the owner must also be identified.
    if owner_id:
      try:
        data['owner_id'] = long(owner_id)
      except (TypeError, ValueError):
        raise TwitterError("owner_id must be an integer")
    elif owner_screen_name:
      data['owner_screen_name'] = owner_screen_name
    else:
      raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  else:
    raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  json = self._FetchUrl(url, post_data = data)
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def DestroySubscription(self,
                        owner_screen_name = False,
                        owner_id = False,
                        list_id = None,
                        slug = None):
  '''Destroys the subscription to a list for the authenticated user.

  The twitter.Api instance must be authenticated.

  Args:
    owner_screen_name:
      The screen_name of the user who owns the list being requested by a slug.
    owner_id:
      The user ID of the user who owns the list being requested by a slug.
    list_id:
      The numerical id of the list.
    slug:
      You can identify a list by its slug instead of its numerical id. If you
      decide to do so, note that you'll also have to specify the list owner
      using the owner_id or owner_screen_name parameters.

  Returns:
    A twitter.List instance representing the removed list.

  Raises:
    TwitterError: if unauthenticated, the list is not fully identified,
      or an id is not an integer.
  '''
  url = '%s/lists/subscribers/destroy.json' % (self.base_url)
  if not self._oauth_consumer:
    raise TwitterError("The twitter.Api instance must be authenticated.")
  data = {}
  if list_id:
    try:
      data['list_id'] = long(list_id)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:'.
      raise TwitterError("list_id must be an integer")
  elif slug:
    data['slug'] = slug
    # A slug alone is ambiguous; the owner must also be identified.
    if owner_id:
      try:
        data['owner_id'] = long(owner_id)
      except (TypeError, ValueError):
        raise TwitterError("owner_id must be an integer")
    elif owner_screen_name:
      data['owner_screen_name'] = owner_screen_name
    else:
      raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  else:
    raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
  json = self._FetchUrl(url, post_data = data)
  data = self._ParseAndCheckTwitter(json)
  return List.NewFromJsonDict(data)
def GetSubscriptions(self, user_id = None, screen_name = None, count = 20, cursor = -1):
  '''
  Obtain a collection of the lists the specified user is subscribed to,
  20 lists per page by default. Does not include the user's own lists.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      The ID of the user for whom to return results for. [Optional]
    screen_name:
      The screen name of the user for whom to return results for.
      [Optional]
    count:
      The amount of results to return per page. Defaults to 20.
      No more than 1000 results will ever be returned in a single page.
    cursor:
      "page" value that Twitter will use to start building the
      list sequence from. -1 to start at the beginning.
      Twitter will return in the result the values for next_cursor
      and previous_cursor. [Optional]

  Returns:
    A sequence of twitter.List instances, one for each list.

  Raises:
    TwitterError: if unauthenticated, neither user is identified, or a
      numeric argument is invalid.
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/lists/subscriptions.json' % (self.base_url)
  parameters = {}
  # Bare 'except:' clauses narrowed to the errors int()/long() raise.
  try:
    parameters['cursor'] = int(cursor)
  except (TypeError, ValueError):
    raise TwitterError("cursor must be an integer")
  try:
    parameters['count'] = int(count)
  except (TypeError, ValueError):
    raise TwitterError("count must be an integer")
  if user_id is not None:
    try:
      parameters['user_id'] = long(user_id)
    except (TypeError, ValueError):
      raise TwitterError('user_id must be an integer')
  elif screen_name is not None:
    parameters['screen_name'] = screen_name
  else:
    raise TwitterError('Specify user_id or screen_name')
  json = self._FetchUrl(url, parameters = parameters)
  data = self._ParseAndCheckTwitter(json)
  return [List.NewFromJsonDict(x) for x in data['lists']]
def GetLists(self, user_id = None, screen_name = None, count = None, cursor = -1):
  '''Fetch the sequence of lists for a user.

  The twitter.Api instance must be authenticated.

  Args:
    user_id:
      The ID of the user for whom to return results for. [Optional]
    screen_name:
      The screen name of the user for whom to return results for.
      [Optional]
    count:
      The amount of results to return per page. Defaults to 20. No more
      than 1000 results will ever be returned in a single page.
      [Optional]
    cursor:
      "page" value that Twitter will use to start building the
      list sequence from. -1 to start at the beginning.
      Twitter will return in the result the values for next_cursor
      and previous_cursor. [Optional]

  Returns:
    A sequence of twitter.List instances, one for each list.

  Raises:
    TwitterError: if unauthenticated, neither user is identified, or
      user_id is not an integer.
  '''
  if not self._oauth_consumer:
    raise TwitterError("twitter.Api instance must be authenticated")
  url = '%s/lists/ownerships.json' % self.base_url
  result = []
  parameters = {}
  if user_id is not None:
    try:
      parameters['user_id'] = long(user_id)
    except (TypeError, ValueError):
      # Narrowed from a bare 'except:'.
      raise TwitterError('user_id must be an integer')
  elif screen_name is not None:
    parameters['screen_name'] = screen_name
  else:
    raise TwitterError('Specify user_id or screen_name')
  if count is not None:
    parameters['count'] = count
  # Page through the cursored result set until exhausted.
  while True:
    parameters['cursor'] = cursor
    json = self._FetchUrl(url, parameters = parameters)
    data = self._ParseAndCheckTwitter(json)
    result += [List.NewFromJsonDict(x) for x in data['lists']]
    if 'next_cursor' not in data:
      break
    if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
      break
    cursor = data['next_cursor']
  return result
def VerifyCredentials(self):
  '''Returns a twitter.User instance if the authenticating user is valid.

  Returns:
    A twitter.User instance representing that user if the
    credentials are valid, None otherwise.
  '''
  if not self._oauth_consumer:
    raise TwitterError("Api instance must first be given user credentials.")
  url = '%s/account/verify_credentials.json' % self.base_url
  try:
    # no_cache: credentials must be checked against the live service,
    # never answered from the local response cache.
    json = self._FetchUrl(url, no_cache = True)
  except urllib2.HTTPError, http_error:
    if http_error.code == httplib.UNAUTHORIZED:
      # HTTP 401 means the credentials are invalid; signalled as None
      # rather than an exception, per the docstring contract.
      return None
    else:
      raise http_error
  data = self._ParseAndCheckTwitter(json)
  return User.NewFromJsonDict(data)
def SetCache(self, cache):
  '''Override the default cache. Set to None to prevent caching.

  Args:
    cache:
      An instance that supports the same API as the twitter._FileCache
  '''
  # The DEFAULT_CACHE sentinel requests a fresh file-backed cache.
  self._cache = _FileCache() if cache == DEFAULT_CACHE else cache
def SetUrllib(self, urllib):
  '''Override the default urllib implementation.

  Args:
    urllib:
      An instance that supports the same API as the urllib2 module
  '''
  # Note: the parameter intentionally shadows the stdlib 'urllib' name
  # inside this method; it is part of the public signature.
  self._urllib = urllib
def SetCacheTimeout(self, cache_timeout):
  '''Override the default cache timeout.

  Args:
    cache_timeout:
      Time, in seconds, that responses may be served from the cache.
  '''
  self._cache_timeout = cache_timeout
def SetUserAgent(self, user_agent):
  '''Override the default user agent.

  Args:
    user_agent:
      A string to send to the server as the User-Agent header.
  '''
  self._request_headers['User-Agent'] = user_agent
def SetXTwitterHeaders(self, client, url, version):
  '''Set the X-Twitter HTTP headers that will be sent to the server.

  Args:
    client:
      The client name, sent as the 'X-Twitter-Client' header.
    url:
      The URL of the meta.xml, sent as the 'X-Twitter-Client-URL' header.
    version:
      The client version, sent as the 'X-Twitter-Client-Version' header.
  '''
  headers = self._request_headers
  headers['X-Twitter-Client'] = client
  headers['X-Twitter-Client-URL'] = url
  headers['X-Twitter-Client-Version'] = version
def SetSource(self, source):
  '''Suggest the "from source" value shown on the Twitter web site.

  The value of the 'source' parameter must be first recognized by
  the Twitter server; new source values are authorized case by case
  by the Twitter development team.

  Args:
    source:
      The source name, sent to the server as the 'source' parameter.
  '''
  self._default_params['source'] = source
def GetRateLimitStatus(self, resources = None):
  '''Fetch the rate limit status for the currently authorized user.

  Args:
    resources:
      A comma separated list of resource families whose current rate
      limit disposition you want. [Optional]

  Returns:
    The parsed rate-limit response dictionary as returned by the
    application/rate_limit_status endpoint.
  '''
  query = {}
  if resources is not None:
    query['resources'] = resources
  url = '%s/application/rate_limit_status.json' % self.base_url
  # no_cache: rate-limit data is time-sensitive and must not be served
  # from the response cache.
  return self._ParseAndCheckTwitter(
      self._FetchUrl(url, parameters = query, no_cache = True))
def MaximumHitFrequency(self):
  '''Determines the minimum number of seconds that a program must wait
  before hitting the server again without exceeding the rate_limit
  imposed for the currently authenticated user.

  Returns:
    The minimum second interval that a program must use so as to not
    exceed the rate_limit imposed for the user; 60 if no reset time is
    reported.
  '''
  rate_status = self.GetRateLimitStatus()
  reset_time = rate_status.get('reset_time', None)
  limit = rate_status.get('remaining_hits', None)
  if reset_time:
    # Put the reset time into a datetime object. Bug fix: this used
    # [:7], which passed tm_wday from the parsed time tuple as the
    # datetime 'microsecond' argument; only the first six fields
    # (year..second) are wanted.
    reset = datetime.datetime(*rfc822.parsedate(reset_time)[:6])
    # Find the difference in time between now and the reset time + 1 hour.
    delta = reset + datetime.timedelta(hours = 1) - datetime.datetime.utcnow()
    if not limit:
      return int(delta.seconds)
    # Determine the minimum number of seconds allowed as a regular interval.
    max_frequency = int(delta.seconds / limit) + 1
    # Return the number of seconds.
    return max_frequency
  return 60
def _BuildUrl(self, url, path_elements = None, extra_params = None):
  '''Return url with path_elements appended to the path and
  extra_params merged into the query string.'''
  # Break the url into its constituent parts.
  (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
  if path_elements:
    # Drop path elements that have a value of None (or are empty).
    non_empty = [element for element in path_elements if element]
    if not path.endswith('/'):
      path += '/'
    path += '/'.join(non_empty)
  if extra_params and len(extra_params) > 0:
    extra_query = self._EncodeParameters(extra_params)
    # Append to the existing query string, if any.
    query = query + '&' + extra_query if query else extra_query
  # Reassemble the rebuilt URL.
  return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def _InitializeRequestHeaders(self, request_headers):
if request_headers:
self._request_headers = request_headers
else:
self._request_headers = {}
def _InitializeUserAgent(self):
  '''Install the default User-Agent header (urllib + library versions).'''
  self.SetUserAgent('Python-urllib/%s (python-twitter/%s)' %
                    (self._urllib.__version__, __version__))
def _InitializeDefaultParameters(self):
self._default_params = {}
def _DecompressGzippedResponse(self, response):
raw_data = response.read()
if response.headers.get('content-encoding', None) == 'gzip':
url_data = gzip.GzipFile(fileobj = StringIO.StringIO(raw_data)).read()
else:
url_data = raw_data
return url_data
def _Encode(self, s):
if self._input_encoding:
return unicode(s, self._input_encoding).encode('utf-8')
else:
return unicode(s).encode('utf-8')
def _EncodeParameters(self, parameters):
'''Return a string in key=value&key=value form
Values of None are not included in the output string.
Args:
parameters:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if parameters is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
def _EncodePostData(self, post_data):
'''Return a string in key=value&key=value form
Values are assumed to be encoded in the format specified by self._encoding,
and are subsequently URL encoded.
Args:
post_data:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if post_data is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
def _ParseAndCheckTwitter(self, json):
"""Try and parse the JSON returned from Twitter and return
an empty dictionary if there is any error. This is a purely
defensive check because during some Twitter network outages
it will return an HTML failwhale page."""
try:
data = simplejson.loads(json)
self._CheckForTwitterError(data)
except ValueError:
if "<title>Twitter / Over capacity</title>" in json:
raise TwitterError("Capacity Error")
if "<title>Twitter / Error</title>" in json:
raise TwitterError("Technical Error")
raise TwitterError("json decoding")
return data
def _CheckForTwitterError(self, data):
"""Raises a TwitterError if twitter returns an error message.
Args:
data:
A python dict created from the Twitter json response
Raises:
TwitterError wrapping the twitter error message if one exists.
"""
# Twitter errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'error' in data:
raise TwitterError(data['error'])
if 'errors' in data:
raise TwitterError(data['errors'])
  def _FetchUrl(self,
                url,
                post_data = None,
                parameters = None,
                no_cache = None,
                use_gzip_compression = None):
    '''Fetch a URL, optionally caching for a specified time.
    Args:
      url:
        The URL to retrieve
      post_data:
        A dict of (str, unicode) key/value pairs.
        If set, POST will be used.
      parameters:
        A dict whose key/value pairs should encoded and added
        to the query string. [Optional]
      no_cache:
        If true, overrides the cache on the current request
      use_gzip_compression:
        If True, tells the server to gzip-compress the response.
        It does not apply to POST requests.
        Defaults to None, which will get the value to use from
        the instance variable self._use_gzip [Optional]
    Returns:
      A string containing the body of the response.
    '''
    # Build the extra parameters dict
    extra_params = {}
    if self._default_params:
      extra_params.update(self._default_params)
    if parameters:
      extra_params.update(parameters)
    # The presence of a POST body decides the HTTP verb.
    if post_data:
      http_method = "POST"
    else:
      http_method = "GET"
    if self._debugHTTP:
      _debug = 1
    else:
      _debug = 0
    http_handler = self._urllib.HTTPHandler(debuglevel = _debug)
    https_handler = self._urllib.HTTPSHandler(debuglevel = _debug)
    # Honor the conventional proxy environment variables; a proxy handler
    # is installed only when BOTH http_proxy and https_proxy are set.
    http_proxy = os.environ.get('http_proxy')
    https_proxy = os.environ.get('https_proxy')
    if http_proxy is None or https_proxy is None :
      proxy_status = False
    else :
      proxy_status = True
    opener = self._urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)
    if proxy_status is True :
      proxy_handler = self._urllib.ProxyHandler({'http':str(http_proxy), 'https': str(https_proxy)})
      opener.add_handler(proxy_handler)
    if use_gzip_compression is None:
      use_gzip = self._use_gzip
    else:
      use_gzip = use_gzip_compression
    # Set up compression (GET requests only; gzip is skipped for POSTs).
    if use_gzip and not post_data:
      opener.addheaders.append(('Accept-Encoding', 'gzip'))
    # With OAuth credentials configured, the signed request supplies both
    # the final URL and (for POSTs) the encoded body.
    if self._oauth_consumer is not None:
      if post_data and http_method == "POST":
        parameters = post_data.copy()
      req = oauth.Request.from_consumer_and_token(self._oauth_consumer,
                                                  token = self._oauth_token,
                                                  http_method = http_method,
                                                  http_url = url, parameters = parameters)
      req.sign_request(self._signature_method_hmac_sha1, self._oauth_consumer, self._oauth_token)
      # NOTE(review): headers from req.to_header() are computed but never
      # attached to the request -- presumably the signature travels in the
      # URL/body instead; confirm against the oauth2 library docs.
      headers = req.to_header()
      if http_method == "POST":
        encoded_post_data = req.to_postdata()
      else:
        encoded_post_data = None
      url = req.to_url()
    else:
      url = self._BuildUrl(url, extra_params = extra_params)
      encoded_post_data = self._EncodePostData(post_data)
    # Open and return the URL immediately if we're not going to cache
    # (POST requests are never cached).
    if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
      response = opener.open(url, encoded_post_data)
      url_data = self._DecompressGzippedResponse(response)
      opener.close()
    else:
      # Unique keys are a combination of the url and the oAuth Consumer Key
      if self._consumer_key:
        key = self._consumer_key + ':' + url
      else:
        key = url
      # See if it has been cached before
      last_cached = self._cache.GetCachedTime(key)
      # If the cached version is outdated then fetch another and store it
      if not last_cached or time.time() >= last_cached + self._cache_timeout:
        try:
          response = opener.open(url, encoded_post_data)
          url_data = self._DecompressGzippedResponse(response)
          self._cache.Set(key, url_data)
        except urllib2.HTTPError, e:
          # NOTE(review): HTTP errors are only printed, leaving url_data
          # unbound -- the caller then hits an UnboundLocalError instead of
          # seeing the HTTP failure.
          print e
        opener.close()
      else:
        # Cache entry is still fresh; serve it without hitting the network.
        url_data = self._cache.Get(key)
    # Always return the latest version
    return url_data
class _FileCacheError(Exception):
  '''Base exception class for _FileCache related errors, such as a cache
  path escaping the root directory or a non-directory path collision.'''
class _FileCache(object):
  '''Disk-backed cache mapping string keys to string payloads.
  Keys are MD5-hashed and sharded across DEPTH levels of single-character
  subdirectories under a per-user root directory.
  '''
  # Number of single-character directory levels used to shard cache files.
  DEPTH = 3
  def __init__(self, root_directory = None):
    # A per-user temp directory is used when root_directory is omitted.
    self._InitializeRootDirectory(root_directory)
  def Get(self, key):
    '''Return the cached payload for key, or None when not cached.'''
    path = self._GetPath(key)
    if os.path.exists(path):
      return open(path).read()
    else:
      return None
  def Set(self, key, data):
    '''Store data under key, writing atomically via a temp file + rename.'''
    path = self._GetPath(key)
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
      os.makedirs(directory)
    if not os.path.isdir(directory):
      raise _FileCacheError('%s exists but is not a directory' % directory)
    # Write to a temporary file first so readers never see a partial payload.
    temp_fd, temp_path = tempfile.mkstemp()
    temp_fp = os.fdopen(temp_fd, 'w')
    temp_fp.write(data)
    temp_fp.close()
    # Refuse to write outside the cache root (guards against odd keys).
    if not path.startswith(self._root_directory):
      raise _FileCacheError('%s does not appear to live under %s' %
                            (path, self._root_directory))
    if os.path.exists(path):
      os.remove(path)
    os.rename(temp_path, path)
  def Remove(self, key):
    '''Delete the cache entry for key, if one exists.'''
    path = self._GetPath(key)
    if not path.startswith(self._root_directory):
      raise _FileCacheError('%s does not appear to live under %s' %
                            (path, self._root_directory))
    if os.path.exists(path):
      os.remove(path)
  def GetCachedTime(self, key):
    '''Return the mtime of the entry for key, or None when not cached.'''
    path = self._GetPath(key)
    if os.path.exists(path):
      return os.path.getmtime(path)
    else:
      return None
  def _GetUsername(self):
    '''Attempt to find the username in a cross-platform fashion.'''
    try:
      return os.getenv('USER') or \
             os.getenv('LOGNAME') or \
             os.getenv('USERNAME') or \
             os.getlogin() or \
             'nobody'
    except (AttributeError, IOError, OSError), e:
      # os.getlogin() can fail without a controlling terminal.
      return 'nobody'
  def _GetTmpCachePath(self):
    '''Return the default per-user cache root inside the temp directory.'''
    username = self._GetUsername()
    cache_directory = 'python.cache_' + username
    return os.path.join(tempfile.gettempdir(), cache_directory)
  def _InitializeRootDirectory(self, root_directory):
    # Resolve (or default) the root and make sure it exists as a directory.
    if not root_directory:
      root_directory = self._GetTmpCachePath()
    root_directory = os.path.abspath(root_directory)
    if not os.path.exists(root_directory):
      os.mkdir(root_directory)
    if not os.path.isdir(root_directory):
      raise _FileCacheError('%s exists but is not a directory' %
                            root_directory)
    self._root_directory = root_directory
  def _GetPath(self, key):
    # hashlib.md5 is directly callable; the legacy md5 module needs md5.new.
    try:
      hashed_key = md5(key).hexdigest()
    except TypeError:
      hashed_key = md5.new(key).hexdigest()
    return os.path.join(self._root_directory,
                        self._GetPrefix(hashed_key),
                        hashed_key)
  def _GetPrefix(self, hashed_key):
    # e.g. 'abcdef...' -> 'a/b/c' for DEPTH == 3.
    return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
| gpl-3.0 |
gencer/sentry | src/sentry/web/frontend/setup_wizard.py | 1 | 2553 | from __future__ import absolute_import
from sentry.cache import default_cache
from django.core.urlresolvers import reverse
from sentry.api import client
from sentry.models import ApiToken
from sentry.api.serializers import serialize
from sentry.web.frontend.base import BaseView
from sentry.web.helpers import render_to_response
from sentry.api.endpoints.setup_wizard import SETUP_WIZARD_CACHE_KEY, SETUP_WIZARD_CACHE_TIMEOUT
class SetupWizardView(BaseView):
    def get(self, request, wizard_hash):
        """
        Render the setup-wizard page for an authenticated session and fill
        the wizard cache entry with the data the wizard client polls for
        (projects with their DSN keys, plus an API token).

        Redirects to the user's organization whenever the cache entry for
        ``wizard_hash`` has expired or been deleted.
        """
        context = {
            'hash': wizard_hash
        }
        # The cache entry is created by the wizard API endpoint; its absence
        # means the hash is stale or invalid.
        key = '%s%s' % (SETUP_WIZARD_CACHE_KEY, wizard_hash)

        wizard_data = default_cache.get(key)

        if wizard_data is None:
            return self.redirect_to_org(request)

        orgs = client.get(
            reverse('sentry-api-0-organizations'), request=request)

        # Collect every project the user can see, enriched with its
        # organization and client keys (DSNs).
        filled_projects = []

        for org in orgs.data:
            projects = client.get(reverse('sentry-api-0-organization-projects', kwargs={
                'organization_slug': org.get('slug')
            }), request=request)
            for project in projects.data:
                enriched_project = project
                enriched_project['organization'] = org
                keys = client.get(reverse('sentry-api-0-project-keys', kwargs={
                    'organization_slug': org.get('slug'),
                    'project_slug': project.get('slug')
                }), request=request)
                enriched_project['keys'] = keys.data
                filled_projects.append(enriched_project)

        # Fetch an existing token with the release scope, or create one.
        tokens = [
            x for x in ApiToken.objects.filter(user=request.user).all()
            if 'project:releases' in x.get_scopes()
        ]
        if not tokens:
            token = ApiToken.objects.create(
                user=request.user,
                scope_list=['project:releases'],
                refresh_token=None,
                expires_at=None,
            )
        else:
            token = tokens[0]

        result = {
            'apiKeys': serialize(token),
            'projects': filled_projects
        }

        # Reuse the key computed above rather than rebuilding it.
        default_cache.set(key, result, SETUP_WIZARD_CACHE_TIMEOUT)

        return render_to_response('sentry/setup-wizard.html', context, request)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.