text
stringlengths 4
1.02M
| meta
dict |
|---|---|
'''
CRUD for the category.
'''
import json
import tornado.web
import config
from torcms.core import tools
from torcms.core.base_handler import BaseHandler
from torcms.model.category_model import MCategory
from torcms.model.post2catalog_model import MPost2Catalog
from torcms.model.post_model import MPost
class CategoryAjaxHandler(BaseHandler):
    '''
    AJAX CRUD handler for categories.

    GET routes:  ``<kind>/list`` and ``_delete/<uid>``.
    POST routes: ``_add`` and ``_edit/<uid>``.
    '''

    def initialize(self, **kwargs):
        super().initialize()
        # Template sub-directory used by list_catalog().
        self.tmpl_router = 'category_ajax'

    def get(self, *args, **kwargs):
        '''
        Route GET requests by URL shape.
        '''
        url_str = args[0]
        url_arr = self.parse_url(url_str)
        # FIX: both routes below produce two-segment URLs; with the previous
        # nesting the ``_delete`` branch could never be reached.
        if len(url_arr) == 2 and url_arr[1] == 'list':
            self.list_catalog(url_arr[0])
        elif url_arr[0] == '_delete':
            self.delete_by_id(url_arr[1])
        else:
            kwd = {
                'info': '页面未找到',
            }
            self.render(
                'misc/html/404.html',
                kwd=kwd,
                userinfo=self.userinfo,
            )

    def post(self, *args, **kwargs):
        '''
        Route POST requests: ``_edit/<uid>`` or ``_add``.
        '''
        url_str = args[0]
        if url_str == '':
            return
        url_arr = self.parse_url(url_str)
        if url_arr[0] == '_edit':
            self.update(url_arr[1])
        elif url_arr[0] == '_add':
            self.add()
        else:
            self.redirect('misc/html/404.html')

    def list_catalog(self, kind):
        '''
        Render the category list for the given kind.
        '''
        kwd = {
            'pager': '',
            'title': '最近文档',
            'kind': kind,
            'router': config.router_post[kind]
        }
        self.render('admin/{0}/category_list.html'.format(self.tmpl_router),
                    kwd=kwd,
                    view=MCategory.query_all(kind),
                    format_date=tools.format_date,
                    userinfo=self.userinfo,
                    cfg=config.CMS_CFG)

    @tornado.web.authenticated
    def add(self):
        '''
        Create (or upsert) a category from the posted form data.
        '''
        if not self.check_post_role()['ADD']:
            return False
        post_data = self.get_request_arguments()
        post_data['user_name'] = self.get_current_user()
        cur_uid = post_data['uid']
        # NOTE: the trailing space in the 'addinfo ' key is kept for
        # client compatibility — the frontend reads exactly this key.
        output = {
            'addinfo ': 1 if MCategory.add_or_update(cur_uid, post_data) else 0,
        }
        return self.write(json.dumps(output))

    @tornado.web.authenticated
    def update(self, uid):
        '''
        Update the category identified by ``uid``.
        '''
        if not self.check_post_role()['EDIT']:
            return False
        post_data = self.get_request_arguments()
        post_data['user_name'] = self.get_current_user()
        output = {
            'addinfo ': 1 if MCategory.update(uid, post_data) else 0,
        }
        return self.write(json.dumps(output))

    @tornado.web.authenticated
    def delete_by_id(self, del_id):
        '''
        Delete a category, detaching related posts first.
        '''
        if not self.check_post_role()['DELETE']:
            return False
        # Remove post/category relations before the category itself so no
        # dangling relations are left behind.
        post_resc = MPost2Catalog.query_postinfo_by_cat(del_id)
        for post in post_resc:
            MPost2Catalog.remove_relation(post.uid, del_id)
        output = {'del_link': 1 if MCategory.delete(del_id) else 0}
        return self.write(json.dumps(output))
|
{
"content_hash": "9804a4908cc96de0a5262d84a173ea91",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 76,
"avg_line_length": 24.741496598639454,
"alnum_prop": 0.4855650261204289,
"repo_name": "bukun/TorCMS",
"id": "b9928013aab3079adb9146da83b75b9af578c7a9",
"size": "3678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "torcms/handlers/category_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "75939"
},
{
"name": "Dockerfile",
"bytes": "2243"
},
{
"name": "HTML",
"bytes": "292427"
},
{
"name": "JavaScript",
"bytes": "34394"
},
{
"name": "Makefile",
"bytes": "1108"
},
{
"name": "Python",
"bytes": "747675"
},
{
"name": "Ruby",
"bytes": "926"
},
{
"name": "SCSS",
"bytes": "550"
},
{
"name": "Sass",
"bytes": "69221"
},
{
"name": "Shell",
"bytes": "1317"
}
],
"symlink_target": ""
}
|
from typing import NamedTuple, List
from src.independent.util import chcast
from src.independent.SExpr import SExpr, sexpr_subst_mult_string, SExprOrStr
class L4Macro(NamedTuple):
    """An s-expression macro: parameter names plus one body template."""
    macroparams: List[str]
    macrobody: SExpr

    def subst(self, paramvals: List[SExprOrStr]) -> SExpr:
        """Return the body with every parameter replaced by its value."""
        expanded = sexpr_subst_mult_string(self.macrobody, self.macroparams, paramvals)
        return chcast(SExpr, expanded)
class L4BlockMacro(NamedTuple):
    """Like L4Macro, but the body is a sequence of s-expressions."""
    macroparams: List[str]
    macrobody: List[SExpr]

    def subst(self, paramvals: List[SExprOrStr]) -> List[SExpr]:
        """Substitute the parameters into every expression of the body."""
        expanded: List[SExpr] = []
        for expr in self.macrobody:
            expanded.append(chcast(SExpr, sexpr_subst_mult_string(expr, self.macroparams, paramvals)))
        return expanded
|
{
"content_hash": "0ec4c66863c2857e10836c80b1ffde6e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 113,
"avg_line_length": 34.421052631578945,
"alnum_prop": 0.7400611620795107,
"repo_name": "legalese/legalese-compiler",
"id": "53ec5e7d8e5923c28d5c8bbfaf491fbdffbdaa9d",
"size": "654",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "L4/pyL4/src/model/L4Macro.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Common Lisp",
"bytes": "23607"
},
{
"name": "Grammatical Framework",
"bytes": "48526"
},
{
"name": "Haskell",
"bytes": "169801"
},
{
"name": "JavaScript",
"bytes": "30376"
},
{
"name": "Makefile",
"bytes": "658"
},
{
"name": "Nix",
"bytes": "25049"
},
{
"name": "Perl",
"bytes": "1775"
},
{
"name": "Prolog",
"bytes": "2845"
},
{
"name": "Python",
"bytes": "412846"
},
{
"name": "Shell",
"bytes": "348"
},
{
"name": "TeX",
"bytes": "114879"
}
],
"symlink_target": ""
}
|
import json
def essentials_of(solution):
    """Project a solution record down to the four fields worth keeping."""
    wanted = ('problemId', 'score', 'solution', 'seed')
    return {key: solution[key] for key in wanted}
with open("scores.json") as f:
    # The file holds a single JSON line: a list of solution records.
    best = {}
    for record in json.loads(f.readline()):
        pid = record['problemId']
        current = best.get(pid)
        # Keep the highest-scoring record per problem.
        if current is None or current['score'] < record['score']:
            best[pid] = essentials_of(record)
    # The score was only needed for the comparison; drop it before output.
    for record in best.values():
        del record['score']
    print(json.dumps(list(best.values())))
|
{
"content_hash": "c9517e67d073fd6086ebfee95be97538",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 61,
"avg_line_length": 27.91304347826087,
"alnum_prop": 0.5872274143302181,
"repo_name": "codingteam/icfpc-2015",
"id": "01ce17839b59fa2b7389b1b39862980158eb67eb",
"size": "666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "best_solutions.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6541"
},
{
"name": "JavaScript",
"bytes": "717256"
},
{
"name": "Makefile",
"bytes": "18"
},
{
"name": "PowerShell",
"bytes": "744"
},
{
"name": "Python",
"bytes": "666"
},
{
"name": "Scala",
"bytes": "50902"
},
{
"name": "Shell",
"bytes": "24"
}
],
"symlink_target": ""
}
|
# NOTE(review): this module appears to be a fixture for asv's source-code
# extraction tests (path: test/benchmark/code_extraction.py) — the trivial
# bodies, the "module-level"/"class-level" comments, and the non-ASCII '難'
# look deliberate, so all code below is left byte-identical.
def setup():
    # module-level
    pass


def setup_cache():
    # module-level
    pass


def track_test():
    # module-level 難
    return 0


class MyClass:
    def setup(self):
        # class-level
        pass

    def setup_cache(self):
        # class-level
        pass

    def track_test(self):
        # class-level 難
        return 0
|
{
"content_hash": "e931a3835b20e33e4192bb966a896076",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 26,
"avg_line_length": 14.375,
"alnum_prop": 0.5362318840579711,
"repo_name": "pv/asv",
"id": "faed25d8fe1c15a69038c1456467489aa9181ee1",
"size": "374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/benchmark/code_extraction.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2509"
},
{
"name": "C++",
"bytes": "11147"
},
{
"name": "CSS",
"bytes": "4110"
},
{
"name": "HTML",
"bytes": "8388"
},
{
"name": "JavaScript",
"bytes": "108411"
},
{
"name": "PowerShell",
"bytes": "2371"
},
{
"name": "Python",
"bytes": "627913"
}
],
"symlink_target": ""
}
|
import urllib
import json
import os
import requests
import datetime
import twilio.twiml
from flask import Flask
from flask import jsonify
from flask import url_for
from flask import request
from flask import make_response
from twilio.rest import TwilioRestClient
# Flask app should start in global layout
app = Flask(__name__)
# Entry to webhook from Api.ai
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print 'Request:'
print json.dumps(req, indent=4)
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
    """Dispatch an Api.ai intent to the matching Nessie/Twilio action and
    build the webhook reply payload.

    NOTE(review): assumes req['result'] and its 'metadata'/'parameters'
    sub-dicts are present as Api.ai sends them — confirm against the caller.
    """
    result = req.get('result')
    metadata = result.get('metadata')
    intentname = metadata.get('intentName')
    parameters = result.get('parameters')
    accounttype = parameters.get('type')
    custname = parameters.get('customername')
    phoneNo = parameters.get('phonenumber')
    payeename = parameters.get('transcustomername')
    payeeaccounttype = parameters.get('transtype')
    payeeamount = parameters.get('amount')
    #Get Balance Amount for account from account id
    if intentname == 'Account_Balance':
        Balance = getBalance(custname, accounttype)
        speech = 'Your ' + accounttype + ' account balance is ' + Balance + ' dollars'
    elif intentname == 'Last_transfer':
        result = getLasttransfer(custname, accounttype)
        Transferamount = str(result[0][u'amount'])
        Transferdate = str(result[0][u'transaction_date'])
        speech = 'The last transfer you made was for ' + Transferamount + ' dollars on ' + Transferdate
    elif intentname == 'Last_purchase':
        result = getLastpurchase(custname, accounttype)
        Transferamount = str(result[0][u'amount'])
        Purchasedate = str(result[0][u'purchase_date'])
        speech = 'The last purchase you made was for ' + Transferamount + ' dollars on ' + Purchasedate
    elif intentname == 'Transfer_funds':
        result = createTransfer(custname, accounttype, payeename, payeeaccounttype, payeeamount)
        responsecode = result[u'code']
        if responsecode == 201:
            # FIX: only read objectCreated on success — a failed transfer
            # response has no such key and previously raised KeyError here.
            transId = result[u'objectCreated'][u'_id']
            speech = 'Your transfer request is successful. Your transaction id is ' + transId
        else:
            speech = 'Your transfer is not successful'
    else:
        # Unknown intent: fall back to an outbound Twilio call.
        speech = 'You will be receiving a call on this number shortly'
        client = TwilioRestClient(os.environ.get('TWILIO_ACCOUNT_SID'), os.environ.get('TWILIO_AUTH_TOKEN'))
        client.calls.create(from_=os.environ.get('TWILIO_NUMBER'), to=phoneNo, url='http://outboundclik.herokuapp.com/outbound')
    # FIX: removed the unreachable ``return res`` that followed this return
    # (``res`` was never defined in this scope) and the unused ``actionname``.
    return {'speech': speech,
            'displayText': speech,
            'source': 'apiai-account-sample'}  # "data": data, # "contextOut": [],
#Helper function for Balance
def getBalance(nickname, Accounttype):
with open('details.json') as json_file:
details = json.load(json_file)
apiKey = os.environ.get('NESSIE_API_KEY')
print apiKey
if Accounttype == 'Savings':
accountId = details[nickname]['Savings']
elif Accounttype == 'Checking':
accountId = details[nickname]['Checking']
else:
accountId = details[nickname]['Credit Card']
url = 'http://api.reimaginebanking.com/accounts/{}?key={}'.format(accountId,apiKey)
print url
response = requests.get(url, headers={'content-type': 'application/json'})
result = response.json()
#print result
accountbalance = result[u'balance']
Balance = str(accountbalance)
return Balance
#Helper function for Last Transfer
def getLasttransfer(nickname, Accounttype):
    """Fetch the customer's transfer history (payer side) from Nessie."""
    with open('details.json') as json_file:
        details = json.load(json_file)
    apiKey = os.environ.get('NESSIE_API_KEY')
    # Anything other than Savings/Checking falls back to the credit card.
    account_key = Accounttype if Accounttype in ('Savings', 'Checking') else 'Credit Card'
    accountId = details[nickname][account_key]
    url = 'http://api.reimaginebanking.com/accounts/{}/transfers?type=payer&key={}'.format(accountId,apiKey)
    response = requests.get(url, headers={'content-type': 'application/json'})
    return response.json()
#Helper function for Last Purchase
def getLastpurchase(nickname, Accounttype):
    """Fetch the customer's purchase history from Nessie."""
    with open('details.json') as json_file:
        details = json.load(json_file)
    apiKey = os.environ.get('NESSIE_API_KEY')
    # Anything other than Savings/Checking falls back to the credit card.
    account_key = Accounttype if Accounttype in ('Savings', 'Checking') else 'Credit Card'
    accountId = details[nickname][account_key]
    url = 'http://api.reimaginebanking.com/accounts/{}/purchases?key={}'.format(accountId,apiKey)
    response = requests.get(url, headers={'content-type': 'application/json'})
    return response.json()
#Helper function for Transfer funds
def createTransfer(name, fromaccount, payee, toaccount, toamount):
print "i am here"
with open('details.json') as json_file:
details = json.load(json_file)
apiKey = os.environ.get('NESSIE_API_KEY')
dateObject = datetime.date.today()
dateString = dateObject.strftime('%Y-%m-%d')
if fromaccount == 'Savings':
payeraccountId = details[name]['Savings']
elif fromaccount == 'Checking':
payeraccountId = details[name]['Checking']
else:
payeraccountId = details[name]['Credit Card']
if toaccount == 'Savings':
payeeaccountId = details[payee]['Savings']
elif toaccount == 'Checking':
payeeaccountId = details[payee]['Checking']
else:
payeeaccountId = details[payee]['Credit Card']
url = 'http://api.reimaginebanking.com/accounts/{}/transfers?key={}'.format(payeraccountId,apiKey)
print url
print payeeaccountId, payeraccountId
payload = {'medium': 'balance', 'payee_id': payeeaccountId, 'amount': float(toamount), 'transaction_date': dateString, 'description': 'Personal'}
response = requests.post(url, data=json.dumps(payload), headers={'content-type':'application/json'})
result = response.json()
print result
return result
if __name__ == '__main__':
    # Heroku supplies PORT; default to 5000 for local runs.
    port = int(os.getenv('PORT', 5000))
    print 'Starting app on port %d' % port
    # Bind to all interfaces so the platform router can reach the dyno.
    app.run(debug=False, port=port, host='0.0.0.0')
|
{
"content_hash": "07ae659a993c667c9ea5cd72d7576d6a",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 154,
"avg_line_length": 40.35294117647059,
"alnum_prop": 0.6422740524781341,
"repo_name": "vijayraghu/python.github.io",
"id": "cc8222817e7b4549559a730b20da7ece3c30582d",
"size": "6902",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "35844"
}
],
"symlink_target": ""
}
|
import keras_tuner
from autokeras.engine import tuner as tuner_module
class RandomSearch(keras_tuner.RandomSearch, tuner_module.AutoTuner):
    """KerasTuner RandomSearch with preprocessing layer tuning.

    Purely a mixin composition: keras_tuner.RandomSearch supplies the search
    strategy and tuner_module.AutoTuner the AutoKeras-specific behaviour;
    the MRO puts the search strategy first.
    """
    pass
|
{
"content_hash": "1bc3ae977d849a65f0f124dd7fde9583",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 69,
"avg_line_length": 24.444444444444443,
"alnum_prop": 0.7818181818181819,
"repo_name": "keras-team/autokeras",
"id": "3017799244959f30fd9eb93d66f893152b128a08",
"size": "807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autokeras/tuners/random_search.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1005"
},
{
"name": "JavaScript",
"bytes": "307"
},
{
"name": "Makefile",
"bytes": "704"
},
{
"name": "Python",
"bytes": "548809"
},
{
"name": "Shell",
"bytes": "2084"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.util import display_for_field, label_for_field, lookup_field
from django.contrib.admin.util import NestedObjects
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
from django.contrib.sites.models import Site
from django.db import models
from django.test import TestCase
from django.utils import unittest
from django.utils.formats import localize
from models import Article, Count, Event, Location
class NestedObjectsTests(TestCase):
    """
    Tests for ``NestedObject`` utility collection.
    """
    def setUp(self):
        self.n = NestedObjects()
        # Five Count rows (num 0..4) used as graph nodes below.
        self.objs = [Count.objects.create(num=i) for i in range(5)]

    def _check(self, target):
        # Render the collected hierarchy via each node's ``num`` and compare
        # with the expected nested-list shape.
        self.assertEquals(self.n.nested(lambda obj: obj.num), target)

    def _add(self, obj, parent=None):
        # don't bother providing the extra args that NestedObjects ignores
        self.n.add(None, None, obj, None, parent)

    def test_unrelated_roots(self):
        self._add(self.objs[0])
        self._add(self.objs[1])
        self._add(self.objs[2], self.objs[1])
        self._check([0, 1, [2]])

    def test_siblings(self):
        self._add(self.objs[0])
        self._add(self.objs[1], self.objs[0])
        self._add(self.objs[2], self.objs[0])
        self._check([0, [1, 2]])

    def test_duplicate_instances(self):
        self._add(self.objs[0])
        self._add(self.objs[1])
        dupe = Count.objects.get(num=1)
        # A second instance of the same row must not create a duplicate node.
        self._add(dupe, self.objs[0])
        self._check([0, 1])

    def test_non_added_parent(self):
        self._add(self.objs[0], self.objs[1])
        self._check([0])

    def test_cyclic(self):
        # A cycle 0 -> 2 -> 1 -> 0 must not send nested() into infinite
        # recursion.
        self._add(self.objs[0], self.objs[2])
        self._add(self.objs[1], self.objs[0])
        self._add(self.objs[2], self.objs[1])
        self._add(self.objs[0], self.objs[2])
        self._check([0, [1, [2]]])
class UtilTests(unittest.TestCase):
    """Tests for django.contrib.admin.util helpers (lookup_field,
    display_for_field, label_for_field)."""

    def test_values_from_lookup_field(self):
        """
        Regression test for #12654: lookup_field
        """
        SITE_NAME = 'example.com'
        TITLE_TEXT = 'Some title'
        CREATED_DATE = datetime.min
        ADMIN_METHOD = 'admin method'
        SIMPLE_FUNCTION = 'function'
        INSTANCE_ATTRIBUTE = 'attr'

        class MockModelAdmin(object):
            def get_admin_value(self, obj):
                return ADMIN_METHOD

        simple_function = lambda obj: SIMPLE_FUNCTION

        article = Article(
            site=Site(domain=SITE_NAME),
            title=TITLE_TEXT,
            created=CREATED_DATE,
        )
        article.non_field = INSTANCE_ATTRIBUTE

        # Each pair: lookup name (or callable) -> expected resolved value,
        # covering field, admin method, callable, model method and plain
        # instance attribute lookups.
        verifications = (
            ('site', SITE_NAME),
            ('created', localize(CREATED_DATE)),
            ('title', TITLE_TEXT),
            ('get_admin_value', ADMIN_METHOD),
            (simple_function, SIMPLE_FUNCTION),
            ('test_from_model', article.test_from_model()),
            ('non_field', INSTANCE_ATTRIBUTE)
        )

        mock_admin = MockModelAdmin()
        for name, value in verifications:
            field, attr, resolved_value = lookup_field(name, article, mock_admin)
            if field is not None:
                resolved_value = display_for_field(resolved_value, field)
            self.assertEqual(value, resolved_value)

    def test_null_display_for_field(self):
        """
        Regression test for #12550: display_for_field should handle None
        value.
        """
        display_value = display_for_field(None, models.CharField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        # A None choice with a label should display that label, not the
        # empty-value marker.
        display_value = display_for_field(None, models.CharField(
            choices=(
                (None, "test_none"),
            )
        ))
        self.assertEqual(display_value, "test_none")

        display_value = display_for_field(None, models.DateField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        display_value = display_for_field(None, models.TimeField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        # Regression test for #13071: NullBooleanField has special
        # handling.
        display_value = display_for_field(None, models.NullBooleanField())
        expected = u'<img src="%simg/admin/icon-unknown.gif" alt="None" />' % settings.ADMIN_MEDIA_PREFIX
        self.assertEqual(display_value, expected)

        display_value = display_for_field(None, models.DecimalField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        display_value = display_for_field(None, models.FloatField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

    def test_label_for_field(self):
        """
        Tests for label_for_field
        """
        self.assertEquals(
            label_for_field("title", Article),
            "title"
        )
        self.assertEquals(
            label_for_field("title2", Article),
            "another name"
        )
        self.assertEquals(
            label_for_field("title2", Article, return_attr=True),
            ("another name", None)
        )

        self.assertEquals(
            label_for_field("__unicode__", Article),
            "article"
        )
        self.assertEquals(
            label_for_field("__str__", Article),
            "article"
        )

        # Unknown names raise rather than silently returning a label.
        self.assertRaises(
            AttributeError,
            lambda: label_for_field("unknown", Article)
        )

        def test_callable(obj):
            return "nothing"
        self.assertEquals(
            label_for_field(test_callable, Article),
            "Test callable"
        )
        self.assertEquals(
            label_for_field(test_callable, Article, return_attr=True),
            ("Test callable", test_callable)
        )

        self.assertEquals(
            label_for_field("test_from_model", Article),
            "Test from model"
        )
        self.assertEquals(
            label_for_field("test_from_model", Article, return_attr=True),
            ("Test from model", Article.test_from_model)
        )
        self.assertEquals(
            label_for_field("test_from_model_with_override", Article),
            "not What you Expect"
        )

        self.assertEquals(
            label_for_field(lambda x: "nothing", Article),
            "--"
        )

        class MockModelAdmin(object):
            def test_from_model(self, obj):
                return "nothing"
            test_from_model.short_description = "not Really the Model"

        # A model_admin method with short_description wins over the model's.
        self.assertEquals(
            label_for_field("test_from_model", Article, model_admin=MockModelAdmin),
            "not Really the Model"
        )
        self.assertEquals(
            label_for_field("test_from_model", Article,
                model_admin = MockModelAdmin,
                return_attr = True
            ),
            ("not Really the Model", MockModelAdmin.test_from_model)
        )

    def test_related_name(self):
        """
        Regression test for #13963
        """
        self.assertEquals(
            label_for_field('location', Event, return_attr=True),
            ('location', None),
        )
        self.assertEquals(
            label_for_field('event', Location, return_attr=True),
            ('awesome event', None),
        )
        self.assertEquals(
            label_for_field('guest', Event, return_attr=True),
            ('awesome guest', None),
        )
|
{
"content_hash": "ec20803235b2fd3917e56d9ee0b60fb6",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 105,
"avg_line_length": 31.57563025210084,
"alnum_prop": 0.5773785761809714,
"repo_name": "faun/django_test",
"id": "518a16d1513d580b47b31acf09c116962b127e82",
"size": "7515",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/regressiontests/admin_util/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "170964"
},
{
"name": "Python",
"bytes": "10226802"
},
{
"name": "Shell",
"bytes": "3519"
}
],
"symlink_target": ""
}
|
import paddle.v2 as paddle
import data_provider
import vgg_ssd_net
import os, sys
import gzip
from config.pascal_voc_conf import cfg
def eval(eval_file_list, batch_size, data_args, model_path):
    '''
    Evaluate a trained VGG-SSD model on the given file list and print the
    test cost and detection mAP.

    NOTE(review): the name shadows the ``eval`` builtin; left unchanged
    because the ``__main__`` block below calls it by this name.
    '''
    cost, detect_out = vgg_ssd_net.net_conf(mode='eval')
    assert os.path.isfile(model_path), 'Invalid model.'
    parameters = paddle.parameters.Parameters.from_tar(gzip.open(model_path))
    # The trainer API requires an optimizer even though test() performs no
    # parameter updates.
    optimizer = paddle.optimizer.Momentum()
    trainer = paddle.trainer.SGD(
        cost=cost,
        parameters=parameters,
        extra_layers=[detect_out],
        update_equation=optimizer)
    # Data-layer order: image first, bounding boxes second.
    feeding = {'image': 0, 'bbox': 1}
    reader = paddle.batch(
        data_provider.test(data_args, eval_file_list), batch_size=batch_size)
    result = trainer.test(reader=reader, feeding=feeding)
    print "TestCost: %f, Detection mAP=%g" % \
        (result.cost, result.metrics['detection_evaluator'])
if __name__ == "__main__":
    paddle.init(use_gpu=True, trainer_count=4)  # use 4 gpus
    # Input geometry and mean pixel values come from the PASCAL VOC config.
    data_args = data_provider.Settings(
        data_dir='./data',
        label_file='label_list',
        resize_h=cfg.IMG_HEIGHT,
        resize_w=cfg.IMG_WIDTH,
        mean_value=[104, 117, 124])
    eval(
        eval_file_list='./data/test.txt',
        batch_size=4,
        data_args=data_args,
        model_path='models/pass-00000.tar.gz')
|
{
"content_hash": "acb272c11c486ae36ee8f90ae4d583b5",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 77,
"avg_line_length": 28.083333333333332,
"alnum_prop": 0.6379821958456974,
"repo_name": "xinghai-sun/models",
"id": "345e46f98b098480877a54dac842bd576112b1a3",
"size": "1348",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "ssd/eval.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "34663"
},
{
"name": "HTML",
"bytes": "174618"
},
{
"name": "Python",
"bytes": "509771"
},
{
"name": "Shell",
"bytes": "30390"
}
],
"symlink_target": ""
}
|
import codecs
from html.entities import codepoint2name
from html.entities import name2codepoint
import re
from urllib.parse import quote_plus
import markupsafe
html_escape = markupsafe.escape
xml_escapes = {
"&": "&",
">": ">",
"<": "<",
'"': """, # also " in html-only
"'": "'", # also ' in html-only
}
def xml_escape(string):
return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)
def url_escape(string):
    """UTF-8 encode *string* and percent/plus-escape it for URL use."""
    return quote_plus(string.encode("utf8"))
def trim(string):
    """Strip leading and trailing whitespace (the ``trim`` template filter)."""
    return string.strip()
class Decode:
    """Attribute-based decoder factory: ``decode.<encoding>(value)`` coerces
    *value* to ``str``, decoding bytes with the named encoding."""

    def __getattr__(self, key):
        def _coerce(value):
            if isinstance(value, str):
                return value
            if isinstance(value, bytes):
                return str(value, encoding=key)
            # Not text at all: stringify, then run through again.
            return _coerce(str(value))
        return _coerce


decode = Decode()
class XMLEntityEscaper:
def __init__(self, codepoint2name, name2codepoint):
self.codepoint2entity = {
c: str("&%s;" % n) for c, n in codepoint2name.items()
}
self.name2codepoint = name2codepoint
def escape_entities(self, text):
"""Replace characters with their character entity references.
Only characters corresponding to a named entity are replaced.
"""
return str(text).translate(self.codepoint2entity)
def __escape(self, m):
codepoint = ord(m.group())
try:
return self.codepoint2entity[codepoint]
except (KeyError, IndexError):
return "&#x%X;" % codepoint
__escapable = re.compile(r'["&<>]|[^\x00-\x7f]')
def escape(self, text):
"""Replace characters with their character references.
Replace characters by their named entity references.
Non-ASCII characters, if they do not have a named entity reference,
are replaced by numerical character references.
The return value is guaranteed to be ASCII.
"""
return self.__escapable.sub(self.__escape, str(text)).encode("ascii")
# XXX: This regexp will not match all valid XML entity names__.
# (It punts on details involving involving CombiningChars and Extenders.)
#
# .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
__characterrefs = re.compile(
r"""& (?:
\#(\d+)
| \#x([\da-f]+)
| ( (?!\d) [:\w] [-.:\w]+ )
) ;""",
re.X | re.UNICODE,
)
def __unescape(self, m):
dval, hval, name = m.groups()
if dval:
codepoint = int(dval)
elif hval:
codepoint = int(hval, 16)
else:
codepoint = self.name2codepoint.get(name, 0xFFFD)
# U+FFFD = "REPLACEMENT CHARACTER"
if codepoint < 128:
return chr(codepoint)
return chr(codepoint)
def unescape(self, text):
"""Unescape character references.
All character references (both entity references and numerical
character references) are unescaped.
"""
return self.__characterrefs.sub(self.__unescape, text)
# Shared escaper built on the stdlib HTML named-entity tables; the two
# module-level aliases below are the public filter entry points.
_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)
html_entities_escape = _html_entities_escaper.escape_entities
html_entities_unescape = _html_entities_escaper.unescape
def htmlentityreplace_errors(ex):
    """An encoding error handler.

    This python codecs error handler replaces unencodable
    characters with HTML entities, or, if no HTML entity exists for
    the character, XML character references::

        >>> 'The cost was \u20ac12.'.encode('latin1', 'htmlentityreplace')
        'The cost was €12.'
    """
    if isinstance(ex, UnicodeEncodeError):
        # Escape only the offending slice; the codec resumes at ex.end.
        bad_text = ex.object[ex.start : ex.end]
        text = _html_entities_escaper.escape(bad_text)
        # FIX: escape() returns ASCII *bytes*; ``str(text)`` injected the
        # "b'...'" repr into the encoded output. Decode it instead.
        return (str(text, "ascii"), ex.end)
    raise ex


codecs.register_error("htmlentityreplace", htmlentityreplace_errors)
# Maps the short filter names usable in template expressions (e.g.
# ``${x | h}``) to the dotted callables implementing them; resolved at
# template render time.
DEFAULT_ESCAPES = {
    "x": "filters.xml_escape",
    "h": "filters.html_escape",
    "u": "filters.url_escape",
    "trim": "filters.trim",
    "entity": "filters.html_entities_escape",
    "unicode": "str",
    "decode": "decode",
    "str": "str",
    "n": "n",
}
|
{
"content_hash": "0cbeb8e96dcd0d9eb8a6b94389dd5cf5",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 77,
"avg_line_length": 28.455128205128204,
"alnum_prop": 0.5845911241270556,
"repo_name": "chromium/chromium",
"id": "af202f3f5294605bf4ed0efc9726832f834bbf26",
"size": "4658",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "third_party/mako/mako/mako/filters.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
'''
Copyright (C) 2022, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'Beluga CDN (Beluga)'
def is_waf(self):
    """Fingerprint Beluga CDN via its Server header or trail cookie."""
    detections = [
        self.matchHeader(('Server', r'Beluga')),
        self.matchCookie(r'^beluga_request_trail='),
    ]
    return any(detections)
|
{
"content_hash": "849f9cc1e150ab45fae452555df60774",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 51,
"avg_line_length": 20.875,
"alnum_prop": 0.6227544910179641,
"repo_name": "EnableSecurity/wafw00f",
"id": "5da92287a93e0af989262400cfeaee7550c2f78b",
"size": "356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wafw00f/plugins/beluga.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "97"
},
{
"name": "Makefile",
"bytes": "339"
},
{
"name": "Python",
"bytes": "109477"
}
],
"symlink_target": ""
}
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from abc import abstractmethod
from copy import deepcopy
from inspect import isclass
from streamalert.shared.config import load_config
from streamalert.shared.importer import import_folders
from streamalert.shared.logger import get_logger
LOGGER = get_logger(__name__)
# Exception hierarchy: PublisherError is the package-level base; the two
# subclasses distinguish registration-time failures from lookup/assembly
# failures.
class PublisherError(Exception):
    """Exception to raise for any errors with invalid publishers"""


class PublisherRegistrationError(PublisherError):
    """Exception to raise when an error occurs during the @Register step of a publisher"""


class PublisherAssemblyError(PublisherError):
    """Exception to raise when a publisher fails lookup or assembly"""
class Register:
    """This is a decorator used to register publishers into the AlertPublisherRepository."""

    def __new__(cls, class_or_function):
        # Registration happens at decoration time; __new__ is used (rather
        # than __init__) so the original object can be handed straight back.
        AlertPublisherRepository.register_publisher(class_or_function)
        return class_or_function  # Return the definition, not the instantiated object
class AlertPublisher:
    """Interface for a Publisher. All class-based publishers must inherit from this class."""
    # NOTE(review): this class does not subclass abc.ABC, so @abstractmethod
    # is not enforced at instantiation time.

    @abstractmethod
    def publish(self, alert, publication):
        """Publishes the given alert.

        As a general rule of thumb, published fields that are specific to a certain output are
        published as top-level keys of the following format:

            [output service name].[field name]

        E.g. "demisto.blah"

        Args:
            alert (Alert): The alert instance to publish.
            publication (dict): An existing publication generated by previous publishers in the
                series of publishers, or {}.

        Returns:
            dict: The published alert.
        """
class CompositePublisher(AlertPublisher):
    """A publisher class that combines the logic of multiple other publishers together in series

    To reduce the chance that one publisher has side effects in other publishers in the chain,
    we use deepcopy between the publishers.

    Note: This publisher is not meant to be @Register'd as it does not have any logic on its own.
          It is only meant to be composed by AlertPublisherRepository to give a common interface to
          multiple publishers chained in sequence.
    """

    def __init__(self, publishers):
        self._publishers = publishers  # Type list(AlertPublisher)
        for publisher in self._publishers:
            if not isinstance(publisher, AlertPublisher):
                # Logged rather than raised: one bad entry does not abort
                # assembly of the rest of the chain.
                LOGGER.error('CompositePublisher given invalid publisher')

    def publish(self, alert, publication):
        """Run each publisher in order, feeding each output to the next."""
        for publisher in self._publishers:
            try:
                # deepcopy so a later failure cannot observe partial
                # mutations made by this publisher to the shared dict.
                publication = deepcopy(publication)
                publication = publisher.publish(alert, publication)
            except KeyError:
                LOGGER.exception(
                    'CompositePublisher encountered KeyError with publisher: %s',
                    publisher.__class__.__name__
                )
                raise
        return publication
class WrappedFunctionPublisher(AlertPublisher):
    """A class only used to wrap a function publisher."""

    def __init__(self, function):
        # function: a callable accepting (alert, publication), per the
        # AlertPublisher.publish() contract.
        self._function = function

    def publish(self, alert, publication):
        """Delegate publication directly to the wrapped function."""
        return self._function(alert, publication)
class AlertPublisherRepository:
    """A repository mapping names -> publishers
    As a usability optimization, using this Repository will eagerly load and register all
    publishers in the application.
    """
    # Class-level registry shared by all callers: publisher name -> instance.
    _publishers = {}
    # Guards import_publishers() so the config-driven import runs only once.
    _is_imported = False
    @classmethod
    def import_publishers(cls):
        # Import every configured publisher location exactly once.  Importing
        # those modules presumably runs their registration decorators, which
        # fill _publishers — TODO confirm against the Register decorator.
        if not cls._is_imported:
            config = load_config()
            import_folders(*config['global']['general'].get('publisher_locations', []))
            cls._is_imported = True
    @staticmethod
    def is_valid_publisher(thing):
        """Returns TRUE if the given reference can be registered as a publisher
        Publishers are valid if and only if they fall into one of the following categories:
        * They are a python function that accepts 2 arguments: (alert: Alert, publication: dict)
        * They are a python class that extends AlertPublisher
        Args:
            thing (mixed): Any primitive or reference to be checked
        Returns:
            bool
        """
        # We have to put the isclass() check BEFORE the callable() check because classes are also
        # callable!
        return issubclass(thing, AlertPublisher) if isclass(thing) else callable(thing)
    @staticmethod
    def get_publisher_name(class_or_function):
        """Given a class or function, will return its fully qualified name.
        This is useful for assigning a unique string name for a publisher.
        Args:
            class_or_function (callable|Class): A reference to a python function or class
        Returns:
            string
        """
        # e.g. "my.module.MyPublisher" — module path plus the bare name.
        return '{}.{}'.format(class_or_function.__module__, class_or_function.__name__)
    @classmethod
    def register_publisher(cls, publisher):
        """Registers the publisher into the repository.
        To standardize the interface of publishers, if a function publisher is given, it will be
        wrapped with a WrappedFunctionPublisher instance prior to being registered into the
        Repository.
        Args:
            publisher (callable|AlertPublisher): An instance of a publisher class or a function
        Raises:
            PublisherRegistrationError: If the publisher is not valid or its
                fully qualified name is already registered.
        """
        if not AlertPublisherRepository.is_valid_publisher(publisher):
            error = (
                'Could not register publisher {}; Not callable nor subclass of AlertPublisher'
            ).format(publisher)
            raise PublisherRegistrationError(error)
        if isclass(publisher):
            # If the provided publisher is a Class, then we simply need to instantiate an instance
            # of the class and register it.
            publisher_instance = publisher()
        else:
            # If the provided publisher is a function, we wrap it with a WrappedFunctionPublisher
            # to make them easier to handle.
            publisher_instance = WrappedFunctionPublisher(publisher)
        name = AlertPublisherRepository.get_publisher_name(publisher)
        if name in cls._publishers:
            error = 'Publisher with name [{}] has already been registered.'.format(name)
            raise PublisherRegistrationError(error)
        cls._publishers[name] = publisher_instance
    @classmethod
    def get_publisher(cls, name):
        """Returns the publisher with the given name
        Args:
            name (str): The name of the publisher.
        Returns:
            AlertPublisher|None: None (after logging an error) when no
                publisher with that name is registered.
        """
        if cls.has_publisher(name):
            return cls._publishers[name]
        LOGGER.error('Publisher [%s] does not exist', name)
    @classmethod
    def has_publisher(cls, name):
        """Returns true if the given publisher name has been registered in this Repository
        """
        # Make sure the configured publisher modules have been imported first.
        cls.import_publishers()
        return name in cls._publishers
    @classmethod
    def all_publishers(cls):
        """Returns all registered publishers in a dict mapping their unique name to instances.
        Remember: Function publishers are wrapped with WrappedFunctionPublisher
        Also remember: These publishers are INSTANCES of the publisher classes, not the classes
        themselves.
        Returns:
            dict
        """
        return cls._publishers
    @classmethod
    def create_composite_publisher(cls, publisher_names):
        """Assembles a single publisher that combines logic from multiple publishers
        Args:
            publisher_names (list(str)): A list of string names of publishers
        Return:
            CompositePublisher|DefaultPublisher
        """
        publisher_names = publisher_names or []
        publishers = []
        # Resolve each name; unknown names are skipped (get_publisher logs them).
        for publisher_name in publisher_names:
            publisher = cls.get_publisher(publisher_name)
            if publisher:
                publishers.append(publisher)
        if not publishers:
            # If no publishers were given, or if all of the publishers failed to load, then we
            # load a default publisher.
            default_publisher_name = cls.get_publisher_name(DefaultPublisher)
            return cls.get_publisher(default_publisher_name)
        return CompositePublisher(publishers)
@Register
class DefaultPublisher(AlertPublisher):
    """The default publisher that is used when no other publishers are provided"""
    def publish(self, alert, publication):
        # Ignores the incoming publication entirely and returns the alert's
        # full dict representation.
        return alert.output_dict()
|
{
"content_hash": "dd9147a4983677bea863933b921a112e",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 99,
"avg_line_length": 34.37593984962406,
"alnum_prop": 0.664588801399825,
"repo_name": "airbnb/streamalert",
"id": "500198abf8ed64f574481c1dd52178f99f9a4214",
"size": "9144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "streamalert/shared/publisher.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HCL",
"bytes": "142275"
},
{
"name": "Python",
"bytes": "2209853"
},
{
"name": "Shell",
"bytes": "2975"
}
],
"symlink_target": ""
}
|
import argparse
import sys
import time
from rclpy.executors import ExternalShutdownException
from test_msgs.action import Fibonacci
from test_msgs.action import NestedMessage
class ExpectedGoal:
    """Interface for a goal the test action server knows how to handle.

    Concrete behavior is attached per-instance by the
    generate_expected_*_goals factories below.
    """
    def is_goal_expected(self, goal):
        """Return True if `goal` matches this expectation (must be overridden)."""
        raise NotImplementedError('is_goal_expected() is not implemented')
    def execute_goal(self, goal_handle):
        """Execute the accepted goal and return the action result (must be overridden)."""
        raise NotImplementedError('execute_goal() is not implemented')
def receive_goals(node, action_type, expected_goals):
    """Create an ActionServer on `node` handling only the given expected goals.

    The server's callback dispatches each incoming goal to the first
    ExpectedGoal that recognizes it; unrecognized goals are aborted with an
    empty result.
    """
    from rclpy.action import ActionServer

    def execute_callback(goal_handle):
        # Dispatch to the first expectation that claims this request.
        matched = next(
            (g for g in expected_goals if g.is_goal_expected(goal_handle.request)),
            None)
        if matched is not None:
            return matched.execute_goal(goal_handle)
        # Not an expected goal (this should not happen)
        print('Unexpected goal received by action server', file=sys.stderr)
        goal_handle.abort()
        return action_type.Result()

    return ActionServer(
        node, action_type, 'test/action/' + action_type.__name__, execute_callback)
def generate_expected_fibonacci_goals():
    """Build the ExpectedGoal list for the Fibonacci action.

    Accepts only Fibonacci goals with order == 10 and streams feedback at
    roughly 10 Hz while computing the sequence.
    """
    import rclpy
    expected_goals = []
    def is_goal_expected(goal):
        # Only a Fibonacci goal asking for exactly order 10 is handled.
        return (isinstance(goal, Fibonacci.Goal) and 10 == goal.order)
    def execute_goal(goal_handle):
        goal = goal_handle.request
        feedback = Fibonacci.Feedback()
        # Seed with the first two Fibonacci numbers.
        feedback.sequence = [0, 1]
        for i in range(1, goal.order):
            # Abort with an empty result if rclpy is shutting down.
            if not rclpy.ok():
                goal_handle.abort()
                return Fibonacci.Result()
            # Check if the goal was canceled
            if goal_handle.is_cancel_requested:
                goal_handle.canceled()
                result = Fibonacci.Result()
                result.sequence = feedback.sequence
                print('Goal was canceled')
                return result
            # Update the sequence.
            feedback.sequence.append(feedback.sequence[i] + feedback.sequence[i-1])
            # Publish feedback
            goal_handle.publish_feedback(feedback)
            print('Publishing feedback')
            # 10 Hz update rate
            time.sleep(0.1)
        # Send final result
        result = Fibonacci.Result()
        result.sequence = feedback.sequence
        goal_handle.succeed()
        print('Goal succeeded')
        return result
    # Attach the plain functions directly on an ExpectedGoal instance.
    expected_goal = ExpectedGoal()
    expected_goal.is_goal_expected = is_goal_expected
    expected_goal.execute_goal = execute_goal
    expected_goals.append(expected_goal)
    return expected_goals
def generate_expected_nested_message_goals():
    """Build the ExpectedGoal list for the NestedMessage action.

    Accepts goals whose nested duration is positive, publishes ten feedback
    messages (~10 Hz), and returns a result derived arithmetically from the
    goal value.
    """
    import rclpy
    expected_goals = []
    def is_goal_expected(goal):
        # Only a NestedMessage goal with a positive duration is handled.
        return (isinstance(goal, NestedMessage.Goal) and
                goal.nested_field_no_pkg.duration_value.sec > 0)
    def execute_goal(goal_handle):
        goal = goal_handle.request
        # Feedback and result are fixed multiples of the goal's duration.
        feedback = NestedMessage.Feedback()
        feedback.nested_different_pkg.sec = 2 * goal.nested_field_no_pkg.duration_value.sec
        result = NestedMessage.Result()
        result.nested_field.int32_value = 4 * goal.nested_field_no_pkg.duration_value.sec
        num_feedback = 10
        for i in range(0, num_feedback):
            # Abort with an empty result if rclpy is shutting down.
            if not rclpy.ok():
                goal_handle.abort()
                return NestedMessage.Result()
            # Check if the goal was canceled
            if goal_handle.is_cancel_requested:
                goal_handle.canceled()
                print('Goal was canceled')
                return result
            # Publish feedback
            goal_handle.publish_feedback(feedback)
            print('Publishing feedback')
            # 10 Hz update rate
            time.sleep(0.1)
        # Send final result
        goal_handle.succeed()
        print('Goal succeeded')
        return result
    # Attach the plain functions directly on an ExpectedGoal instance.
    expected_goal = ExpectedGoal()
    expected_goal.is_goal_expected = is_goal_expected
    expected_goal.execute_goal = execute_goal
    expected_goals.append(expected_goal)
    return expected_goals
if __name__ == '__main__':
    import rclpy
    # Parse which action type to serve and the node's namespace.
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('action_type', help='type of the ROS action')
    parser.add_argument('namespace', help='namespace of the ROS node')
    args = parser.parse_args()
    rclpy.init(args=[])
    node = rclpy.create_node('action_server', namespace=args.namespace)
    if 'Fibonacci' == args.action_type:
        action_server = receive_goals(node, Fibonacci, generate_expected_fibonacci_goals())
    elif 'NestedMessage' == args.action_type:
        action_server = receive_goals(
            node,
            NestedMessage,
            generate_expected_nested_message_goals(),
        )
    else:
        # Unknown action type: clean up and signal failure to the caller.
        print('Unknown action type {!r}'.format(args.action_type), file=sys.stderr)
        node.destroy_node()
        rclpy.shutdown()
        sys.exit(1)
    try:
        # Spin until interrupted or externally shut down.
        rclpy.spin(node)
    except KeyboardInterrupt:
        print('Action server stopped cleanly')
    except ExternalShutdownException:
        sys.exit(1)
    except BaseException:
        print('Exception in action server:', file=sys.stderr)
        raise
    finally:
        rclpy.try_shutdown()
        node.destroy_node()
|
{
"content_hash": "3e34853b669deda759bf3bf2f3248f98",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 92,
"avg_line_length": 30.142857142857142,
"alnum_prop": 0.6269194312796208,
"repo_name": "ros2/system_tests",
"id": "4fa0e1415f21fbacf935f7d81ceca365d6c6b7b5",
"size": "5877",
"binary": false,
"copies": "1",
"ref": "refs/heads/rolling",
"path": "test_communication/test/action_server_py.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "322827"
},
{
"name": "CMake",
"bytes": "55713"
},
{
"name": "Python",
"bytes": "44951"
}
],
"symlink_target": ""
}
|
"""Sphinx configuration for the Boreas documentation build."""

import sys
import os

# If extensions or modules to document with autodoc live in another
# directory, add it to sys.path here (use os.path.abspath for paths
# relative to this file), e.g.:
#   sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# Sphinx extension modules; none are required for this project.
extensions = []

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Boreas'
copyright = u'2013, Karol Majta'

# The short X.Y version and the full release string (including tags).
version = '1.0'
release = '1.0.0'

# Patterns, relative to the source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'default'

# Paths that contain custom static files (such as style sheets); these are
# copied after the builtin static files, so "default.css" would override
# the builtin one.
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'Boreasdoc'

# -- Options for LaTeX output --------------------------------------------------

# No LaTeX overrides (papersize, pointsize, preamble) are customized.
latex_elements = {
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
    ('index', 'Boreas.tex', u'Boreas Documentation',
     u'Karol Majta', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'boreas', u'Boreas Documentation',
     [u'Karol Majta'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category).
texinfo_documents = [
    ('index', 'Boreas', u'Boreas Documentation',
     u'Karol Majta', 'Boreas', 'One line description of project.',
     'Miscellaneous'),
]
|
{
"content_hash": "403900a2d1789683c5e3ac1834d00854",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 80,
"avg_line_length": 31.847161572052403,
"alnum_prop": 0.7002605237899355,
"repo_name": "karolmajta/boreas",
"id": "bbfb38d86864dc4309b175b7e2e90f95e70941d9",
"size": "7710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25324"
},
{
"name": "Shell",
"bytes": "5096"
}
],
"symlink_target": ""
}
|
import smbus
# ===========================================================================
# Adafruit_I2C Class
# ===========================================================================
class Adafruit_I2C :
    """Thin Python 2 wrapper around smbus for talking to one I2C device.

    On any IOError the read/write helpers print a diagnostic via errMsg()
    and return -1 instead of raising.
    """
    @staticmethod
    def getPiRevision():
        "Gets the version number of the Raspberry Pi board"
        # Courtesy quick2wire-python-api
        # https://github.com/quick2wire/quick2wire-python-api
        try:
            with open('/proc/cpuinfo','r') as f:
                for line in f:
                    if line.startswith('Revision'):
                        # Last digit of the Revision field distinguishes rev 1
                        # boards ('1'/'2') from later ones.
                        return 1 if line.rstrip()[-1] in ['1','2'] else 2
        except:
            # No /proc/cpuinfo (or unreadable): report "unknown" as 0.
            return 0
    @staticmethod
    def getPiI2CBusNumber():
        # Gets the I2C bus number /dev/i2c#
        # (rev 1 boards expose the header I2C on bus 0, later ones on bus 1).
        return 1 if Adafruit_I2C.getPiRevision() > 1 else 0
    def __init__(self, address, busnum=-1, debug=False):
        # `address` is the 7-bit I2C slave address of the target device.
        self.address = address
        # By default, the correct I2C bus is auto-detected using /proc/cpuinfo
        # Alternatively, you can hard-code the bus version below:
        # self.bus = smbus.SMBus(0); # Force I2C0 (early 256MB Pi's)
        # self.bus = smbus.SMBus(1); # Force I2C1 (512MB Pi's)
        self.bus = smbus.SMBus(
            busnum if busnum >= 0 else Adafruit_I2C.getPiI2CBusNumber())
        self.debug = debug
    def reverseByteOrder(self, data):
        "Reverses the byte order of an int (16-bit) or long (32-bit) value"
        # Courtesy Vishal Sapre
        # Count the bytes from the hex representation (strip py2 'L' suffix);
        # then rebuild the value least-significant byte first.
        byteCount = len(hex(data)[2:].replace('L','')[::2])
        val = 0
        for i in range(byteCount):
            val = (val << 8) | (data & 0xff)
            data >>= 8
        return val
    def errMsg(self):
        # Common IOError handler: print a hint and return the -1 sentinel.
        print "Error accessing 0x%02X: Check your I2C address" % self.address
        return -1
    def write8(self, reg, value):
        "Writes an 8-bit value to the specified register/address"
        try:
            self.bus.write_byte_data(self.address, reg, value)
            if self.debug:
                print "I2C: Wrote 0x%02X to register 0x%02X" % (value, reg)
        except IOError, err:
            return self.errMsg()
    def write16(self, reg, value):
        "Writes a 16-bit value to the specified register/address pair"
        try:
            self.bus.write_word_data(self.address, reg, value)
            if self.debug:
                print ("I2C: Wrote 0x%02X to register pair 0x%02X,0x%02X" %
                       (value, reg, reg+1))
        except IOError, err:
            return self.errMsg()
    def writeList(self, reg, list):
        "Writes an array of bytes using I2C format"
        # NOTE(review): parameter name `list` shadows the builtin; kept for
        # interface compatibility.
        try:
            if self.debug:
                print "I2C: Writing list to register 0x%02X:" % reg
                print list
            self.bus.write_i2c_block_data(self.address, reg, list)
        except IOError, err:
            return self.errMsg()
    def readList(self, reg, length):
        "Read a list of bytes from the I2C device"
        try:
            results = self.bus.read_i2c_block_data(self.address, reg, length)
            if self.debug:
                print ("I2C: Device 0x%02X returned the following from reg 0x%02X" %
                       (self.address, reg))
                print results
            return results
        except IOError, err:
            return self.errMsg()
    def readU8(self, reg):
        "Read an unsigned byte from the I2C device"
        try:
            result = self.bus.read_byte_data(self.address, reg)
            if self.debug:
                print ("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" %
                       (self.address, result & 0xFF, reg))
            return result
        except IOError, err:
            return self.errMsg()
    def readS8(self, reg):
        "Reads a signed byte from the I2C device"
        try:
            result = self.bus.read_byte_data(self.address, reg)
            # Convert the raw unsigned byte to two's-complement signed.
            if result > 127: result -= 256
            if self.debug:
                print ("I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" %
                       (self.address, result & 0xFF, reg))
            return result
        except IOError, err:
            return self.errMsg()
    def readU16(self, reg):
        "Reads an unsigned 16-bit value from the I2C device"
        try:
            result = self.bus.read_word_data(self.address, reg)
            if self.debug:
                print ("I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" %
                       (self.address, result & 0xFFFF, reg))
            return result
        except IOError, err:
            return self.errMsg()
    def readS16(self, reg):
        "Reads a signed 16-bit value from the I2C device"
        try:
            result = self.bus.read_word_data(self.address, reg)
            # Convert the raw unsigned word to two's-complement signed.
            if result > 32767: result -= 65536
            if self.debug:
                print ("I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" %
                       (self.address, result & 0xFFFF, reg))
            return result
        except IOError, err:
            return self.errMsg()
if __name__ == '__main__':
    # Smoke test: probe the auto-detected bus (address 0 is a placeholder).
    try:
        bus = Adafruit_I2C(address=0)
        print "Default I2C bus is accessible"
    except:
        print "Error accessing default I2C bus"
|
{
"content_hash": "c8c9373c620cda484b10d8efa9ce8530",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 77,
"avg_line_length": 32.145833333333336,
"alnum_prop": 0.603802117087924,
"repo_name": "Tyler-Ward/GolemClock",
"id": "77f3795f05b45235ecf05cfcca6ae6d0581ca19b",
"size": "4648",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "display/Adafruit_I2C.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "59252"
},
{
"name": "Python",
"bytes": "51747"
},
{
"name": "Shell",
"bytes": "247"
}
],
"symlink_target": ""
}
|
import sys
from struct import unpack_from as old_unpack_from
from struct import unpack_from as old_unpack
from struct import calcsize
from collections import OrderedDict
# From: http://code.activestate.com/recipes/577197-sortedcollection/
from SortedCollection import SortedCollection
MEGABYTE = 1024 * 1024  # 1 MiB -- default FileMap block size
class LRUQueue(object):
    """
    An insertion-ordered queue with a "touch" operation.

    Items come out oldest-first; touching an item resets it to the
    newest slot.  Implemented over an OrderedDict keyed by ``key(item)``,
    i.e. a priority queue ordered by insertion time with explicit
    support for "touch".
    """
    def __init__(self, key=lambda n: n):
        """
        @param key: Extracts a unique id from each item (identity by
            default); supply one when items are complex objects.
        """
        super(LRUQueue, self).__init__()
        self._q = OrderedDict()
        self._key = key

    def push(self, v):
        # Insert at the newest slot (re-assigning an existing key keeps
        # its current position in the OrderedDict).
        self._q[self._key(v)] = v

    def pop(self):
        # Remove and return the oldest entry's value (FIFO end).
        _, value = self._q.popitem(last=False)
        return value

    def touch(self, v):
        """
        Reset the given value back to the newest slot.
        """
        ident = self._key(v)
        del self._q[ident]
        self._q[ident] = v

    def size(self):
        return len(self._q)

    def __len__(self):
        return self.size()

    @staticmethod
    def test():
        q = LRUQueue()
        assert (q.size(), len(q)) == (0, 0)
        q.push(0)
        assert (q.size(), len(q)) == (1, 1)
        assert q.pop() == 0
        assert (q.size(), len(q)) == (0, 0)
        for item in (0, 1):
            q.push(item)
        assert q.pop() == 0
        assert q.pop() == 1
        q.push(0)
        q.push(1)
        q.touch(0)
        assert q.pop() == 1
        assert q.pop() == 0
        q = LRUQueue(key=lambda n: n[0])
        q.push([0])
        assert q.pop() == [0]
        q.push([0])
        q.push([1])
        assert q.pop() == [0]
        assert q.pop() == [1]
        return True
class BoundedLRUQueue(object):
    """
    A LRUQueue with a finite capacity.

    Pushing past `capacity` automatically pops and returns the LRU item;
    otherwise this behaves exactly like LRUQueue.
    """
    def __init__(self, capacity, key=lambda n: n):
        """
        @param capacity: Maximum number of items retained.
        @param key: Extracts a unique id from each item (identity by
            default); supply one when items are complex objects.
        """
        super(BoundedLRUQueue, self).__init__()
        self._q = LRUQueue(key)
        self._capacity = capacity

    def push(self, v):
        # Evict and return the oldest item once over capacity; else None.
        self._q.push(v)
        if len(self._q) > self._capacity:
            return self._q.pop()
        return None

    def pop(self):
        return self._q.pop()

    def touch(self, v):
        self._q.touch(v)

    def size(self):
        return len(self._q)

    def __len__(self):
        return self.size()

    @staticmethod
    def test():
        q = BoundedLRUQueue(5)
        assert (q.size(), len(q)) == (0, 0)
        q.push(0)
        assert (q.size(), len(q)) == (1, 1)
        assert q.pop() == 0
        assert (q.size(), len(q)) == (0, 0)
        for item in (0, 1):
            q.push(item)
        assert q.pop() == 0
        assert q.pop() == 1
        q.push(0)
        q.push(1)
        q.touch(0)
        assert q.pop() == 1
        assert q.pop() == 0
        q = BoundedLRUQueue(5, key=lambda n: n[0])
        q.push([0])
        assert q.pop() == [0]
        q.push([0])
        q.push([1])
        assert q.pop() == [0]
        assert q.pop() == [1]
        q = BoundedLRUQueue(2)
        assert q.push(0) is None
        assert q.push(1) is None
        assert q.push(2) == 0
        assert q.pop() == 1
        assert q.pop() == 2
        return True
class RangeCache(object):
    """
    A fixed-capacity cache of numeric ranges.

    Each entry describes a range (numeric start plus numeric length,
    extracted via `start_key`/`length_key`).  `push` adds a range,
    silently evicting the least-recently-used entry once `capacity` is
    exceeded; `get` efficiently finds the single entry whose range
    contains a given value.
    """
    def __init__(self, capacity,
                 start_key=lambda o: o[0],
                 length_key=lambda o: o[1]):
        """
        @param capacity: Maximum number of ranges retained.
        @param start_key: A function that fetches the range start from an item.
        @param length_key: A function that fetches the range length from an item.
        """
        super(RangeCache, self).__init__()
        self._ranges = SortedCollection(key=start_key)
        self._lru = BoundedLRUQueue(capacity, key=start_key)
        self._start_key = start_key
        self._length_key = length_key

    def push(self, o):
        """
        Add a range to the cache.
        With the default key functions `o` is a 3-tuple:
        (range start, range length, range item).
        """
        self._ranges.insert(o)
        evicted = self._lru.push(o)
        if evicted is not None:
            # Capacity exceeded: drop the LRU range from the sorted index too.
            self._ranges.remove(evicted)

    def touch(self, o):
        """Mark `o` as recently used so it is evicted last."""
        self._lru.touch(o)

    def get(self, value):
        """
        Search for the numeric `value` within the tracked ranges.
        @raise ValueError: if the value is not found in the range cache.
        """
        candidate = self._ranges.find_le(value)
        end = self._start_key(candidate) + self._length_key(candidate)
        if value >= end:
            raise ValueError("%s not found in range cache" % value)
        return candidate

    @staticmethod
    def test():
        def expect_miss(cache, value):
            missed = False
            try:
                cache.get(value)
            except ValueError:
                missed = True
            assert missed
        q = RangeCache(2)
        expect_miss(q, 0)
        expect_miss(q, 1)
        q.push((1, 1, [0]))
        expect_miss(q, 0)
        assert q.get(1) == (1, 1, [0])
        assert q.get(1.99) == (1, 1, [0])
        expect_miss(q, 2.01)
        q.push((3, 1, [1]))
        assert q.get(1) == (1, 1, [0])
        assert q.get(3) == (3, 1, [1])
        q.push((5, 1, [2]))
        expect_miss(q, 1)
        assert q.get(3) == (3, 1, [1])
        assert q.get(5) == (5, 1, [2])
        q.touch((3, 1, [1]))
        q.push((7, 1, [3]))
        assert q.get(3) == (3, 1, [1])
        assert q.get(7) == (7, 1, [3])
        expect_miss(q, 5)
        return True
class FileMap(object):
    """
    FileMap is a wrapper for a file-like object that satisfies the
    buffer interface. This is essentially the inverse of StringIO.
    It implements a caching layer over the calls to the OS seek/read
    functions for improved performance.
    Q: Why might you want this over mmap?
    A: 1) Its pure Python
       2) You can stack this over any Python file-like objects.
          eg. FileMap over ZipFile gives you a random access buffer
          thats backed by a compressed image on the file system.

    NOTE(review): slicing relies on __getslice__ and sys.maxint, which
    are Python 2 only; under Python 3 a slice would reach __getitem__
    as a slice object, which is not handled here.
    """
    def __init__(self, filelike, block_size=MEGABYTE,
                 cache_size=10, size=None):
        """
        @param filelike: Underlying file-like object (must support seek/read).
        @param block_size: Bytes fetched per read; also the cache granularity.
        @param cache_size: Maximum number of blocks kept in the LRU cache.
        @param size: Total buffer size in bytes.
            If `size` is not provided, then `filelike` must have the
            `seek` and `tell` methods implemented.
        """
        super(FileMap, self).__init__()
        if size is None:
            import os
            filelike.seek(0, os.SEEK_END)
            size = filelike.tell()
        self._f = filelike
        self._block_size = block_size
        self._size = size
        self._block_cache = RangeCache(cache_size)

    def __getitem__(self, index):
        """Return the single item at `index`; negative indices count from the end."""
        if index < 0:
            index = self._size + index
        # Fix: delegate to _get_containing_block() instead of duplicating its
        # cache-lookup/fill logic inline (behavior is unchanged -- the helper
        # resolves the same block and maintains the same caches).
        buf = self._get_containing_block(index)
        return buf[index % self._block_size]

    def _get_containing_block(self, index):
        """
        Given an index, return the block-aligned block that contains it,
        updating the appropriate caches.
        """
        block_index = index % self._block_size
        block_start = index - block_index
        try:
            hit = self._block_cache.get(block_start)
            buf = hit[2]
            self._block_cache.touch(hit)
            return buf
        except ValueError:
            # Cache miss: read the block from the file and remember it.
            self._f.seek(block_start)
            buf = self._f.read(self._block_size)
            self._block_cache.push((block_start, self._block_size, buf))
            return buf

    def __getslice__(self, start, end):
        """Return the sub-buffer [start:end), crossing block boundaries as needed."""
        if end == sys.maxint:
            # An open-ended slice (buf[x:]) arrives as sys.maxint.
            end = self._size
        start_block_index = start % self._block_size
        start_block_start = start - start_block_index
        end_block_index = end % self._block_size
        end_block_start = end - end_block_index
        if start_block_start == end_block_start:
            # easy case, everything falls within the same block
            buf = self._get_containing_block(start)
            return buf[start_block_index:end_block_index]
        else:
            # hard case, slice goes over one or more block boundaries
            ret = ""
            # phase 1, start to block boundary
            buf = self._get_containing_block(start_block_start)
            # Fix: slice to the block length rather than the absolute offset
            # `start_block_start + block_size` the original used; slicing
            # clamps, so the result is identical, but the intent is clearer.
            ret += buf[start_block_index:self._block_size]
            # phase 2, any complete blocks
            cur_block_start = start_block_start + self._block_size
            while cur_block_start + self._block_size < end_block_start:
                buf = self._get_containing_block(cur_block_start)
                ret += buf
                cur_block_start += self._block_size
            # phase 3, block boundary to end
            buf = self._get_containing_block(cur_block_start)
            # An end exactly on a block boundary means "take the whole block".
            e = end_block_index or self._block_size
            ret += buf[0:e]
            return ret

    def __len__(self):
        return self._size

    @staticmethod
    def test():
        # NOTE: Python 2 only (cStringIO; slicing relies on __getslice__).
        from cStringIO import StringIO
        f = StringIO("0123abcd4567efgh")
        buf = FileMap(f, block_size=4, cache_size=2)
        assert len(buf) == 16
        assert buf[0] == "0"
        assert buf[1] == "1"
        assert buf[0:2] == "01"
        assert buf[4] == "a"
        assert buf[5] == "b"
        assert buf[4:6] == "ab"
        assert buf[2:6] == "23ab"
        assert buf[0:8] == "0123abcd"
        assert buf[0:12] == "0123abcd4567"
        assert buf[0:16] == "0123abcd4567efgh"
        assert buf[:] == "0123abcd4567efgh"
        assert buf[-1] == "h"
        assert buf[-2:] == "gh"
        assert buf[-4:] == "efgh"
        assert buf[-8:] == "4567efgh"
        return True
def unpack_from(fmt, buffer, off=0):
    """
    Shim struct.unpack_from that diverts unpacking of FileMaps.
    Without it you'd get an exception like:
      TypeError: unpack_from() argument 1 must be convertible to a buffer, not FileMap
    For a FileMap we copy out a true sub-buffer and feed that to the real
    struct.unpack_from.  There's an extra allocation and copy, but there's
    no getting around that.
    """
    if isinstance(buffer, FileMap):
        sub = buffer[off:off + calcsize(fmt)]
        return old_unpack_from(fmt, sub, 0x0)
    return old_unpack_from(fmt, buffer, off)
def unpack(fmt, string):
    """
    Like the shimmed unpack_from, but for struct.unpack.

    Bug fixes vs. the original:
      - it tested the builtin ``buffer`` instead of the ``string`` argument,
        so the FileMap path could never be taken (and it raises NameError on
        Python 3, where ``buffer`` does not exist);
      - it called the module-level ``old_unpack`` -- which is actually
        ``struct.unpack_from`` due to the aliased import at the top of the
        file -- with three arguments, which would raise TypeError.
    """
    # Import the real struct.unpack locally: the module-level `old_unpack`
    # is mis-aliased to struct.unpack_from at the top of the file.
    from struct import unpack as _struct_unpack
    if not isinstance(string, FileMap):
        return _struct_unpack(fmt, string)
    size = calcsize(fmt)
    # Copy out exactly `size` items so struct.unpack sees an exact-length buffer.
    buf = string[:size]
    return _struct_unpack(fmt, buf)
def struct_test():
    """
    Exercise the struct shims against a little-endian FileMap buffer.

    Returns True on success so test() can report it, matching the other
    *.test() helpers.  (The original implicitly returned None, so the
    "struct passed tests." message in test() could never print.)
    """
    from cStringIO import StringIO
    f = StringIO("\x04\x03\x02\x01")
    buf = FileMap(f)
    assert unpack_from("<B", buf, 0x0)[0] == 0x04
    assert unpack_from("<H", buf, 0x0)[0] == 0x0304
    assert unpack_from("<I", buf, 0x0)[0] == 0x01020304
    return True
def test():
    """Run every module self-test, reporting each suite that returns truthy."""
    if LRUQueue.test():
        print "LRUQueue passed tests."
    if BoundedLRUQueue.test():
        print "BoundedLRUQueue passed tests."
    if RangeCache.test():
        print "RangeCache passed tests."
    if FileMap.test():
        print "FileMap passed tests."
    # Only printed when struct_test() returns a truthy value.
    if struct_test():
        print "struct passed tests."
if __name__ == "__main__":
test()
|
{
"content_hash": "aeb50ac44f647a57b083f34c4685d696",
"timestamp": "",
"source": "github",
"line_count": 465,
"max_line_length": 86,
"avg_line_length": 27.920430107526883,
"alnum_prop": 0.540321959485481,
"repo_name": "ohio813/INDXParse",
"id": "d95cd5f74562e07585c346ec5eac5c2dddcaaad5",
"size": "13002",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FileMap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "278815"
}
],
"symlink_target": ""
}
|
''' pidhandler.py '''
import json
import tornado.web
from heron.shell.src.python import utils
class PidHandler(tornado.web.RequestHandler):
  """
  Responsible for getting the process ID for an instance.

  GET returns, as a JSON string, the output of a process pipeline that
  lists all processes, narrows them to the instance's java process, and
  prints its PID column.
  """

  # pylint: disable=attribute-defined-outside-init
  @tornado.web.asynchronous
  def get(self, instance_id):
    ''' get method '''
    self.content_type = 'application/json'
    # Each command is an argv list handed to the chained subprocesses with
    # no shell in between, so the awk program must NOT be wrapped in
    # shell-style quotes -- they would reach awk literally and break the
    # program (the original passed "'{print $2}'", quotes included).
    self.write(json.dumps(utils.chain([
        ['ps', 'auxwwww'],
        ['grep', instance_id],
        ['grep', 'java'],
        ['awk', '{print $2}']])).strip())
    self.finish()
|
{
"content_hash": "bf874b20077fa06974b1ad8723909496",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 57,
"avg_line_length": 26.181818181818183,
"alnum_prop": 0.6267361111111112,
"repo_name": "nlu90/heron",
"id": "da1c5c3e169901b66fd619e9930e1f56e41f891d",
"size": "1426",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "heron/shell/src/python/handlers/pidhandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "14063"
},
{
"name": "C++",
"bytes": "1723598"
},
{
"name": "CSS",
"bytes": "77708"
},
{
"name": "HCL",
"bytes": "5314"
},
{
"name": "HTML",
"bytes": "39414"
},
{
"name": "Java",
"bytes": "4879240"
},
{
"name": "JavaScript",
"bytes": "1107160"
},
{
"name": "M4",
"bytes": "18741"
},
{
"name": "Makefile",
"bytes": "1046"
},
{
"name": "Objective-C",
"bytes": "2143"
},
{
"name": "Perl",
"bytes": "9298"
},
{
"name": "Python",
"bytes": "1693745"
},
{
"name": "Ruby",
"bytes": "1930"
},
{
"name": "Scala",
"bytes": "130046"
},
{
"name": "Shell",
"bytes": "197064"
},
{
"name": "Smarty",
"bytes": "528"
}
],
"symlink_target": ""
}
|
"""
Copyright 2015 Brocade Communications Systems, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import xml.etree.ElementTree as ET
import pynos.versions.base.interface as interface
import pynos.utilities
class TestInterface(unittest.TestCase):
"""
Interface unit tests. Compare expected XML to generated XML.
"""
    def setUp(self):
        """Build a fresh Interface wrapper plus fixture values shared by all tests."""
        # return_xml presumably echoes the generated XML back so tests can
        # compare it -- confirm in pynos.utilities.
        self.interface = interface.Interface(pynos.utilities.return_xml)
        self.namespace = 'urn:brocade.com:mgmt:brocade-interface'
        # Physical interface used by most tests.
        self.phys_int_type = 'gigabitethernet'
        self.phys_name = '1/0/0'
        # Primary/secondary VLAN ids for the private-VLAN tests.
        self.vlan_id = '40'
        self.sec_vlan = '50'
        # Namespaces and sample addresses for the ip-address tests.
        self.ipv4_config_namespace = 'urn:brocade.com:mgmt:brocade-ip-config'
        self.ipv4_address = '20.10.10.1/24'
        self.ipv6_address = 'fc00:1:3:1ad3:0:0:23:a/64'
        self.ipv6_config_namespace = 'urn:brocade.com:mgmt:brocade-ipv6-config'
def test_description(self):
expected = '<config>'\
'<interface xmlns="{}"><gigabitethernet>'\
'<name>1/0/0</name>'\
'<description>Hodor</description>'\
'</gigabitethernet></interface>'\
'</config>'.format(self.namespace)
description = 'Hodor'
result = self.interface.description(int_type=self.phys_int_type,
name=self.phys_name,
desc=description)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_description_exception(self):
with self.assertRaises(KeyError):
self.interface.description(int_type=self.phys_int_type,
desc='Hodor')
def test_private_vlan_type(self):
expected = '<config>'\
'<interface-vlan xmlns="{}"><interface>'\
'<vlan><name>40</name><private-vlan>'\
'<pvlan-type-leaf>isolated</pvlan-type-leaf>'\
'</private-vlan></vlan></interface></interface-vlan>'\
'</config>'.format(self.namespace)
result = self.interface.private_vlan_type(name=self.vlan_id,
pvlan_type='isolated')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_private_vlan_type_exception(self):
with self.assertRaises(KeyError):
self.interface.private_vlan_type(name=self.vlan_id)
def test_vlan_pvlan_association_add(self):
expected = '<config><interface-vlan xmlns="{}"><interface><vlan>'\
'<name>{}</name><private-vlan><association>'\
'<sec-assoc-add>{}</sec-assoc-add></association>'\
'</private-vlan></vlan></interface></interface-vlan>'\
'</config>'.format(self.namespace,
self.vlan_id,
self.sec_vlan)
interface = self.interface
result = interface.vlan_pvlan_association_add(name=self.vlan_id,
sec_vlan=self.sec_vlan)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_vlan_pvlan_association_add_exception(self):
with self.assertRaises(KeyError):
self.interface.vlan_pvlan_association_add(name=self.vlan_id)
def test_pvlan_host_association(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><private-vlan><host-association>'\
'<host-pri-pvlan>{3}</host-pri-pvlan>'\
'<host-sec-pvlan>{4}</host-sec-pvlan></host-association>'\
'</private-vlan></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id,
self.sec_vlan)
interface = self.interface
result = interface.pvlan_host_association(int_type=self.phys_int_type,
name=self.phys_name,
pri_vlan=self.vlan_id,
sec_vlan=self.sec_vlan)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_pvlan_host_association_exception(self):
with self.assertRaises(KeyError):
self.interface.pvlan_host_association(name=self.vlan_id)
def test_admin_state_disabled(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<shutdown /></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.admin_state(int_type=self.phys_int_type,
name=self.phys_name,
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_admin_state_enabled(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<shutdown operation="delete" /></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.admin_state(int_type=self.phys_int_type,
name=self.phys_name,
enabled=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_admin_state_exception(self):
with self.assertRaises(KeyError):
self.interface.admin_state(name=self.phys_name)
def test_trunk_allowed_vlan_add(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><allowed><vlan><add>{3}</add></vlan>'\
'</allowed></trunk></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
action='add')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_remove(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><allowed><vlan><remove>{3}</remove>'\
'</vlan></allowed></trunk></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
action='remove')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_all(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><allowed><vlan><all /></vlan>'\
'</allowed></trunk></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
action='all')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_none(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><allowed><vlan><none /></vlan>'\
'</allowed></trunk></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
action='none')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_add_ctag(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><trunk-vlan-classification><allowed>'\
'<vlan><add><trunk-vlan-id>{3}</trunk-vlan-id>'\
'<trunk-ctag-range>{4}</trunk-ctag-range></add></vlan>'\
'</allowed></trunk-vlan-classification></trunk>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id,
self.sec_vlan)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
ctag=self.sec_vlan,
action='add')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_remove_ctag(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><trunk-vlan-classification><allowed>'\
'<vlan><remove><trunk-vlan-id>{3}</trunk-vlan-id>'\
'<trunk-ctag-range>{4}</trunk-ctag-range></remove></vlan>'\
'</allowed></trunk-vlan-classification></trunk>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id,
self.sec_vlan)
result = self.interface.trunk_allowed_vlan(int_type=self.phys_int_type,
name=self.phys_name,
vlan=self.vlan_id,
ctag=self.sec_vlan,
action='remove')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_allowed_vlan_exception(self):
with self.assertRaises(KeyError):
self.interface.trunk_allowed_vlan(name=self.phys_name)
def test_mtu(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<mtu>1666</mtu></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.mtu(int_type=self.phys_int_type,
name=self.phys_name,
mtu='1666')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_mtu_exception(self):
with self.assertRaises(KeyError):
self.interface.mtu(name=self.phys_name)
def test_spanning_tree_state_phys_enabled(self):
stp_namespace = 'urn:brocade.com:mgmt:brocade-xstp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<spanning-tree xmlns="{3}">'\
'<shutdown operation="delete" /></spanning-tree></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
stp_namespace)
interface = self.interface
result = interface.spanning_tree_state(int_type=self.phys_int_type,
name=self.phys_name,
enabled=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_spanning_tree_state_phys_disabled(self):
stp_namespace = 'urn:brocade.com:mgmt:brocade-xstp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<spanning-tree xmlns="{3}"><shutdown /></spanning-tree>'\
'</{1}></interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
stp_namespace)
interface = self.interface
result = interface.spanning_tree_state(int_type=self.phys_int_type,
name=self.phys_name,
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_spanning_tree_state_vlan_enabled(self):
stp_namespace = 'urn:brocade.com:mgmt:brocade-xstp'
expected = '<config><interface-vlan xmlns="{0}"><interface><vlan>'\
'<name>{1}</name><spanning-tree xmlns="{2}">'\
'<stp-shutdown operation="delete" /></spanning-tree>'\
'</vlan></interface></interface-vlan>'\
'</config>'.format(self.namespace, self.vlan_id,
stp_namespace)
interface = self.interface
result = interface.spanning_tree_state(int_type='vlan',
name=self.vlan_id,
enabled=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_spanning_tree_state_vlan_disabled(self):
stp_namespace = 'urn:brocade.com:mgmt:brocade-xstp'
expected = '<config><interface-vlan xmlns="{0}"><interface><vlan>'\
'<name>{1}</name><spanning-tree xmlns="{2}">'\
'<stp-shutdown /></spanning-tree>'\
'</vlan></interface></interface-vlan>'\
'</config>'.format(self.namespace, self.vlan_id,
stp_namespace)
interface = self.interface
result = interface.spanning_tree_state(int_type='vlan',
name=self.vlan_id,
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_spanning_tree_state_exception(self):
with self.assertRaises(KeyError):
self.interface.spanning_tree_state(name=self.phys_name)
def test_private_vlan_mode(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><mode><private-vlan><host /></private-vlan>'\
'</mode></switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.private_vlan_mode(int_type=self.phys_int_type,
name=self.phys_name,
mode='host')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_private_vlan_mode_exception(self):
with self.assertRaises(KeyError):
self.interface.private_vlan_mode(name=self.vlan_id)
def test_vrrp_vip_ipv4(self):
namespace = 'urn:brocade.com:mgmt:brocade-vrrp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<vrrp xmlns="{3}"><vrid>1</vrid><version>3</version>'\
'<virtual-ip><virtual-ipaddr>10.10.10.10</virtual-ipaddr>'\
'</virtual-ip></vrrp></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, namespace)
result = self.interface.vrrp_vip(int_type=self.phys_int_type, vrid='1',
name=self.phys_name,
vip='10.10.10.10/24')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_vrrp_vip_ipv6(self):
namespace = 'urn:brocade.com:mgmt:brocade-vrrpv3'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ipv6><vrrpv3-group xmlns="{3}"><vrid>1</vrid>'\
'<virtual-ip><virtual-ipaddr>2001::1</virtual-ipaddr>'\
'</virtual-ip></vrrpv3-group></ipv6></gigabitethernet>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
namespace)
result = self.interface.vrrp_vip(int_type=self.phys_int_type, vrid='1',
name=self.phys_name, vip='2001::1/64')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_vrrp_vip_exception(self):
with self.assertRaises(KeyError):
self.interface.vrrp_vip()
def test_vrrp_priority_ipv4(self):
namespace = 'urn:brocade.com:mgmt:brocade-vrrp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<vrrp xmlns="{3}"><vrid>1</vrid><version>3</version>'\
'<priority>50</priority></vrrp></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, namespace)
result = self.interface.vrrp_priority(int_type=self.phys_int_type,
name=self.phys_name,
priority='50', vrid='1',
ip_version='4')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_vrrp_priority_ipv6(self):
namespace = 'urn:brocade.com:mgmt:brocade-vrrpv3'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ipv6><vrrpv3-group xmlns="{3}"><vrid>1</vrid>'\
'<priority>50</priority></vrrpv3-group></ipv6></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name, namespace)
result = self.interface.vrrp_priority(int_type=self.phys_int_type,
name=self.phys_name,
priority='50', vrid='1',
ip_version='6')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_vrrp_priority_exception(self):
with self.assertRaises(KeyError):
self.interface.vrrp_priority()
def test_proxy_arp_enabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-ip-config'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ip><ip-config xmlns="{3}"><proxy-arp /></ip-config></ip>'\
'</{1}></interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
namespace)
result = self.interface.proxy_arp(int_type=self.phys_int_type,
name=self.phys_name, enabled=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_proxy_arp_disabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-ip-config'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ip><ip-config xmlns="{3}">'\
'<proxy-arp operation="delete" /></ip-config></ip>'\
'</{1}></interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
namespace)
result = self.interface.proxy_arp(int_type=self.phys_int_type,
name=self.phys_name, enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_proxy_arp_exception(self):
with self.assertRaises(KeyError):
self.interface.proxy_arp()
def test_lacp_timeout_short(self):
namespace = 'urn:brocade.com:mgmt:brocade-lacp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<lacp xmlns="{3}"><timeout>short</timeout></lacp></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name, namespace)
result = self.interface.lacp_timeout(int_type=self.phys_int_type,
name=self.phys_name,
timeout='short')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_lacp_timeout_long(self):
namespace = 'urn:brocade.com:mgmt:brocade-lacp'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<lacp xmlns="{3}"><timeout>long</timeout></lacp></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name, namespace)
result = self.interface.lacp_timeout(int_type=self.phys_int_type,
name=self.phys_name,
timeout='long')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_lacp_timeout_exception(self):
with self.assertRaises(KeyError):
self.interface.lacp_timeout()
def test_transport_service(self):
expected = '<config><interface-vlan xmlns="{0}"><interface><vlan>'\
'<name>{1}</name><transport-service>1</transport-service>'\
'</vlan></interface></interface-vlan>'\
'</config>'.format(self.namespace, self.vlan_id)
result = self.interface.transport_service(vlan=self.vlan_id,
service_id='1')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_transport_service_exception(self):
with self.assertRaises(KeyError):
self.interface.transport_service()
def test_port_channel_minimum_links(self):
expected = '<config><interface xmlns="{0}"><port-channel><name>'\
'3</name><minimum-links>2</minimum-links></port-channel>'\
'</interface></config>'.format(self.namespace)
result = self.interface.port_channel_minimum_links(name='3',
minimum_links='2')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_port_channel_minimum_links_exception(self):
with self.assertRaises(KeyError):
self.interface.port_channel_minimum_links()
def test_fabric_isl_enabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-fcoe'
expected = '<config><interface xmlns="{0}"><tengigabitethernet><name>'\
'{1}</name><fabric xmlns="{2}"><fabric-isl>'\
'<fabric-isl-enable /></fabric-isl></fabric>'\
'</tengigabitethernet></interface>'\
'</config>'.format(self.namespace, self.phys_name,
namespace)
result = self.interface.fabric_isl(int_type='tengigabitethernet',
name=self.phys_name)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_fabric_isl_disabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-fcoe'
expected = '<config><interface xmlns="{0}"><tengigabitethernet><name>'\
'{1}</name><fabric xmlns="{2}">'\
'<fabric-isl operation="delete"><fabric-isl-enable />'\
'</fabric-isl></fabric></tengigabitethernet></interface>'\
'</config>'.format(self.namespace, self.phys_name,
namespace)
result = self.interface.fabric_isl(int_type='tengigabitethernet',
name=self.phys_name, enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_fabric_isl_exception(self):
with self.assertRaises(KeyError):
self.interface.fabric_isl()
def test_fabric_trunk_enabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-fcoe'
expected = '<config><interface xmlns="{0}"><tengigabitethernet><name>'\
'{1}</name><fabric xmlns="{2}"><fabric-trunk>'\
'<fabric-trunk-enable /></fabric-trunk></fabric>'\
'</tengigabitethernet></interface>'\
'</config>'.format(self.namespace, self.phys_name,
namespace)
result = self.interface.fabric_trunk(int_type='tengigabitethernet',
name=self.phys_name)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_fabric_trunk_disabled(self):
namespace = 'urn:brocade.com:mgmt:brocade-fcoe'
expected = '<config><interface xmlns="{0}"><tengigabitethernet><name>'\
'{1}</name><fabric xmlns="{2}">'\
'<fabric-trunk operation="delete"><fabric-trunk-enable />'\
'</fabric-trunk></fabric></tengigabitethernet></interface>'\
'</config>'.format(self.namespace, self.phys_name,
namespace)
result = self.interface.fabric_trunk(int_type='tengigabitethernet',
name=self.phys_name,
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_fabric_trunk_exception(self):
with self.assertRaises(KeyError):
self.interface.fabric_trunk()
def test_channel_group_brocade(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<channel-group><mode>active</mode><port-int>5</port-int>'\
'<type>brocade</type></channel-group></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.channel_group(int_type=self.phys_int_type,
name=self.phys_name,
mode='active', port_int='5',
channel_type='brocade')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_channel_group_standard(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<channel-group><mode>active</mode><port-int>5</port-int>'\
'<type>standard</type></channel-group></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.channel_group(int_type=self.phys_int_type,
name=self.phys_name,
mode='active', port_int='5',
channel_type='standard')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_channel_group_exception(self):
with self.assertRaises(KeyError):
self.interface.channel_group()
def test_port_channel_vlag_ignore_split_enabled(self):
expected = '<config><interface xmlns="{0}"><port-channel>'\
'<name>5</name><vlag><ignore-split /></vlag>'\
'</port-channel></interface>'\
'</config>'.format(self.namespace)
result = self.interface.port_channel_vlag_ignore_split(name='5',
enabled=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_port_channel_vlag_ignore_split_disabled(self):
expected = '<config><interface xmlns="{0}"><port-channel>'\
'<name>5</name><vlag><ignore-split operation="delete" />'\
'</vlag></port-channel></interface>'\
'</config>'.format(self.namespace)
result = self.interface.port_channel_vlag_ignore_split(name='5',
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_port_channel_vlag_ignore_split_exception(self):
with self.assertRaises(KeyError):
self.interface.port_channel_vlag_ignore_split()
def test_tag_native_vlan_enabled(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><tag><native-vlan /></tag></trunk>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.tag_native_vlan(int_type=self.phys_int_type,
name=self.phys_name)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_tag_native_vlan_disabled(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><trunk><tag>'\
'<native-vlan operation="delete" /></tag></trunk>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.tag_native_vlan(int_type=self.phys_int_type,
name=self.phys_name,
enabled=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_tag_native_vlan_exception(self):
with self.assertRaises(KeyError):
self.interface.tag_native_vlan()
def test_v6_nd_suppress_ra(self):
ra_namespace = 'urn:brocade.com:mgmt:brocade-ipv6-nd-ra'
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ipv6><ipv6-nd-ra xmlns="{3}"><ipv6-intf-cmds><nd>'\
'<suppress-ra><suppress-ra-all /></suppress-ra></nd>'\
'</ipv6-intf-cmds></ipv6-nd-ra></ipv6></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, ra_namespace)
result = self.interface.v6_nd_suppress_ra(int_type=self.phys_int_type,
name=self.phys_name)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_v6_nd_suppress_ra_interface_ve(self):
rbridge_namespace = 'urn:brocade.com:mgmt:brocade-rbridge'
ipv6_namespace = 'urn:brocade.com:mgmt:brocade-ipv6-config'
ra_namespace = 'urn:brocade.com:mgmt:brocade-ipv6-nd-ra'
expected = '<config><rbridge-id xmlns="{0}"><rbridge-id>1'\
'</rbridge-id><interface xmlns="{1}"><ve><name>7</name>'\
'<ipv6 xmlns="{2}"><ipv6-nd-ra xmlns="{3}">'\
'<ipv6-intf-cmds><nd><suppress-ra><suppress-ra-all />'\
'</suppress-ra></nd></ipv6-intf-cmds></ipv6-nd-ra></ipv6>'\
'</ve></interface></rbridge-id>'\
'</config>'.format(rbridge_namespace, self.namespace,
ipv6_namespace, ra_namespace)
result = self.interface.v6_nd_suppress_ra(int_type='ve', name='7')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_v6_nd_suppress_ra_exception(self):
with self.assertRaises(KeyError):
self.interface.tag_native_vlan()
def test_trunk_mode(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><mode><vlan-mode>trunk</vlan-mode></mode>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name)
result = self.interface.trunk_mode(int_type=self.phys_int_type,
name=self.phys_name, mode='trunk')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_trunk_mode_exception(self):
with self.assertRaises(KeyError):
self.interface.trunk_mode()
def test_switchport_pvlan_mapping(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<switchport><private-vlan><mapping><promis-pri-pvlan>'\
'{3}</promis-pri-pvlan><promis-sec-pvlan-range>{4}'\
'</promis-sec-pvlan-range></mapping></private-vlan>'\
'</switchport></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name, self.vlan_id,
self.sec_vlan)
intf = self.interface
result = intf.switchport_pvlan_mapping(int_type=self.phys_int_type,
name=self.phys_name,
pri_vlan=self.vlan_id,
sec_vlan=self.sec_vlan)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_switchport_pvlan_mapping_exception(self):
with self.assertRaises(KeyError):
self.interface.switchport_pvlan_mapping()
def test_ip_address_exception(self):
with self.assertRaises(KeyError):
self.interface.ip_address(name=self.phys_name)
def test_ipv4_address_add(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ip><ip-config xmlns="{3}"><address><address>{4}'\
'</address></address></ip-config></ip></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name,
self.ipv4_config_namespace,
self.ipv4_address)
result = self.interface.ip_address(int_type=self.phys_int_type,
name=self.phys_name,
ip_addr=self.ipv4_address,
delete=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_ipv4_address_delete(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ip><ip-config xmlns="{3}"><address operation="delete">'\
'<address>{4}</address></address></ip-config></ip></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
self.ipv4_config_namespace,
self.ipv4_address)
result = self.interface.ip_address(int_type=self.phys_int_type,
name=self.phys_name,
ip_addr=self.ipv4_address,
delete=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_ipv6_address_add(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ipv6><ipv6-config xmlns="{3}"><address><ipv6-address>'\
'<address>{4}</address></ipv6-address></address>'\
'</ipv6-config></ipv6></{1}></interface>'\
'</config>'.format(self.namespace, self.phys_int_type,
self.phys_name,
self.ipv6_config_namespace,
self.ipv6_address)
result = self.interface.ip_address(int_type=self.phys_int_type,
name=self.phys_name,
ip_addr=self.ipv6_address,
delete=False)
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_ipv6_address_delete(self):
expected = '<config><interface xmlns="{0}"><{1}><name>{2}</name>'\
'<ipv6><ipv6-config xmlns="{3}"><address '\
'operation="delete"><ipv6-address><address>{4}</address>'\
'</ipv6-address></address></ipv6-config></ipv6></{1}>'\
'</interface></config>'.format(self.namespace,
self.phys_int_type,
self.phys_name,
self.ipv6_config_namespace,
self.ipv6_address)
result = self.interface.ip_address(int_type=self.phys_int_type,
name=self.phys_name,
ip_addr=self.ipv6_address,
delete=True)
result = ET.tostring(result)
self.assertEquals(expected, result)
|
{
"content_hash": "0a8c3b5d488fcc774e4658ffae34ad33",
"timestamp": "",
"source": "github",
"line_count": 762,
"max_line_length": 79,
"avg_line_length": 53.0485564304462,
"alnum_prop": 0.4929124508324469,
"repo_name": "SivagnanamCiena/pynos",
"id": "fd8b8a5bdcaa031635d463562c5c0c0990c3fbf3",
"size": "40445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/versions/base/test_interface.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20665905"
}
],
"symlink_target": ""
}
|
from werkzeug.exceptions import HTTPException
from werkzeug.routing import Map, Rule
from spa.wrappers import Request
class App(object):
    """Minimal WSGI application that dispatches requests to handler classes.

    ``urls`` is an iterable of (pattern, name, handler[, kwargs]) tuples,
    compiled once at construction into a werkzeug rule map plus a
    name -> (handler class, kwargs) table.
    """

    def __init__(self, urls, settings=None, request_class=None):
        self.urls = urls
        self.settings = settings
        self.map, self.handlers = build_rules(urls)
        self.request_class = request_class or Request

    def __call__(self, environ, start_response):
        # An HTTPException is itself a WSGI application that renders the
        # appropriate error response, so it is served the same way as a
        # regular handler.
        try:
            app = self.get_handler(environ)
            return app(environ, start_response)
        except HTTPException as error:
            return error(environ, start_response)

    def get_handler(self, environ):
        """Match the request against the rule map and instantiate its handler."""
        request = self.request_class(environ)
        adapter = self.map.bind_to_environ(environ)
        route_name, params = adapter.match()
        handler_cls, extra_kwargs = self.handlers[route_name]
        return handler_cls(self, request, params, route_name, **extra_kwargs)

    def url(self, endpoint, **values):
        """Build the URL for a named endpoint from the rule map."""
        return self.map.bind('').build(endpoint, values=values)
def build_rules(rules_tuples):
    """Compile route tuples into a werkzeug Map plus a handler lookup table.

    Returns (Map, handlers) where handlers maps each route name to a
    (handler class, kwargs) pair.
    """
    handlers = {}
    rules = []
    for pattern, name, handler, extra_kwargs in map(tuple_to_rule, rules_tuples):
        rules.append(Rule(pattern, endpoint=name))
        handlers[name] = (handler, extra_kwargs)
    return Map(rules), handlers
def tuple_to_rule(tpl):
    """Normalize a route tuple to (pattern, name, handler, kwargs).

    The optional fourth element supplies handler kwargs and defaults to an
    empty dict when absent.
    """
    pattern, name, handler = tpl[0], tpl[1], tpl[2]
    kwargs = tpl[3] if len(tpl) > 3 else {}
    return pattern, name, handler, kwargs
|
{
"content_hash": "61e992923e85bed72c1bc87872e15c03",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 79,
"avg_line_length": 30.18,
"alnum_prop": 0.6189529489728297,
"repo_name": "btubbs/spa",
"id": "cdf425594b0e91acf6b0c34aaa5be6482f47c1e7",
"size": "1509",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spa/app.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1806"
},
{
"name": "HTML",
"bytes": "37683"
},
{
"name": "JavaScript",
"bytes": "62"
},
{
"name": "Python",
"bytes": "66023"
}
],
"symlink_target": ""
}
|
"""mpeg.py - play MPEG movies in the Vision Egg
This demo uses pygame.movie to draw movies. See also the quicktime.py
demo."""
import os
import VisionEgg
VisionEgg.start_default_logging(); VisionEgg.watch_exceptions()
from VisionEgg.Core import *
from VisionEgg.FlowControl import Presentation, FunctionController, TIME_INDEPENDENT
from VisionEgg.Text import *
from VisionEgg.Textures import *
import sys
import pygame
import pygame.surface, pygame.locals
import pygame.movie
import OpenGL.GL as gl
if len(sys.argv) > 1:
filename = sys.argv[1]
else:
filename = None
screen = get_default_screen()
def quit(dummy_arg=None):
    # Stop the presentation loop by setting its remaining duration to zero
    # frames; dummy_arg lets this double as a pygame event callback.
    p.parameters.go_duration = (0,'frames')
def keydown(event):
    """pygame KEYDOWN callback: Escape ends the presentation via quit()."""
    if event.key == pygame.locals.K_ESCAPE:
        quit()
# No movie file given: show an error screen until Esc/close, then exit(1).
if not filename:
    text = Text( text = "Error: Use MPEG file as command line argument - Press Esc to quit",
                 position = (screen.size[0]/2,screen.size[1]),
                 anchor = 'top',
                 font_size=24,
                 color = (1.0,0.0,0.0))
    text2 = Text( text = "(If you have a free MPEG to contribute, it could go here.)",
                  position = (screen.size[0]/2,screen.size[1]/2),
                  anchor = 'center',
                  font_size=20,
                  color = (1.0,1.0,1.0))
    viewport = Viewport(screen=screen,
                        stimuli=[text,text2])
    p = Presentation(go_duration=('forever',),
                     viewports=[viewport],
                     handle_event_callbacks=[(pygame.locals.QUIT, quit),
                                             (pygame.locals.KEYDOWN, keydown)],
                     )
    p.go()
    sys.exit(1)
# Open the movie and compute a uniform scale so it fits the screen.
movie = pygame.movie.Movie(filename)
width, height = movie.get_size()
scale_x = screen.size[0]/float(width)
scale_y = screen.size[1]/float(height)
scale = min(scale_x,scale_y) # maintain aspect ratio
# create pygame surface (buffer to draw uncompressed movie data into)
pygame_surface = pygame.surface.Surface((width,height))
# tell the movie to render onto the surface
movie.set_display( pygame_surface )
# create a texture using this surface as the source of texel data
texture = Texture(pygame_surface)
text = Text( text = "Vision Egg MPEG demo - Press Esc to quit",
             position = (screen.size[0]/2,2),
             anchor = 'bottom',
             color = (1.0,1.0,1.0,1.0))
# Create the instance of TextureStimulus
# (mipmaps disabled and a linear min-filter used, presumably because the
# texel data is replaced every frame by update_movie() below -- confirm)
stimulus = TextureStimulus(texture = texture,
                           position = (screen.size[0]/2,screen.size[1]/2),
                           anchor = 'center',
                           size = (width*scale,height*scale),
                           mipmaps_enabled = 0,
                           texture_min_filter=gl.GL_LINEAR)
texture_object = stimulus.parameters.texture.get_texture_object()
def update_movie():
    """Upload the current movie frame (the pygame surface) to the OpenGL texture."""
    # While movie.play() decompresses the movie to pygame surface
    # in a separate thread, this sends the data to OpenGL.
    texture_object.put_sub_image( pygame_surface )
viewport = Viewport(screen=screen,
                    stimuli=[stimulus,text])
p = Presentation(go_duration=('forever',),
                 viewports=[viewport],
                 handle_event_callbacks=[(pygame.locals.QUIT, quit),
                                         (pygame.locals.KEYDOWN, keydown)],
                 )
# Re-upload the movie frame to OpenGL on every iteration of the go loop.
p.add_controller(None,None,FunctionController(during_go_func=update_movie,
                                              temporal_variables=TIME_INDEPENDENT))
movie.play()
#p.go()
# Run the presentation under the profiler and print cumulative-time stats.
import cProfile,pstats
cProfile.run('p.go()','mpeg_profile')
# NOTE: `p` is rebound here from the Presentation to the pstats.Stats object.
p = pstats.Stats('mpeg_profile')
p.sort_stats('cumulative')
p.print_stats()
|
{
"content_hash": "fd149cef1c50a3a5780f40696f301d77",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 92,
"avg_line_length": 33.43636363636364,
"alnum_prop": 0.6008700380641653,
"repo_name": "chrox/RealTimeElectrophy",
"id": "44404cab2ce044cc42e9d95f850f69ce18ee4f5b",
"size": "3700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "StimControl/test/mpeg.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "24301"
},
{
"name": "Python",
"bytes": "681188"
},
{
"name": "Shell",
"bytes": "73"
}
],
"symlink_target": ""
}
|
import json
import os
import sys
import signal
from lambda_toolkit.modules.utils import Utils
from lambda_toolkit.modules.lambdacontext import LambdaContext
def signal_handler(signum, frame):
    """Exit cleanly (status 0) on SIGINT / Ctrl-C.

    Parameters follow the stdlib signal-handler convention: the signal
    number and the current stack frame (both unused here). The previous
    parameter name `signal` shadowed the `signal` module inside the
    handler; it is renamed to the conventional `signum`.
    """
    sys.exit(0)
# Install the Ctrl-C handler as soon as this module is imported.
signal.signal(signal.SIGINT, signal_handler)
class Receiver:
    """Run a project's lambda handler locally against messages from SQS.

    Polls the configured queue forever and dispatches each message body to
    the project's ``index.lambda_handler``.
    """
    def __init__(self, conf, kwargs):
        # conf is the toolkit configuration object (provides log, sett,
        # region, projects and boto3 access); kwargs must carry 'sqsname'
        # and 'projectname'.
        self.log = conf.log
        self.conf = conf
        self.sqsname = kwargs['sqsname']
        self.projectname = kwargs['projectname']
        self.sqs = conf.get_boto3("sqs", "resource")
    def collect_receiver(self):
        """Poll the queue forever, feeding each message to the handler.

        A message is deleted only when the handler returns truthy; otherwise
        it stays in the queue and reappears after the visibility timeout.
        """
        queue = self.sqs.get_queue_by_name(QueueName=self.sqsname)
        self.log.info("Importing project " + self.projectname)
        # Locate the project directory and import its index.py from there.
        pp = os.path.join(Utils.fixpath(self.conf.sett['C_BASE_DIR']),
                          Utils.fixpath(self.conf.sett['C_LAMBDAS_DIR']),
                          self.conf.region, self.projectname)
        self.log.debug("Using project dir: " + pp)
        sys.path.append(pp)
        a = __import__("index")
        func = getattr(a, "lambda_handler")
        self.log.info("Starting the receiver using the queue " + self.sqsname)
        # Inject the project's configured environment variables before running.
        if 'variables' in self.conf.projects[self.projectname]:
            vars = self.conf.projects[self.projectname]['variables']
            for v in vars:
                self.log.info("Injecting lambda variable '" + v + "' with value '" + vars[v] + "'.")
                os.environ[v] = vars[v]
        while True:
            try:
                # Progress marker: one dot per poll.
                sys.stdout.write(".")
                sys.stdout.flush()
                msg_list = queue.receive_messages(
                    VisibilityTimeout=int(self.conf.sett['QUEUE_GETMESSAGE_VISIBILITY_TIMEOUT']),
                    MaxNumberOfMessages=int(self.conf.sett['QUEUE_GETMESSAGE_MAXNUMBEROFMESSAGES']),
                    WaitTimeSeconds=int(self.conf.sett['QUEUE_GETMESSAGE_WAITTIMESECONDS']))
                for msg in msg_list:
                    # Message bodies are JSON with 'event' and 'context' keys.
                    jsonmsg = json.loads(msg.body)
                    self.log.info("=======================================")
                    self.log.info("* New message. Sending to " + self.projectname)
                    if func(jsonmsg["event"], LambdaContext(jsonmsg["context"])):
                        try:
                            msg.delete()
                            self.log.info("* Message deleted.")
                        except Exception as e:
                            # delete() fails if the visibility timeout expired
                            # while the handler was still running.
                            self.log.warn("* Failed to delete the message. Expired.")
                            self.log.warn("Configured timeout [QUEUE_GETMESSAGE_VISIBILITY_TIMEOUT]: " + str(self.conf.sett[
                                'QUEUE_GETMESSAGE_VISIBILITY_TIMEOUT']))
                    else:
                        self.log.info("* Project " + self.projectname + " returned False. Keeping message in the queue.")
                self.log.info("=======================================")
            except Exception as a:
                # Deliberate best-effort: keep the poll loop alive on any
                # per-batch error (network blips, malformed messages, ...).
                print(a)
|
{
"content_hash": "6de32c655300c83892b8b5f370c55de5",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 124,
"avg_line_length": 42.2,
"alnum_prop": 0.534529451591063,
"repo_name": "lucioveloso/lambda-toolkit",
"id": "c3d20740113e1a0547fab0254c7496656b347c4b",
"size": "2977",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "lambda_toolkit/modules/receiver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "1423"
},
{
"name": "Python",
"bytes": "48091"
},
{
"name": "Shell",
"bytes": "557"
}
],
"symlink_target": ""
}
|
from greenlet import greenlet
#=========================================================================
# InQueuePortProxy
#=========================================================================
class InQueuePortProxy (object):
  """Deque-like *read* proxy over a val/rdy input port.

  Intended to run inside a greenlet: popleft() switches back to the parent
  greenlet until the port presents valid data, so each switch corresponds
  to waiting (at least) one simulated cycle.
  """
  #-----------------------------------------------------------------------
  # Constructor
  #-----------------------------------------------------------------------
  def __init__( s, in_ ):
    # in_: port bundle with val/rdy/msg fields.
    s.in_ = in_
    # One-character state for line_trace(): ' ' idle, '+' request, ':' stall.
    s.trace = " "
  #-----------------------------------------------------------------------
  # popleft
  #-----------------------------------------------------------------------
  def popleft( s ):
    """Yield to the parent greenlet until a message is valid, then return it."""
    # Set the rdy signal
    s.in_.rdy.next = 1
    s.trace = "+"
    # Yield so we wait at least one cycle for the response
    greenlet.getcurrent().parent.switch(0)
    # If input interface is not valid then yield
    while not s.in_.val:
      s.trace = ":"
      greenlet.getcurrent().parent.switch(0)
    # Input interface is valid so reset rdy signal and return message
    s.trace = " "
    s.in_.rdy.next = 0
    return s.in_.msg
  #-----------------------------------------------------------------------
  # line_trace
  #-----------------------------------------------------------------------
  def line_trace( s ):
    """Return the single-character trace of the current transaction state."""
    return s.trace
#=========================================================================
# OutQueuePortProxy
#=========================================================================
class OutQueuePortProxy (object):
  """Deque-like *write* proxy over a val/rdy output port.

  Intended to run inside a greenlet: append() switches back to the parent
  greenlet until the receiver asserts rdy, so each switch corresponds to
  waiting (at least) one simulated cycle.
  """
  #-----------------------------------------------------------------------
  # Constructor
  #-----------------------------------------------------------------------
  def __init__( s, out ):
    # out: port bundle with val/rdy/msg fields.
    s.out = out
    # One-character state for line_trace(): ' ' idle, '+' offer, ':' stall.
    s.trace = " "
  #-----------------------------------------------------------------------
  # append
  #-----------------------------------------------------------------------
  def append( s, msg ):
    """Drive *msg* on the port and yield until the receiver is ready."""
    # Set the val signal and message
    s.out.msg.next = msg
    s.out.val.next = 1
    s.trace = "+"
    # Yield so we wait at least one cycle for the rdy
    greenlet.getcurrent().parent.switch(0)
    # If output interface is not ready then yield
    while not s.out.rdy:
      s.trace = ":"
      greenlet.getcurrent().parent.switch(0)
    # Output interface is ready so reset val signal
    s.trace = " "
    s.out.val.next = 0
  #-----------------------------------------------------------------------
  # line_trace
  #-----------------------------------------------------------------------
  def line_trace( s ):
    """Return the single-character trace of the current transaction state."""
    return s.trace
|
{
"content_hash": "65c59258ded9804ece8160261f0427ce",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 74,
"avg_line_length": 25.8,
"alnum_prop": 0.3209302325581395,
"repo_name": "tj93/pymtl",
"id": "af5536398f8c92b8d33701742b5d4586960420e5",
"size": "3091",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "pclib/fl/QueuePortProxy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "102180"
},
{
"name": "C",
"bytes": "73982"
},
{
"name": "Coq",
"bytes": "2684"
},
{
"name": "Makefile",
"bytes": "1121"
},
{
"name": "Python",
"bytes": "1262276"
},
{
"name": "Shell",
"bytes": "20607"
},
{
"name": "Verilog",
"bytes": "5919"
}
],
"symlink_target": ""
}
|
import serial
ser = serial.Serial('/dev/ttyUSB0') # Open serial port (pyserial defaults)
# Write a string (write() takes bytes, hence .encode())
ser.write("Hello World!".encode())
# Read eight bytes (changeable)
msg = ser.read(8)
|
{
"content_hash": "1a1e16b84a390882138cccc23c1f5e10",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 54,
"avg_line_length": 19.444444444444443,
"alnum_prop": 0.6971428571428572,
"repo_name": "FlatTargetInk/ECE562-Software",
"id": "36e3f372aafa6a4332404db363027ceff874a5b9",
"size": "249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "serial_example.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "68619"
}
],
"symlink_target": ""
}
|
import random
from templates.text import TextTemplate
def process(input):
    """Build a good-night reply for *input* and report success.

    Returns a dict with the original input, a randomly chosen good-night
    message rendered through TextTemplate, and a success flag.
    """
    responses = (
        'Night, dude!',
        'Good night, dude! Sleep tight! :P'
    )
    reply = TextTemplate(random.choice(responses)).get_message()
    return {
        'input': input,
        'output': reply,
        'success': True
    }
|
{
"content_hash": "4c9029408ace814847de9eb8395c9d1f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 64,
"avg_line_length": 20.866666666666667,
"alnum_prop": 0.5686900958466453,
"repo_name": "manparvesh/BotDude",
"id": "255c6b165b0c8ef9157fc041b37e13b888452a70",
"size": "313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/src/gn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14226"
}
],
"symlink_target": ""
}
|
'''
The MIT License (MIT)
Copyright (c) 2016 Sean UN Wood
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@author: Sean UN Wood
'''
|
{
"content_hash": "3a8c4e897983f6a70f428f8f02e934d6",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 44.36,
"alnum_prop": 0.7953110910730388,
"repo_name": "seanwood/gcc-nmf",
"id": "90b78d9454ff792af01fa9da6ce741f3d0a260ff",
"size": "1109",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gccNMF/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "129346"
}
],
"symlink_target": ""
}
|
"""
Copyright 2010 Sami Dalouche
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import cherrypy
import os
GLOBAL_CONFIG_FILENAME = 'pymager-cherrypy.conf'
PYMAGER_CONFIG_FILENAME = 'pymager.conf'
_app_config = None
def app_config():
    """Return the module-wide application config set via set_app_config()."""
    return _app_config
class ConfigFileNotFoundError(Exception):
    """Raised when the config file exists in none of the searched directories.

    `directories` holds the list of directories that were searched.
    """
    def __init__(self, directories):
        # BUG FIX: the constructor was misspelled `__init`, so it was never
        # called, `self.directories` was never set, and __str__ raised
        # AttributeError whenever the exception was printed.
        super(ConfigFileNotFoundError, self).__init__(directories)
        self.directories = directories
    def __str__(self):
        return self.directories.__str__()
def config_directories(caller_directory):
    """
    Return the candidate config directories, searched in this order:
    # - ./etc then ./ (current working directory): useful when running
    pymager from e.g. /opt/pymager, so each instance has its own config
    # - caller_directory then caller_directory/etc: useful when running
    as WSGI, where the config file sits next to the WSGI script
    # - /etc/pymager, /opt/local/etc/pymager
    @param caller_directory: pass os.path.dirname(__file__)
    """
    config_directories = []
    # ./etc
    try:
        config_directories.append(os.path.join(os.getcwd(), 'etc'))
        config_directories.append(os.getcwd())
    except OSError, ose:
        # Only swallow "cwd doesn't exist"; re-raise anything else.
        if ose.errno != 2:
            raise
        # OSError: [Errno 2] No such file or directory. cwd doesn't exist
    # $SCRIPTDIR
    config_directories.append(caller_directory)
    config_directories.append(os.path.join(caller_directory, 'etc'))
    # /etc, /opt/local/etc, ..
    config_directories.extend(['/etc/pymager', '/opt/local/etc/pymager'])
    return config_directories
def parse_config(current_python_filename, filename):
    """
    Locate *filename* in the candidate config directories and parse it with
    cherrypy's config parser, returning the resulting dict (with the
    mandatory defaults enforced). Raises ConfigFileNotFoundError when the
    file is found in none of the candidate directories.
    @param current_python_filename: always pass __file__ !!!
    """
    confdirs = config_directories(os.path.dirname(current_python_filename))
    for confdir in confdirs:
        try:
            parsed_config = cherrypy._cpconfig._Parser().dict_from_file(os.path.join(confdir, filename))
            _enforce_default_config(parsed_config)
            return parsed_config
        except IOError, e:
            # Not found/readable in this directory -- try the next candidate.
            pass
    raise ConfigFileNotFoundError(confdirs)
def _enforce_default_config(parsed_config):
    """Force the settings pymager requires onto the root ('/') section in place."""
    root_config = parsed_config.setdefault('/', {})
    # Exact paths (no trailing-slash redirect) and HTTP-method dispatch.
    root_config['tools.trailing_slash.on'] = False
    root_config['request.dispatch'] = cherrypy.dispatch.MethodDispatcher()
def set_app_config(app_config):
    """Store *app_config* as the module-wide config (read back by app_config())."""
    global _app_config
    _app_config = app_config
|
{
"content_hash": "4484550c021c160db89dd8bd2bcd13e7",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 104,
"avg_line_length": 33.10989010989011,
"alnum_prop": 0.6637902422834384,
"repo_name": "pymager/pymager",
"id": "a629b21c6cefee9fa04b2ff49d8dba929d92ad8d",
"size": "3013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymager/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "154279"
},
{
"name": "Shell",
"bytes": "208"
}
],
"symlink_target": ""
}
|
from core.himesis import Himesis
import uuid
class HAttribute(Himesis):
    """Compiled Himesis graph for the DSLTrans rule `Attribute` (generated code).

    Node indices are fixed by the insertion order below and are referenced
    by the edge list and the attribute equations -- do not reorder.
    """
    def __init__(self):
        """
        Creates the himesis graph representing the DSLTrans rule Attribute.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HAttribute, self).__init__(name='HAttribute', num_nodes=0, edges=[])
        # Set the graph attributes
        self["mm__"] = ['HimesisMM']
        self["name"] = """Attribute"""
        self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'Attribute')
        # match model. We only support one match model
        self.add_node()
        self.vs[0]["mm__"] = """MatchModel"""
        # apply model node
        self.add_node()
        self.vs[1]["mm__"] = """ApplyModel"""
        # paired with relation between match and apply models
        self.add_node()
        self.vs[2]["mm__"] = """paired_with"""
        # match class Attribute() node
        self.add_node()
        self.vs[3]["mm__"] = """Attribute"""
        self.vs[3]["attr1"] = """+"""
        # match_contains node for class Attribute()
        self.add_node()
        self.vs[4]["mm__"] = """match_contains"""
        # apply class Attribute() node
        self.add_node()
        self.vs[5]["mm__"] = """Attribute"""
        self.vs[5]["attr1"] = """1"""
        # apply_contains node for class Attribute()
        self.add_node()
        self.vs[6]["mm__"] = """apply_contains"""
        # Add the edges
        self.add_edges([
            (0,4), # matchmodel -> match_contains
            (4,3), # match_contains -> match_class Attribute()
            (1,6), # applymodel -> apply_contains
            (6,5), # apply_contains -> apply_class Attribute()
            (0,2), # matchmodel -> pairedwith
            (2,1) # pairedwith -> applyModel
        ])
        # Add the attribute equations
        # Each pair appears to be (apply-node attr, match-node attr): copy
        # 'name' and 'type' from match node 3 to apply node 5 -- confirm
        # against the Himesis equation semantics.
        self["equations"] = [((5,'name'),(3,'name')), ((5,'type'),(3,'type')), ]
|
{
"content_hash": "e66b8d563e2d2a7b027fa7dfe1b0c861",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 82,
"avg_line_length": 27.71794871794872,
"alnum_prop": 0.46762257169287696,
"repo_name": "levilucio/SyVOLT",
"id": "65500c0cc6328f5379d1663322ea31123e67a364",
"size": "2162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ER_Copier_MM/transformation/HAttribute.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "166159"
},
{
"name": "Python",
"bytes": "34207588"
},
{
"name": "Shell",
"bytes": "1118"
}
],
"symlink_target": ""
}
|
import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
class YearMixin:
    """Mixin for views manipulating year-based data."""
    year_format = '%Y'
    year = None

    def get_year_format(self):
        """
        Get a year format string in strptime syntax to be used to parse the
        year from url variables.
        """
        return self.year_format

    def get_year(self):
        """Return the year for which this view should display data."""
        year = self.year
        if year is not None:
            return year
        # Fall back to the URLconf kwarg, then the querystring.
        if 'year' in self.kwargs:
            return self.kwargs['year']
        if 'year' in self.request.GET:
            return self.request.GET['year']
        raise Http404(_("No year specified"))

    def get_next_year(self, date):
        """Get the next valid year."""
        return _get_next_prev(self, date, period='year', is_previous=False)

    def get_previous_year(self, date):
        """Get the previous valid year."""
        return _get_next_prev(self, date, period='year', is_previous=True)

    def _get_next_year(self, date):
        """
        Return the start date of the next interval.

        The interval is defined by start date <= item date < next start date.
        """
        next_year = date.year + 1
        try:
            return date.replace(year=next_year, month=1, day=1)
        except ValueError:
            # Past datetime.MAXYEAR.
            raise Http404(_("Date out of range"))

    def _get_current_year(self, date):
        """Return the start date of the current interval."""
        return date.replace(month=1, day=1)
class MonthMixin:
    """Mixin for views manipulating month-based data."""
    month_format = '%b'
    month = None

    def get_month_format(self):
        """
        Get a month format string in strptime syntax to be used to parse the
        month from url variables.
        """
        return self.month_format

    def get_month(self):
        """Return the month for which this view should display data."""
        month = self.month
        if month is not None:
            return month
        # Fall back to the URLconf kwarg, then the querystring.
        if 'month' in self.kwargs:
            return self.kwargs['month']
        if 'month' in self.request.GET:
            return self.request.GET['month']
        raise Http404(_("No month specified"))

    def get_next_month(self, date):
        """Get the next valid month."""
        return _get_next_prev(self, date, period='month', is_previous=False)

    def get_previous_month(self, date):
        """Get the previous valid month."""
        return _get_next_prev(self, date, period='month', is_previous=True)

    def _get_next_month(self, date):
        """
        Return the start date of the next interval.

        The interval is defined by start date <= item date < next start date.
        """
        if date.month < 12:
            return date.replace(month=date.month + 1, day=1)
        # December rolls over into January of the following year; a year
        # past datetime.MAXYEAR raises ValueError.
        try:
            return date.replace(year=date.year + 1, month=1, day=1)
        except ValueError:
            raise Http404(_("Date out of range"))

    def _get_current_month(self, date):
        """Return the start date of the previous interval."""
        return date.replace(day=1)
class DayMixin:
    """Mixin for views manipulating day-based data."""
    day_format = '%d'
    day = None

    def get_day_format(self):
        """
        Get a day format string in strptime syntax to be used to parse the day
        from url variables.
        """
        return self.day_format

    def get_day(self):
        """Return the day for which this view should display data."""
        day = self.day
        if day is not None:
            return day
        # Fall back to the URLconf kwarg, then the querystring.
        if 'day' in self.kwargs:
            return self.kwargs['day']
        if 'day' in self.request.GET:
            return self.request.GET['day']
        raise Http404(_("No day specified"))

    def get_next_day(self, date):
        """Get the next valid day."""
        return _get_next_prev(self, date, period='day', is_previous=False)

    def get_previous_day(self, date):
        """Get the previous valid day."""
        return _get_next_prev(self, date, period='day', is_previous=True)

    def _get_next_day(self, date):
        """
        Return the start date of the next interval.

        The interval is defined by start date <= item date < next start date.
        """
        one_day = datetime.timedelta(days=1)
        return date + one_day

    def _get_current_day(self, date):
        """Return the start date of the current interval."""
        return date
class WeekMixin:
    """Mixin for views manipulating week-based data."""
    week_format = '%U'
    week = None

    def get_week_format(self):
        """
        Get a week format string in strptime syntax to be used to parse the
        week from url variables.
        """
        return self.week_format

    def get_week(self):
        """Return the week for which this view should display data."""
        week = self.week
        if week is not None:
            return week
        # Fall back to the URLconf kwarg, then the querystring.
        if 'week' in self.kwargs:
            return self.kwargs['week']
        if 'week' in self.request.GET:
            return self.request.GET['week']
        raise Http404(_("No week specified"))

    def get_next_week(self, date):
        """Get the next valid week."""
        return _get_next_prev(self, date, period='week', is_previous=False)

    def get_previous_week(self, date):
        """Get the previous valid week."""
        return _get_next_prev(self, date, period='week', is_previous=True)

    def _get_next_week(self, date):
        """
        Return the start date of the next interval.

        The interval is defined by start date <= item date < next start date.
        """
        days_ahead = 7 - self._get_weekday(date)
        try:
            return date + datetime.timedelta(days=days_ahead)
        except OverflowError:
            # Past datetime.MAXYEAR.
            raise Http404(_("Date out of range"))

    def _get_current_week(self, date):
        """Return the start date of the current interval."""
        return date - datetime.timedelta(self._get_weekday(date))

    def _get_weekday(self, date):
        """
        Return the weekday for a given date.

        The first day according to the week format is 0 and the last day is 6.
        """
        week_format = self.get_week_format()
        if week_format == '%W':
            # Week starts on Monday.
            return date.weekday()
        if week_format == '%U':
            # Week starts on Sunday.
            return (date.weekday() + 1) % 7
        raise ValueError("unknown week format: %s" % week_format)
class DateMixin:
    """Mixin class for views manipulating date-based data."""
    date_field = None
    allow_future = False
    def get_date_field(self):
        """Get the name of the date field to be used to filter by."""
        if self.date_field is None:
            raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__)
        return self.date_field
    def get_allow_future(self):
        """
        Return `True` if the view should be allowed to display objects from
        the future.
        """
        return self.allow_future
    # Note: the following three methods only work in subclasses that also
    # inherit SingleObjectMixin or MultipleObjectMixin.
    @cached_property
    def uses_datetime_field(self):
        """
        Return `True` if the date field is a `DateTimeField` and `False`
        if it's a `DateField`.
        """
        # Evaluated once per instance and cached (cached_property).
        model = self.get_queryset().model if self.model is None else self.model
        field = model._meta.get_field(self.get_date_field())
        return isinstance(field, models.DateTimeField)
    def _make_date_lookup_arg(self, value):
        """
        Convert a date into a datetime when the date field is a DateTimeField.
        When time zone support is enabled, `date` is assumed to be in the
        current time zone, so that displayed items are consistent with the URL.
        """
        if self.uses_datetime_field:
            # Midnight at the start of the given date.
            value = datetime.datetime.combine(value, datetime.time.min)
            if settings.USE_TZ:
                value = timezone.make_aware(value)
        return value
    def _make_single_date_lookup(self, date):
        """
        Get the lookup kwargs for filtering on a single date.
        If the date field is a DateTimeField, we can't just filter on
        date_field=date because that doesn't take the time into account.
        """
        date_field = self.get_date_field()
        if self.uses_datetime_field:
            # Half-open range [midnight, next midnight).
            since = self._make_date_lookup_arg(date)
            until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
            return {
                '%s__gte' % date_field: since,
                '%s__lt' % date_field: until,
            }
        else:
            # Skip self._make_date_lookup_arg, it's a no-op in this branch.
            return {date_field: date}
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
    """Abstract base class for date-based views displaying a list of objects."""
    allow_empty = False
    date_list_period = 'year'
    def get(self, request, *args, **kwargs):
        """Collect the dated items and render them with the list context."""
        self.date_list, self.object_list, extra_context = self.get_dated_items()
        context = self.get_context_data(
            object_list=self.object_list,
            date_list=self.date_list,
            **extra_context
        )
        return self.render_to_response(context)
    def get_dated_items(self):
        """Obtain the list of dates and items."""
        raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')
    def get_ordering(self):
        """
        Return the field or fields to use for ordering the queryset; use the
        date field by default.
        """
        return '-%s' % self.get_date_field() if self.ordering is None else self.ordering
    def get_dated_queryset(self, **lookup):
        """
        Get a queryset properly filtered according to `allow_future` and any
        extra lookup kwargs.
        """
        qs = self.get_queryset().filter(**lookup)
        date_field = self.get_date_field()
        allow_future = self.get_allow_future()
        allow_empty = self.get_allow_empty()
        paginate_by = self.get_paginate_by(qs)
        # Exclude future-dated objects unless explicitly allowed.
        if not allow_future:
            now = timezone.now() if self.uses_datetime_field else timezone_today()
            qs = qs.filter(**{'%s__lte' % date_field: now})
        if not allow_empty:
            # When pagination is enabled, it's better to do a cheap query
            # than to load the unpaginated queryset in memory.
            is_empty = not qs if paginate_by is None else not qs.exists()
            if is_empty:
                raise Http404(_("No %(verbose_name_plural)s available") % {
                    'verbose_name_plural': qs.model._meta.verbose_name_plural,
                })
        return qs
    def get_date_list_period(self):
        """
        Get the aggregation period for the list of dates: 'year', 'month', or
        'day'.
        """
        return self.date_list_period
    def get_date_list(self, queryset, date_type=None, ordering='ASC'):
        """
        Get a date list by calling `queryset.dates/datetimes()`, checking
        along the way for empty lists that aren't allowed.
        """
        date_field = self.get_date_field()
        allow_empty = self.get_allow_empty()
        if date_type is None:
            date_type = self.get_date_list_period()
        # datetimes() for DateTimeField, dates() for DateField.
        if self.uses_datetime_field:
            date_list = queryset.datetimes(date_field, date_type, ordering)
        else:
            date_list = queryset.dates(date_field, date_type, ordering)
        if date_list is not None and not date_list and not allow_empty:
            raise Http404(
                _("No %(verbose_name_plural)s available") % {
                    'verbose_name_plural': queryset.model._meta.verbose_name_plural,
                }
            )
        return date_list
class BaseArchiveIndexView(BaseDateListView):
    """
    Base class for archives of date-based items. Requires a response mixin.
    """
    context_object_name = 'latest'
    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        qs = self.get_dated_queryset()
        date_list = self.get_date_list(qs, ordering='DESC')
        # No dates at all: show an empty queryset rather than stale items.
        if not date_list:
            qs = qs.none()
        return (date_list, qs, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
    """Top-level archive of date-based items."""
    # Renders <app>/<model>_archive.html by default.
    template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
    """List of objects published in a given year."""
    date_list_period = 'month'
    make_object_list = False
    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        year = self.get_year()
        date_field = self.get_date_field()
        date = _date_from_string(year, self.get_year_format())
        # Half-open interval: [Jan 1 of year, Jan 1 of next year).
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_year(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        date_list = self.get_date_list(qs)
        if not self.get_make_object_list():
            # We need this to be a queryset since parent classes introspect it
            # to find information about the model.
            qs = qs.none()
        return (date_list, qs, {
            'year': date,
            'next_year': self.get_next_year(date),
            'previous_year': self.get_previous_year(date),
        })
    def get_make_object_list(self):
        """
        Return `True` if this view should contain the full list of objects in
        the given year.
        """
        return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
    """List of objects published in a given year."""
    # Renders <app>/<model>_archive_year.html by default.
    template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
    """List of objects published in a given month."""
    date_list_period = 'day'
    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        year = self.get_year()
        month = self.get_month()
        date_field = self.get_date_field()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format())
        # Half-open interval: [first of month, first of next month).
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_month(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        date_list = self.get_date_list(qs)
        return (date_list, qs, {
            'month': date,
            'next_month': self.get_next_month(date),
            'previous_month': self.get_previous_month(date),
        })
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
    """List of objects published in a given month."""
    # Renders <app>/<model>_archive_month.html by default.
    template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
    """List of objects published in a given week."""
    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        year = self.get_year()
        week = self.get_week()
        date_field = self.get_date_field()
        week_format = self.get_week_format()
        # %w digit for strptime: '1' = Monday (for %W), '0' = Sunday (for %U).
        week_start = {
            '%W': '1',
            '%U': '0',
        }[week_format]
        date = _date_from_string(year, self.get_year_format(),
                                 week_start, '%w',
                                 week, week_format)
        # Half-open interval: [start of week, start of next week).
        since = self._make_date_lookup_arg(date)
        until = self._make_date_lookup_arg(self._get_next_week(date))
        lookup_kwargs = {
            '%s__gte' % date_field: since,
            '%s__lt' % date_field: until,
        }
        qs = self.get_dated_queryset(**lookup_kwargs)
        return (None, qs, {
            'week': date,
            'next_week': self.get_next_week(date),
            'previous_week': self.get_previous_week(date),
        })
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
    """List of objects published in a given week."""
    # Renders <app>/<model>_archive_week.html by default.
    template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
    """List of objects published on a given day."""
    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        year = self.get_year()
        month = self.get_month()
        day = self.get_day()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format(),
                                 day, self.get_day_format())
        return self._get_dated_items(date)
    def _get_dated_items(self, date):
        """
        Do the actual heavy lifting of getting the dated items; this accepts a
        date object so that TodayArchiveView can be trivial.
        """
        lookup_kwargs = self._make_single_date_lookup(date)
        qs = self.get_dated_queryset(**lookup_kwargs)
        return (None, qs, {
            'day': date,
            'previous_day': self.get_previous_day(date),
            'next_day': self.get_next_day(date),
            'previous_month': self.get_previous_month(date),
            'next_month': self.get_next_month(date)
        })
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
    """List of objects published on a given day."""
    # Default template: "<app_label>/<model_name>_archive_day.html".
    template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
    """List of objects published today."""

    def get_dated_items(self):
        """Return (date_list, items, extra_context) for this request."""
        # Delegate to the day-archive machinery with today's date.
        today = datetime.date.today()
        return self._get_dated_items(today)
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
    """List of objects published today."""
    # Reuses the day-archive template: "<app_label>/<model_name>_archive_day.html".
    template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
    """
    Detail view of a single object on a single date; this differs from the
    standard DetailView by accepting a year/month/day in the URL.
    """

    def get_object(self, queryset=None):
        """Get the object this request displays."""
        date = _date_from_string(self.get_year(), self.get_year_format(),
                                 self.get_month(), self.get_month_format(),
                                 self.get_day(), self.get_day_format())
        # Use a custom queryset if provided
        qs = queryset if queryset is not None else self.get_queryset()
        if not self.get_allow_future() and date > datetime.date.today():
            raise Http404(_(
                "Future %(verbose_name_plural)s not available because "
                "%(class_name)s.allow_future is False."
            ) % {
                'verbose_name_plural': qs.model._meta.verbose_name_plural,
                'class_name': self.__class__.__name__,
            })
        # Filter down a queryset from self.queryset using the date from the
        # URL. This'll get passed as the queryset to DetailView.get_object,
        # which'll handle the 404
        filtered = qs.filter(**self._make_single_date_lookup(date))
        return super().get_object(queryset=filtered)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
    """
    Detail view of a single object on a single date; this differs from the
    standard DetailView by accepting a year/month/day in the URL.
    """
    # Default template: "<app_label>/<model_name>_detail.html", like DetailView.
    template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
    """
    Get a datetime.date object given a format string and a year, month, and day
    (only year is mandatory). Raise a 404 for an invalid date.
    """
    # Build a combined strptime pattern and the matching candidate string,
    # joined with a delimiter that cannot occur in the date components.
    fmt = delim.join((year_format, month_format, day_format))
    raw = delim.join((str(year), str(month), str(day)))
    try:
        return datetime.datetime.strptime(raw, fmt).date()
    except ValueError:
        raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % {
            'datestr': raw,
            'format': fmt,
        })
def _get_next_prev(generic_view, date, is_previous, period):
    """
    Get the next or the previous valid date. The idea is to allow links on
    month/day views to never be 404s by never providing a date that'll be
    invalid for the given view.
    This is a bit complicated since it handles different intervals of time,
    hence the coupling to generic_view.
    However in essence the logic comes down to:
    * If allow_empty and allow_future are both true, this is easy: just
      return the naive result (just the next/previous day/week/month,
      regardless of object existence.)
    * If allow_empty is true, allow_future is false, and the naive result
      isn't in the future, then return it; otherwise return None.
    * If allow_empty is false and allow_future is true, return the next
      date *that contains a valid object*, even if it's in the future. If
      there are no next objects, return None.
    * If allow_empty is false and allow_future is false, return the next
      date that contains a valid object. If that date is in the future, or
      if there are no next objects, return None.

    Args:
        generic_view: the date-based view instance; supplies date_field,
            allow_empty, allow_future, the queryset, and the per-period
            helper methods.
        date: a date within the currently displayed period.
        is_previous: True to step backwards in time, False to step forwards.
        period: period name used to select the view's
            _get_current_<period> / _get_next_<period> helpers
            (e.g. 'day', 'week', 'month' for the views in this module).

    Returns:
        The first day of the adjacent valid period, or None if there isn't one.
    """
    date_field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()
    get_current = getattr(generic_view, '_get_current_%s' % period)
    get_next = getattr(generic_view, '_get_next_%s' % period)
    # Bounds of the current interval
    start, end = get_current(date), get_next(date)
    # If allow_empty is True, the naive result will be valid
    if allow_empty:
        if is_previous:
            # Step just before this interval, then snap to that period's start.
            result = get_current(start - datetime.timedelta(days=1))
        else:
            result = end
        if allow_future or result <= timezone_today():
            return result
        else:
            return None
    # Otherwise, we'll need to go to the database to look for an object
    # whose date_field is at least (greater than/less than) the given
    # naive result
    else:
        # Construct a lookup and an ordering depending on whether we're doing
        # a previous date or a next date lookup.
        if is_previous:
            lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
            ordering = '-%s' % date_field
        else:
            lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
            ordering = date_field
        # Filter out objects in the future if appropriate.
        if not allow_future:
            # Fortunately, to match the implementation of allow_future,
            # we need __lte, which doesn't conflict with __lt above.
            if generic_view.uses_datetime_field:
                now = timezone.now()
            else:
                now = timezone_today()
            lookup['%s__lte' % date_field] = now
        qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
        # Snag the first object from the queryset; if it doesn't exist that
        # means there's no next/previous link available.
        try:
            result = getattr(qs[0], date_field)
        except IndexError:
            return None
        # Convert datetimes to dates in the current time zone.
        if generic_view.uses_datetime_field:
            if settings.USE_TZ:
                result = timezone.localtime(result)
            result = result.date()
        # Return the first day of the period.
        return get_current(result)
def timezone_today():
    """Return the current date in the current time zone."""
    # Timezone-aware projects ask Django for the local date; otherwise fall
    # back to the system date.
    if not settings.USE_TZ:
        return datetime.date.today()
    return timezone.localdate()
|
{
"content_hash": "13fcc66803aadcc0156cbfa727672c29",
"timestamp": "",
"source": "github",
"line_count": 720,
"max_line_length": 103,
"avg_line_length": 35.016666666666666,
"alnum_prop": 0.5940028557829605,
"repo_name": "treyhunner/django",
"id": "3da05cf8e5bb879a66f5b4cba40e774f6331e0fd",
"size": "25212",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "django/views/generic/dates.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "84168"
},
{
"name": "HTML",
"bytes": "224612"
},
{
"name": "JavaScript",
"bytes": "255642"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "12359346"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
"""Tests for common layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.layers import common_layers
import tensorflow as tf
class CommonLayersTest(tf.test.TestCase):
  """Shape and numerics checks for the ops in tensor2tensor common_layers."""

  def testSaturatingSigmoid(self):
    x = np.array([-120.0, -100.0, 0.0, 100.0, 120.0], dtype=np.float32)
    with self.test_session() as session:
      y = common_layers.saturating_sigmoid(tf.constant(x))
      res = session.run(y)
    self.assertAllClose(res, [0.0, 0.0, 0.5, 1.0, 1.0])

  def testFlatten4D3D(self):
    # np.random.random_integers was deprecated in NumPy 1.11 and removed in
    # 1.25; randint's upper bound is exclusive, so [1, 8] becomes (1, 9).
    x = np.random.randint(1, 9, size=(3, 5, 2))
    with self.test_session() as session:
      y = common_layers.flatten4d3d(common_layers.embedding(x, 10, 7))
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (3, 5 * 2, 7))

  def testEmbedding(self):
    # randint(1, 9) is the modern equivalent of random_integers(1, high=8).
    x = np.random.randint(1, 9, size=(3, 5))
    with self.test_session() as session:
      y = common_layers.embedding(x, 10, 16)
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (3, 5, 16))

  def testShakeShake(self):
    x = np.random.rand(5, 7)
    with self.test_session() as session:
      x = tf.constant(x, dtype=tf.float32)
      y = common_layers.shakeshake([x, x, x, x, x])
      session.run(tf.global_variables_initializer())
      inp, res = session.run([x, y])
    self.assertAllClose(res, inp)

  def testConv(self):
    x = np.random.rand(5, 7, 1, 11)
    with self.test_session() as session:
      y = common_layers.conv(tf.constant(x, dtype=tf.float32), 13, (3, 1))
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 5, 1, 13))

  def testConv1d(self):
    x = np.random.rand(5, 7, 11)
    with self.test_session() as session:
      y = common_layers.conv1d(tf.constant(x, dtype=tf.float32), 13, 1)
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 7, 13))

  def testSeparableConv(self):
    x = np.random.rand(5, 7, 1, 11)
    with self.test_session() as session:
      y = common_layers.separable_conv(
          tf.constant(x, dtype=tf.float32), 13, (3, 1))
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 5, 1, 13))

  def testSubSeparableConv(self):
    for sep in [0, 1, 2, 4]:
      x = np.random.rand(5, 7, 1, 12)
      with self.test_session() as session:
        with tf.variable_scope("sep_%d" % sep):
          y = common_layers.subseparable_conv(
              tf.constant(x, dtype=tf.float32), 16, (3, 1), separability=sep)
        session.run(tf.global_variables_initializer())
        res = session.run(y)
      self.assertEqual(res.shape, (5, 5, 1, 16))

  def testConvBlock(self):
    x = np.random.rand(5, 7, 1, 11)
    with self.test_session() as session:
      y = common_layers.conv_block(
          tf.constant(x, dtype=tf.float32),
          13, [(1, (3, 3)), (1, (3, 3))],
          padding="SAME",
          normalizer_fn=common_layers.noam_norm)
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 7, 1, 13))

  def testSeparableConvBlock(self):
    x = np.random.rand(5, 7, 1, 11)
    with self.test_session() as session:
      y = common_layers.separable_conv_block(
          tf.constant(x, dtype=tf.float32),
          13, [(1, (3, 3)), (1, (3, 3))],
          padding="SAME")
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 7, 1, 13))

  def testSubSeparableConvBlock(self):
    for sep in [0, 1, 2, 4]:
      x = np.random.rand(5, 7, 1, 12)
      with self.test_session() as session:
        with tf.variable_scope("sep_%d" % sep):
          y = common_layers.subseparable_conv_block(
              tf.constant(x, dtype=tf.float32),
              16, [(1, (3, 3)), (1, (3, 3))],
              padding="SAME",
              separability=sep)
        session.run(tf.global_variables_initializer())
        res = session.run(y)
      self.assertEqual(res.shape, (5, 7, 1, 16))

  def testPool(self):
    x = np.random.rand(5, 8, 1, 11)
    with self.test_session() as session:
      y = common_layers.pool(
          tf.constant(x, dtype=tf.float32), (2, 2), "AVG", "SAME")
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 8, 1, 11))

  def testConvBlockDownsample(self):
    x = np.random.rand(5, 7, 1, 11)
    with self.test_session() as session:
      y = common_layers.conv_block_downsample(
          tf.constant(x, dtype=tf.float32), (3, 1), (2, 1), "SAME")
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 4, 1, 27))

  def testSimpleAttention(self):
    x = np.random.rand(5, 7, 1, 11)
    y = np.random.rand(5, 9, 1, 11)
    with self.test_session() as session:
      a = common_layers.simple_attention(
          tf.constant(x, dtype=tf.float32), tf.constant(y, dtype=tf.float32))
      session.run(tf.global_variables_initializer())
      res = session.run(a)
    self.assertEqual(res.shape, (5, 7, 1, 11))

  def testGetTimingSignal(self):
    length = 7
    num_timescales = 10
    with self.test_session() as session:
      a = common_layers.get_timing_signal(length, num_timescales=num_timescales)
      session.run(tf.global_variables_initializer())
      res = session.run(a)
    self.assertEqual(res.shape, (length, 2 * num_timescales))

  def testAddTimingSignal(self):
    batch = 5
    length = 7
    height = 3
    depth = 35
    x = np.random.rand(batch, length, height, depth)
    with self.test_session() as session:
      a = common_layers.add_timing_signal(tf.constant(x, dtype=tf.float32))
      session.run(tf.global_variables_initializer())
      res = session.run(a)
    self.assertEqual(res.shape, (batch, length, height, depth))

  def testAttention1D(self):
    batch = 5
    target_length = 7
    source_length = 13
    source_depth = 9
    target_depth = 11
    attention_size = 21
    output_size = 15
    num_heads = 7
    source = np.random.rand(batch, source_length, source_depth)
    target = np.random.rand(batch, target_length, target_depth)
    mask = np.random.rand(batch, target_length, source_length)
    with self.test_session() as session:
      a = common_layers.attention_1d_v0(
          tf.constant(source, dtype=tf.float32),
          tf.constant(target, dtype=tf.float32), attention_size, output_size,
          num_heads, tf.constant(mask, dtype=tf.float32))
      session.run(tf.global_variables_initializer())
      res = session.run(a)
    self.assertEqual(res.shape, (batch, target_length, output_size))

  def testMultiscaleConvSum(self):
    x = np.random.rand(5, 9, 1, 11)
    with self.test_session() as session:
      y = common_layers.multiscale_conv_sum(
          tf.constant(x, dtype=tf.float32),
          13, [((1, 1), (5, 5)), ((2, 2), (3, 3))],
          "AVG",
          padding="SAME")
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 9, 1, 13))

  def testConvGRU(self):
    x = np.random.rand(5, 7, 3, 11)
    with self.test_session() as session:
      y = common_layers.conv_gru(tf.constant(x, dtype=tf.float32), (1, 3), 11)
      z = common_layers.conv_gru(
          tf.constant(x, dtype=tf.float32), (1, 3), 11, padding="LEFT")
      session.run(tf.global_variables_initializer())
      res1 = session.run(y)
      res2 = session.run(z)
    self.assertEqual(res1.shape, (5, 7, 3, 11))
    self.assertEqual(res2.shape, (5, 7, 3, 11))

  def testLayerNorm(self):
    x = np.random.rand(5, 7, 11)
    with self.test_session() as session:
      y = common_layers.layer_norm(tf.constant(x, dtype=tf.float32), 11)
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 7, 11))

  def testConvLSTM(self):
    x = np.random.rand(5, 7, 11, 13)
    with self.test_session() as session:
      y = common_layers.conv_lstm(tf.constant(x, dtype=tf.float32), (1, 3), 13)
      session.run(tf.global_variables_initializer())
      res = session.run(y)
    self.assertEqual(res.shape, (5, 7, 11, 13))

  def testPadToSameLength(self):
    x1 = np.random.rand(5, 7, 11)
    x2 = np.random.rand(5, 9, 11)
    with self.test_session() as session:
      a, b = common_layers.pad_to_same_length(
          tf.constant(x1, dtype=tf.float32), tf.constant(x2, dtype=tf.float32))
      c, d = common_layers.pad_to_same_length(
          tf.constant(x1, dtype=tf.float32),
          tf.constant(x2, dtype=tf.float32),
          final_length_divisible_by=4)
      res1, res2 = session.run([a, b])
      res1a, res2a = session.run([c, d])
    self.assertEqual(res1.shape, (5, 9, 11))
    self.assertEqual(res2.shape, (5, 9, 11))
    self.assertEqual(res1a.shape, (5, 12, 11))
    self.assertEqual(res2a.shape, (5, 12, 11))

  def testShiftLeft(self):
    x1 = np.zeros((5, 7, 1, 11))
    x1[:, 0, :] = np.ones_like(x1[:, 0, :])
    expected = np.zeros((5, 7, 1, 11))
    expected[:, 1, :] = np.ones_like(expected[:, 1, :])
    with self.test_session() as session:
      a = common_layers.shift_right(tf.constant(x1, dtype=tf.float32))
      actual = session.run(a)
    self.assertAllEqual(actual, expected)

  def testConvStride2MultiStep(self):
    x1 = np.random.rand(5, 32, 16, 11)
    with self.test_session() as session:
      a = common_layers.conv_stride2_multistep(
          tf.constant(x1, dtype=tf.float32), 4, 16)
      session.run(tf.global_variables_initializer())
      actual = session.run(a[0])
    self.assertEqual(actual.shape, (5, 2, 1, 16))

  def testDeconvStride2MultiStep(self):
    x1 = np.random.rand(5, 2, 1, 11)
    with self.test_session() as session:
      a = common_layers.deconv_stride2_multistep(
          tf.constant(x1, dtype=tf.float32), 4, 16)
      session.run(tf.global_variables_initializer())
      actual = session.run(a)
    self.assertEqual(actual.shape, (5, 32, 1, 16))

  def testApplyNormLayer(self):
    with self.test_session() as session:
      x1 = np.random.rand(5, 2, 1, 11)
      x2 = common_layers.apply_norm(
          tf.constant(x1, dtype=tf.float32), "layer", depth=11, epsilon=1e-6)
      session.run(tf.global_variables_initializer())
      actual = session.run(x2)
    self.assertEqual(actual.shape, (5, 2, 1, 11))

  def testApplyNormNoam(self):
    with self.test_session() as session:
      x1 = np.random.rand(5, 2, 1, 11)
      x2 = common_layers.apply_norm(
          tf.constant(x1, dtype=tf.float32), "noam", depth=11, epsilon=1e-6)
      session.run(tf.global_variables_initializer())
      actual = session.run(x2)
    self.assertEqual(actual.shape, (5, 2, 1, 11))

  def testApplyNormBatch(self):
    with self.test_session() as session:
      x1 = np.random.rand(5, 2, 1, 11)
      x2 = common_layers.apply_norm(
          tf.constant(x1, dtype=tf.float32), "batch", depth=11, epsilon=1e-6)
      session.run(tf.global_variables_initializer())
      actual = session.run(x2)
    self.assertEqual(actual.shape, (5, 2, 1, 11))

  def testApplyNormNone(self):
    with self.test_session() as session:
      x1 = np.random.rand(5, 2, 1, 11)
      x2 = common_layers.apply_norm(
          tf.constant(x1, dtype=tf.float32), "none", depth=11, epsilon=1e-6)
      session.run(tf.global_variables_initializer())
      actual = session.run(x2)
    self.assertEqual(actual.shape, (5, 2, 1, 11))
    # "none" must be the identity (up to float32 round-off).
    self.assertAllClose(actual, x1, atol=1e-03)

  def testGlobalPool1d(self):
    x1 = np.random.rand(5, 4, 11)
    no_mask = np.ones((5, 4))
    full_mask = np.zeros((5, 4))
    with self.test_session() as session:
      x1_ = tf.Variable(x1, dtype=tf.float32)
      no_mask_ = tf.Variable(no_mask, dtype=tf.float32)
      full_mask_ = tf.Variable(full_mask, dtype=tf.float32)
      none_mask_max = common_layers.global_pool_1d(x1_)
      no_mask_max = common_layers.global_pool_1d(x1_, mask=no_mask_)
      result1 = tf.reduce_sum(none_mask_max - no_mask_max)
      full_mask_max = common_layers.global_pool_1d(x1_, mask=full_mask_)
      result2 = tf.reduce_sum(full_mask_max)
      none_mask_avr = common_layers.global_pool_1d(x1_, "AVR")
      no_mask_avr = common_layers.global_pool_1d(x1_, "AVR", no_mask_)
      result3 = tf.reduce_sum(none_mask_avr - no_mask_avr)
      full_mask_avr = common_layers.global_pool_1d(x1_, "AVR", full_mask_)
      result4 = tf.reduce_sum(full_mask_avr)
      session.run(tf.global_variables_initializer())
      actual = session.run([result1, result2, result3, result4])
      # No mask and an all-ones mask must agree; an all-zeros mask pools to 0.
      self.assertAllEqual(actual[:3], [0.0, 0.0, 0.0])

  def testLinearSetLayer(self):
    x1 = np.random.rand(5, 4, 11)
    cont = np.random.rand(5, 13)
    with self.test_session() as session:
      x1_ = tf.Variable(x1, dtype=tf.float32)
      cont_ = tf.Variable(cont, dtype=tf.float32)
      simple_ff = common_layers.linear_set_layer(32, x1_)
      cont_ff = common_layers.linear_set_layer(32, x1_, context=cont_)
      session.run(tf.global_variables_initializer())
      actual = session.run([simple_ff, cont_ff])
    self.assertEqual(actual[0].shape, (5, 4, 32))
    self.assertEqual(actual[1].shape, (5, 4, 32))

  def testRavanbakhshSetLayer(self):
    x1 = np.random.rand(5, 4, 11)
    with self.test_session() as session:
      x1_ = tf.Variable(x1, dtype=tf.float32)
      layer = common_layers.ravanbakhsh_set_layer(32, x1_)
      session.run(tf.global_variables_initializer())
      actual = session.run(layer)
    self.assertEqual(actual.shape, (5, 4, 32))

  def testPaddingCrossEntropyFactored(self):
    vocab_size = 19
    rows = 5
    cols = 4
    depth = 11
    label_smoothing = 0.1
    features = np.random.rand(rows, cols, depth)
    weights = np.random.rand(vocab_size, depth)
    labels = np.random.randint(0, vocab_size - 1, size=(rows, cols))
    with self.test_session() as session:
      features = tf.to_float(features)
      weights = tf.to_float(weights)
      labels = tf.to_int32(labels)
      logits = tf.matmul(
          tf.reshape(features, [rows * cols, depth]), weights, transpose_b=True)
      logits = tf.reshape(logits, [rows, cols, vocab_size])
      loss_num, loss_den = common_layers.padded_cross_entropy(
          logits, labels, label_smoothing=label_smoothing, reduce_sum=False)
      factored_logits = common_layers.FactoredTensor(features, weights)
      loss_num_f, loss_den_f = common_layers.padded_cross_entropy_factored(
          factored_logits,
          labels=labels,
          label_smoothing=label_smoothing,
          reduce_sum=False)
      num, den, num_f, den_f = session.run(
          [loss_num, loss_den, loss_num_f, loss_den_f])
      # The factored path must match the dense path exactly.
      self.assertEqual(num.shape, (rows, cols))
      self.assertEqual(den.shape, (rows, cols))
      self.assertEqual(num_f.shape, (rows, cols))
      self.assertEqual(den_f.shape, (rows, cols))
      self.assertAllClose(num, num_f)
      self.assertAllClose(den, den_f)

  def testPaddingCrossEntropyFactoredGrad(self):
    vocab_size = 19
    rows = 5
    cols = 4
    depth = 11
    label_smoothing = 0.1
    features = np.random.rand(rows, cols, depth)
    weights = np.random.rand(vocab_size, depth)
    labels = np.random.randint(0, vocab_size - 1, size=(rows, cols))
    with self.test_session() as session:
      features = tf.to_float(features)
      weights = tf.to_float(weights)
      labels = tf.to_int32(labels)
      logits = tf.matmul(
          tf.reshape(features, [rows * cols, depth]), weights, transpose_b=True)
      logits = tf.reshape(logits, [rows, cols, vocab_size])
      loss_num, loss_den = common_layers.padded_cross_entropy(
          logits, labels, label_smoothing=label_smoothing, reduce_sum=False)
      factored_logits = common_layers.FactoredTensor(features, weights)
      loss_num_factored, loss_den_factored = (
          common_layers.padded_cross_entropy_factored(
              factored_logits,
              labels=labels,
              label_smoothing=label_smoothing,
              reduce_sum=False))
      df, dw = tf.gradients(ys=[loss_num, loss_den], xs=[features, weights])
      df_factored, dw_factored = tf.gradients(
          ys=[loss_num_factored, loss_den_factored], xs=[features, weights])
      actual_df, actual_dw, actual_df_factored, actual_dw_factored = (
          session.run([df, dw, df_factored, dw_factored]))
      # Gradients of the factored loss must match the dense loss gradients.
      self.assertEqual(actual_df.shape, (rows, cols, depth))
      self.assertEqual(actual_dw.shape, (vocab_size, depth))
      self.assertEqual(actual_df_factored.shape, (rows, cols, depth))
      self.assertEqual(actual_dw_factored.shape, (vocab_size, depth))
      self.assertAllClose(actual_df, actual_df_factored)
      self.assertAllClose(actual_dw, actual_dw_factored)

  def testFactoredTensorImplicitConversion(self):
    a = np.random.rand(3, 4, 5)
    b = np.random.rand(6, 5)
    c = np.random.rand(3, 4, 6)
    with self.test_session() as session:
      # a factored representation of a Tensor of shape (3, 4, 6)
      factored = common_layers.FactoredTensor(tf.to_float(a), tf.to_float(b))
      # implicitly converts factored to a Tensor (performing the matmul)
      d = factored + tf.to_float(c)
      out = session.run(d)
      self.assertEqual(out.shape, (3, 4, 6))

  def testConvHiddenReluMemoryEfficient(self):
    batch = 3
    length = 23
    io_size = 16
    filter_size = 7
    x = np.random.rand(batch, length, io_size)
    dy = np.random.rand(batch, length, io_size)
    with self.test_session() as session:
      x = tf.to_float(x)
      dy = tf.to_float(dy)
      f1 = tf.get_variable("f1", [1, io_size, filter_size])
      f2 = tf.get_variable("f2", [1, filter_size, io_size])
      norm_scale, norm_bias = common_layers.layer_norm_vars(io_size)
      y = common_layers.conv_hidden_relu_memory_efficient(
          x, filter_size, forget=False,
          test_vars=(f1, f2, norm_scale, norm_bias))
      y_forget = common_layers.conv_hidden_relu_memory_efficient(
          x, filter_size, forget=True,
          test_vars=(f1, f2, norm_scale, norm_bias))
      dx, df1, df2, dnorm_scale, dnorm_bias = tf.gradients(
          ys=[y], xs=[x, f1, f2, norm_scale, norm_bias], grad_ys=[dy])
      dx_f, df1_f, df2_f, dnorm_scale_f, dnorm_bias_f = tf.gradients(
          ys=[y_forget], xs=[x, f1, f2, norm_scale, norm_bias], grad_ys=[dy])
      session.run(tf.global_variables_initializer())
      (y, y_forget,
       dx, df1, df2, dnorm_scale, dnorm_bias,
       dx_f, df1_f, df2_f, dnorm_scale_f, dnorm_bias_f) = session.run(
           [y, y_forget,
            dx, df1, df2, dnorm_scale, dnorm_bias,
            dx_f, df1_f, df2_f, dnorm_scale_f, dnorm_bias_f])
      # The memory-efficient (forget=True) path must reproduce the regular
      # path's outputs and all gradients.
      self.assertAllClose(y, y_forget)
      self.assertAllClose(df2, df2_f)
      self.assertAllClose(df1, df1_f)
      self.assertAllClose(dnorm_scale, dnorm_scale_f)
      self.assertAllClose(dnorm_bias, dnorm_bias_f)
      self.assertAllClose(dx, dx_f)
class FnWithCustomGradTest(tf.test.TestCase):
  """Tests for common_layers.fn_with_custom_grad."""
  def testCorrectness(self):
    # A grad_fn that simply recomputes the true gradients: the wrapped
    # function must then match the plain function in outputs and gradients.
    w = tf.random_uniform([6, 10])
    def fn(a, b, c):
      return tf.layers.dense(
          a,
          10,
          use_bias=False,
          kernel_initializer=lambda shape, dtype, partition_info: w
      ) + tf.matmul(b, c)
    def grad_fn(inputs, variables, outputs, grad_outputs):
      outputs = outputs[0]
      grad_outputs = grad_outputs[0]
      grad_inputs = tf.gradients(outputs, inputs, grad_ys=grad_outputs)
      grad_vars = tf.gradients(outputs, variables, grad_ys=grad_outputs)
      return grad_inputs, grad_vars
    custom_fn = common_layers.fn_with_custom_grad(grad_fn)(fn)
    a = tf.random_uniform([11, 6])
    b = tf.random_uniform([11, 7])
    c = tf.random_uniform([7, 10])
    out = fn(a, b, c)
    custom_out = custom_fn(a, b, c)
    self.assertEqual(out.get_shape().as_list(),
                     custom_out.get_shape().as_list())
    loss = tf.reduce_mean(out)
    custom_loss = tf.reduce_mean(custom_out)
    # trainable_variables()[0] is the dense kernel of the plain call,
    # [1] the kernel of the custom call (both initialized to w).
    grads = tf.gradients(loss, [a, b, c] + [tf.trainable_variables()[0]])
    custom_grads = tf.gradients(custom_loss,
                                [a, b, c] + [tf.trainable_variables()[1]])
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      out_val, custom_out_val, grads_val, custom_grads_val = sess.run(
          [out, custom_out, grads, custom_grads])
      self.assertAllClose(out_val, custom_out_val)
      for g1, g2 in zip(grads_val, custom_grads_val):
        self.assertAllClose(g1, g2)
  def testCustomGrad(self):
    # A synthetic grad_fn returning constant gradients; checks that the
    # wrapper actually routes gradients through grad_fn.
    def fn(a, b, c):
      return tf.layers.dense(a, 10, use_bias=False) + tf.matmul(b, c)
    def grad_fn(inputs, variables, unused_outputs, unused_grad_outputs):
      grad_inputs = [tf.ones_like(t) * (i + 1.) for i, t in enumerate(inputs)]
      grad_vars = [
          tf.ones_like(t) * (i + len(inputs) + 1.)
          for i, t in enumerate(variables)
      ]
      return grad_inputs, grad_vars
    a = tf.random_uniform([11, 6])
    b = tf.random_uniform([11, 7])
    c = tf.random_uniform([7, 10])
    w = tf.random_uniform([6, 10])
    out = common_layers.fn_with_custom_grad(grad_fn)(fn)(a, b, c)
    loss = tf.reduce_mean(out)
    grads = tf.gradients(loss, [a, b, c, tf.trainable_variables()[0]])
    expected_grads = [
        tf.ones_like(t) * (i + 1.) for i, t in enumerate([a, b, c, w])
    ]
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      g_val, eg_val = sess.run([grads, expected_grads])
      for g1, g2 in zip(g_val, eg_val):
        self.assertAllClose(g1, g2)
class RecomputeTest(tf.test.TestCase):
  """Tests for common_layers.recompute_grad."""
  def testRecompute(self):
    # The recomputed-gradient wrapper must yield the same forward output and
    # the same variable gradients as the unwrapped function.
    def layer(x, name=None):
      with tf.variable_scope(name, default_name="layer"):
        x = tf.contrib.layers.layer_norm(x)
        x = tf.layers.conv1d(
            x,
            10,
            1,
            use_bias=False,
            kernel_initializer=tf.constant_initializer(42.42))
        x = tf.nn.relu(x)
        return x
    def fn(x):
      # Three stacked layers, each creating its own variables.
      out = x
      for _ in range(3):
        out = layer(out)
      return out
    @common_layers.recompute_grad
    def fn_recompute(x):
      return fn(x)
    x = tf.random_uniform((3, 1, 3))
    recompute_vars = None
    with tf.variable_scope("recompute") as vs:
      out1 = tf.reduce_sum(fn_recompute(x))
      recompute_vars = vs.trainable_variables()
    reg_vars = None
    with tf.variable_scope("regular") as vs:
      out2 = tf.reduce_sum(fn(x))
      reg_vars = vs.trainable_variables()
    grad1 = tf.gradients(out1, recompute_vars)
    grad2 = tf.gradients(out2, reg_vars)
    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      outs = sess.run([out1, out2, grad1, grad2])
      self.assertAllClose(outs[0], outs[1])
      for g1, g2 in zip(outs[2], outs[3]):
        self.assertAllClose(g1, g2)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  tf.test.main()
|
{
"content_hash": "cdedfd35fefaa7d8c1fd872b5ef3dadb",
"timestamp": "",
"source": "github",
"line_count": 611,
"max_line_length": 80,
"avg_line_length": 37.734860883797054,
"alnum_prop": 0.6215735600277585,
"repo_name": "rsepassi/tensor2tensor",
"id": "2bf6b4cee4ec6e7b9bd20d06ea1b206e3da90de3",
"size": "23662",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensor2tensor/layers/common_layers_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "34646"
},
{
"name": "JavaScript",
"bytes": "78396"
},
{
"name": "Jupyter Notebook",
"bytes": "2328225"
},
{
"name": "Python",
"bytes": "1702690"
},
{
"name": "Shell",
"bytes": "1260"
}
],
"symlink_target": ""
}
|
"""Setup script for the chemblnet package."""
from setuptools import setup

# Read __version__ from the package without importing it (avoids pulling in
# the package's runtime dependencies at install time). The original
# exec(open(...).read()) left the file handle open; `with` closes it.
with open('chemblnet/version.py') as version_file:
    exec(version_file.read())

setup(name='chemblnet',
      version=__version__,
      description='Neural Networks for ChEMBL',
      url='http://github.com/jaak-s/chemblnet',
      author='Jaak Simm',
      author_email='jaak.simm@gmail.com',
      license='MIT',
      packages=['chemblnet'],
      zip_safe=False)
|
{
"content_hash": "6ed21726948a3abffec878f1945bc298",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 47,
"avg_line_length": 25.857142857142858,
"alnum_prop": 0.638121546961326,
"repo_name": "jaak-s/chemblnet",
"id": "616f961ef4f463602a26c333979d8e52320dad04",
"size": "362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "170352"
},
{
"name": "Shell",
"bytes": "355"
}
],
"symlink_target": ""
}
|
"""Contains the InputSpec class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import zip # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.keras import backend
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import keras_export
from tensorflow.python.util.tf_export import tf_export
@keras_export('keras.layers.InputSpec')
@tf_export(v1=['layers.InputSpec'])
class InputSpec(object):
  """Specifies the rank, dtype and shape of every input to a layer.
  Layers can expose (if appropriate) an `input_spec` attribute:
  an instance of `InputSpec`, or a nested structure of `InputSpec` instances
  (one per input tensor). These objects enable the layer to run input
  compatibility checks for input structure, input rank, input shape, and
  input dtype.
  A None entry in a shape is compatible with any dimension,
  a None shape is compatible with any shape.
  Arguments:
    dtype: Expected DataType of the input.
    shape: Shape tuple, expected shape of the input
      (may include None for unchecked axes). Includes the batch size.
    ndim: Integer, expected rank of the input.
    max_ndim: Integer, maximum rank of the input.
    min_ndim: Integer, minimum rank of the input.
    axes: Dictionary mapping integer axes to
      a specific dimension value.
    allow_last_axis_squeeze: If True, then allow inputs of rank N+1 as long
      as the last axis of the input is 1, as well as inputs of rank N-1
      as long as the last axis of the spec is 1.
    name: Expected key corresponding to this input when passing data as
      a dictionary.
  Example:
  ```python
  class MyLayer(Layer):
    def __init__(self):
      super(MyLayer, self).__init__()
      # The layer will accept inputs with shape (?, 28, 28) & (?, 28, 28, 1)
      # and raise an appropriate error message otherwise.
      self.input_spec = InputSpec(
          shape=(None, 28, 28, 1),
          allow_last_axis_squeeze=True)
  ```
  """

  def __init__(self,
               dtype=None,
               shape=None,
               ndim=None,
               max_ndim=None,
               min_ndim=None,
               axes=None,
               allow_last_axis_squeeze=False,
               name=None):
    self.dtype = dtypes.as_dtype(dtype).name if dtype is not None else None
    # Normalize `shape` to a tuple of ints/Nones, or None if the rank itself
    # is unknown.
    shape = tensor_shape.TensorShape(shape)
    if shape.rank is None:
      shape = None
    else:
      shape = tuple(shape.as_list())
    if shape is not None:
      # An explicit shape fixes the rank; ignore any `ndim` argument.
      self.ndim = len(shape)
      self.shape = shape
    else:
      self.ndim = ndim
      self.shape = None
    self.max_ndim = max_ndim
    self.min_ndim = min_ndim
    self.name = name
    self.allow_last_axis_squeeze = allow_last_axis_squeeze
    try:
      axes = axes or {}
      self.axes = {int(k): axes[k] for k in axes}
    except (ValueError, TypeError):
      raise TypeError('The keys in axes must be integers.')
    if self.axes and (self.ndim is not None or self.max_ndim is not None):
      # Validate that every constrained axis exists within the allowed rank.
      # Use `is not None` (not plain truthiness) so that ndim=0 yields
      # max_dim == -1 and a clear ValueError below, instead of crashing with
      # `TypeError: None - 1` when max_ndim is also unset.
      max_dim = (self.ndim if self.ndim is not None else self.max_ndim) - 1
      max_axis = max(self.axes)
      if max_axis > max_dim:
        raise ValueError('Axis {} is greater than the maximum allowed value: {}'
                         .format(max_axis, max_dim))

  def __repr__(self):
    spec = [('dtype=' + str(self.dtype)) if self.dtype else '',
            ('shape=' + str(self.shape)) if self.shape else '',
            ('ndim=' + str(self.ndim)) if self.ndim else '',
            ('max_ndim=' + str(self.max_ndim)) if self.max_ndim else '',
            ('min_ndim=' + str(self.min_ndim)) if self.min_ndim else '',
            ('axes=' + str(self.axes)) if self.axes else '']
    return 'InputSpec(%s)' % ', '.join(x for x in spec if x)

  def get_config(self):
    """Return a JSON-serializable dict from which the spec can be rebuilt."""
    return {
        'dtype': self.dtype,
        'shape': self.shape,
        'ndim': self.ndim,
        'max_ndim': self.max_ndim,
        'min_ndim': self.min_ndim,
        'axes': self.axes}

  @classmethod
  def from_config(cls, config):
    """Alternate constructor from a `get_config()` dict."""
    return cls(**config)
def to_tensor_shape(spec):
  """Returns a tf.TensorShape object that matches the shape specifications.
  If the InputSpec's shape or ndim is defined, this method will return a fully
  or partially-known shape. Otherwise, the returned TensorShape is None.
  Args:
    spec: an InputSpec object.
  Returns:
    a tf.TensorShape object
  """
  if spec.shape is not None:
    # A concrete (possibly partial) shape wins over ndim/axes.
    return tensor_shape.TensorShape(spec.shape)
  if spec.ndim is None:
    return tensor_shape.TensorShape(None)
  # Only the rank is known; pin down the axes the spec constrains.
  dims = [None] * spec.ndim
  for axis, value in spec.axes.items():  # axes is always set in __init__
    dims[axis] = value
  return tensor_shape.TensorShape(dims)
def assert_input_compatibility(input_spec, inputs, layer_name):
  """Checks compatibility between the layer and provided inputs.

  This checks that the tensor(s) `inputs` verify the input assumptions
  of a layer (if any). If not, a clear and actionable exception gets raised.

  Arguments:
    input_spec: An InputSpec instance, list of InputSpec instances, a nested
      structure of InputSpec instances, or None.
    inputs: Input tensor, list of input tensors, or a nested structure of
      input tensors.
    layer_name: String, name of the layer (for error message formatting).

  Raises:
    ValueError: in case of mismatch between
      the provided inputs and the expectations of the layer.
    TypeError: if an input is not tensor-like (has no `shape` attribute).
  """
  if not input_spec:
    return
  input_spec = nest.flatten(input_spec)
  if isinstance(inputs, dict):
    # Flatten `inputs` by reference order if input spec names are provided
    names = [spec.name for spec in input_spec]
    if all(names):
      list_inputs = []
      for name in names:
        if name not in inputs:
          raise ValueError('Missing data for input "%s". '
                           'You passed a data dictionary with keys %s. '
                           'Expected the following keys: %s' %
                           (name, list(inputs.keys()), names))
        list_inputs.append(inputs[name])
      inputs = list_inputs
  inputs = nest.flatten(inputs)
  for x in inputs:
    # Having a shape/dtype is the only commonality of the various tensor-like
    # objects that may be passed. The most common kind of invalid type we are
    # guarding for is a Layer instance (Functional API), which does not
    # have a `shape` attribute.
    if not hasattr(x, 'shape'):
      raise TypeError('Inputs to a layer should be tensors. Got: %s' % (x,))
  # Tensors and specs must match one-to-one after flattening.
  if len(inputs) != len(input_spec):
    raise ValueError('Layer ' + layer_name + ' expects ' +
                     str(len(input_spec)) + ' input(s), '
                     'but it received ' + str(len(inputs)) +
                     ' input tensors. Inputs received: ' + str(inputs))
  for input_index, (x, spec) in enumerate(zip(inputs, input_spec)):
    if spec is None:
      continue
    shape = tensor_shape.TensorShape(x.shape)
    # Unknown rank: nothing can be validated against this (or any later) input.
    if shape.rank is None:
      return
    # Check ndim.
    if spec.ndim is not None and not spec.allow_last_axis_squeeze:
      ndim = shape.rank
      if ndim != spec.ndim:
        raise ValueError('Input ' + str(input_index) + ' of layer ' +
                         layer_name + ' is incompatible with the layer: '
                         'expected ndim=' + str(spec.ndim) + ', found ndim=' +
                         str(ndim) + '. Full shape received: ' +
                         str(tuple(shape)))
    if spec.max_ndim is not None:
      ndim = x.shape.rank
      if ndim is not None and ndim > spec.max_ndim:
        raise ValueError('Input ' + str(input_index) + ' of layer ' +
                         layer_name + ' is incompatible with the layer: '
                         'expected max_ndim=' + str(spec.max_ndim) +
                         ', found ndim=' + str(ndim))
    if spec.min_ndim is not None:
      ndim = x.shape.rank
      if ndim is not None and ndim < spec.min_ndim:
        raise ValueError('Input ' + str(input_index) + ' of layer ' +
                         layer_name + ' is incompatible with the layer: '
                         ': expected min_ndim=' + str(spec.min_ndim) +
                         ', found ndim=' + str(ndim) +
                         '. Full shape received: ' +
                         str(tuple(shape)))
    # Check dtype.
    if spec.dtype is not None:
      if x.dtype.name != spec.dtype:
        raise ValueError('Input ' + str(input_index) + ' of layer ' +
                         layer_name + ' is incompatible with the layer: '
                         'expected dtype=' + str(spec.dtype) +
                         ', found dtype=' + str(x.dtype))
    # Check specific shape axes.
    shape_as_list = shape.as_list()
    if spec.axes:
      for axis, value in spec.axes.items():
        # TF1-style Dimension objects carry their int in `.value`; unwrap.
        if hasattr(value, 'value'):
          value = value.value
        if value is not None and shape_as_list[int(axis)] not in {value, None}:
          raise ValueError(
              'Input ' + str(input_index) + ' of layer ' + layer_name + ' is'
              ' incompatible with the layer: expected axis ' + str(axis) +
              ' of input shape to have value ' + str(value) +
              ' but received input with shape ' + display_shape(x.shape))
    # Check shape.
    if spec.shape is not None and shape.rank is not None:
      spec_shape = spec.shape
      # With allow_last_axis_squeeze, a trailing axis of size 1 on either
      # side is ignored before the dimension-by-dimension comparison.
      if spec.allow_last_axis_squeeze:
        if shape_as_list and shape_as_list[-1] == 1:
          shape_as_list = shape_as_list[:-1]
        if spec_shape and spec_shape[-1] == 1:
          spec_shape = spec_shape[:-1]
      for spec_dim, dim in zip(spec_shape, shape_as_list):
        if spec_dim is not None and dim is not None:
          if spec_dim != dim:
            raise ValueError('Input ' + str(input_index) +
                             ' is incompatible with layer ' + layer_name +
                             ': expected shape=' + str(spec.shape) +
                             ', found shape=' + display_shape(x.shape))
def display_shape(shape):
  """Render `shape` as the string of its Python tuple form, for error text."""
  dims = shape.as_list()
  return str(tuple(dims))
def to_tensor_spec(input_spec, default_dtype=None):
  """Converts a Keras InputSpec object to a TensorSpec."""
  # Fall back to the Keras backend float type when no dtype was requested.
  fallback_dtype = default_dtype or backend.floatx()
  if not isinstance(input_spec, InputSpec):
    # Anything that is not an InputSpec maps to a fully-unknown spec.
    return tensor_spec.TensorSpec(None, fallback_dtype)
  chosen_dtype = input_spec.dtype or fallback_dtype
  return tensor_spec.TensorSpec(to_tensor_shape(input_spec), chosen_dtype)
|
{
"content_hash": "91c60b81fc9e3890438d7529e6f09cf6",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 80,
"avg_line_length": 39.25735294117647,
"alnum_prop": 0.6062933133545608,
"repo_name": "cxxgtxy/tensorflow",
"id": "52a2829ffdb85919dd739303a90a19026c8b850a",
"size": "11402",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/engine/input_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7481"
},
{
"name": "C",
"bytes": "186817"
},
{
"name": "C++",
"bytes": "24882047"
},
{
"name": "CMake",
"bytes": "164374"
},
{
"name": "Go",
"bytes": "854846"
},
{
"name": "HTML",
"bytes": "564161"
},
{
"name": "Java",
"bytes": "307246"
},
{
"name": "Jupyter Notebook",
"bytes": "1833659"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37393"
},
{
"name": "Objective-C",
"bytes": "7037"
},
{
"name": "Objective-C++",
"bytes": "64142"
},
{
"name": "Protocol Buffer",
"bytes": "225621"
},
{
"name": "Python",
"bytes": "22009999"
},
{
"name": "Shell",
"bytes": "341543"
},
{
"name": "TypeScript",
"bytes": "797437"
}
],
"symlink_target": ""
}
|
from base64 import b64encode
import gzip
import sys
import urllib
try:
import urllib2
except ImportError:
import urllib.request as urllib2
import hmac
import os
import time
import socket
import logging
from hashlib import sha256
try:
from cStringIO import StringIO
except ImportError:
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
from urllib import quote as urllib_quote
except ImportError:
# Python 3
from urllib.parse import quote as urllib_quote
unicode = str
# Python 2.4 compatibility
# http://code.google.com/p/boto/source/detail?r=1011
if sys.version[:3] == "2.4":
    # we are using an hmac that expects a .new() method.
    # Faker wraps the bare sha256 constructor so it exposes the
    # digest_size attribute and .new() factory that old hmac expects.
    class Faker:
        def __init__(self, which):
            self.which = which
            self.digest_size = self.which().digest_size
        def new(self, *args, **kwargs):
            return self.which(*args, **kwargs)
    sha256 = Faker(sha256)
# Python 2 only: re-export the built-in Exception from the (removed in
# Python 3) `exceptions` module; harmless no-op on Python 3.
try:
    from exceptions import Exception
except ImportError:
    pass
# Region code -> (Product Advertising API host, XSLT service host).
SERVICE_DOMAINS = {
    'CA': ('ecs.amazonaws.ca', 'xml-ca.amznxslt.com'),
    'CN': ('webservices.amazon.cn', 'xml-cn.amznxslt.com'),
    'DE': ('ecs.amazonaws.de', 'xml-de.amznxslt.com'),
    'ES': ('webservices.amazon.es', 'xml-es.amznxslt.com'),
    'FR': ('ecs.amazonaws.fr', 'xml-fr.amznxslt.com'),
    'IN': ('webservices.amazon.in', 'xml-in.amznxslt.com'),
    'IT': ('webservices.amazon.it', 'xml-it.amznxslt.com'),
    'JP': ('ecs.amazonaws.jp', 'xml-jp.amznxslt.com'),
    'UK': ('ecs.amazonaws.co.uk', 'xml-uk.amznxslt.com'),
    'US': ('ecs.amazonaws.com', 'xml-us.amznxslt.com'),
}
# Module-level logger, named after this module.
log = logging.getLogger(__name__)
def _quote_query(query):
    """Turn a dictionary into a query string in a URL, with keys
    in alphabetical order.
    """
    pairs = []
    for key in sorted(query):
        # '~' stays unescaped, per the AWS request-signing rules this
        # module implements elsewhere.
        encoded = urllib_quote(unicode(query[key]).encode('utf-8'), safe='~')
        pairs.append("%s=%s" % (key, encoded))
    return "&".join(pairs)
class AmazonError(Exception):
    """Error raised for Amazon Product Advertising API misuse
    (e.g. passing the discontinued `Style` parameter)."""
    pass
class AmazonCall(object):
    """A single (chainable) call against the Amazon Product Advertising API.

    Attribute access on an instance (see `__getattr__`) produces a new
    AmazonCall with that attribute name as the `Operation`, so
    `api.ItemLookup(...)` works without ItemLookup being defined anywhere.
    """

    def __init__(self, AWSAccessKeyId=None, AWSSecretAccessKey=None,
                 AssociateTag=None, Operation=None, Version=None, Region=None,
                 Timeout=None, MaxQPS=None, Parser=None,
                 CacheReader=None, CacheWriter=None,
                 ErrorHandler=None,
                 _last_query_time=None):
        # Credentials fall back to the conventional AWS environment variables.
        self.AWSAccessKeyId = (AWSAccessKeyId or
                               os.environ.get('AWS_ACCESS_KEY_ID'))
        self.AWSSecretAccessKey = (AWSSecretAccessKey or
                                   os.environ.get('AWS_SECRET_ACCESS_KEY'))
        self.AssociateTag = (AssociateTag or
                             os.environ.get('AWS_ASSOCIATE_TAG'))
        self.CacheReader = CacheReader
        self.CacheWriter = CacheWriter
        self.ErrorHandler = ErrorHandler
        self.MaxQPS = MaxQPS
        self.Operation = Operation
        self.Parser = Parser
        self.Version = Version
        self.Region = Region
        self.Timeout = Timeout
        # put this in a list so it can be shared between instances
        self._last_query_time = _last_query_time or [None]

    def signed_request(self):
        # Placeholder; request signing happens inline in api_url().
        pass

    def __getattr__(self, k):
        # Only called when normal attribute lookup fails, so any unknown
        # attribute becomes the Operation of a derived AmazonCall.
        try:
            # NOTE: `object` has no __getattr__, so this lookup always raises
            # AttributeError; kept for fidelity with the original control flow.
            return object.__getattr__(self, k)
        except AttributeError:
            return AmazonCall(self.AWSAccessKeyId, self.AWSSecretAccessKey,
                              self.AssociateTag,
                              Operation=k, Version=self.Version,
                              Region=self.Region, Timeout=self.Timeout,
                              MaxQPS=self.MaxQPS, Parser=self.Parser,
                              CacheReader=self.CacheReader,
                              CacheWriter=self.CacheWriter,
                              ErrorHandler=self.ErrorHandler,
                              _last_query_time=self._last_query_time)

    def _maybe_parse(self, response_text):
        """Run the raw response through the user-supplied Parser, if any."""
        if self.Parser:
            return self.Parser(response_text)
        else:
            return response_text

    def api_url(self, **kwargs):
        """The URL for making the given query against the API."""
        query = {
            'Operation': self.Operation,
            'Service': "AWSECommerceService",
            'Timestamp': time.strftime(
                "%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
            'Version': self.Version,
        }
        query.update(kwargs)
        query['AWSAccessKeyId'] = self.AWSAccessKeyId
        # Re-stamp after the update so caller kwargs cannot override it.
        query['Timestamp'] = time.strftime("%Y-%m-%dT%H:%M:%SZ",
                                           time.gmtime())
        if self.AssociateTag:
            query['AssociateTag'] = self.AssociateTag
        service_domain = SERVICE_DOMAINS[self.Region][0]
        quoted_strings = _quote_query(query)
        # Canonical string-to-sign per the Product Advertising API spec.
        data = "GET\n" + service_domain + "\n/onca/xml\n" + quoted_strings
        if sys.version_info[0] == 3:
            digest = hmac.new(
                bytes(self.AWSSecretAccessKey, encoding='utf-8'),
                bytes(data, encoding='utf-8'), sha256).digest()
            signature = urllib.parse.quote(b64encode(digest))
        else:
            digest = hmac.new(self.AWSSecretAccessKey, data, sha256).digest()
            signature = urllib.quote(b64encode(digest))
        return ("http://" + service_domain + "/onca/xml?" +
                quoted_strings + "&Signature=%s" % signature)

    def cache_url(self, **kwargs):
        """A simplified URL to be used for caching the given query.

        Unlike api_url() this omits credentials, timestamp and signature,
        so identical queries always map to the same cache key.
        """
        query = {
            'Operation': self.Operation,
            'Service': "AWSECommerceService",
            'Version': self.Version,
        }
        query.update(kwargs)
        service_domain = SERVICE_DOMAINS[self.Region][0]
        return "http://" + service_domain + "/onca/xml?" + _quote_query(query)

    def _call_api(self, api_url, err_env):
        """urlopen(), plus error handling and possible retries.

        err_env is a dict of additional info passed to the error handler
        """
        while True:  # may retry on error
            api_request = urllib2.Request(
                api_url, headers={"Accept-Encoding": "gzip"})
            log.debug("Amazon URL: %s" % api_url)
            try:
                if self.Timeout and sys.version[:3] in ["2.4", "2.5"]:
                    # urllib2.urlopen() doesn't accept timeout until 2.6
                    old_timeout = socket.getdefaulttimeout()
                    try:
                        socket.setdefaulttimeout(self.Timeout)
                        return urllib2.urlopen(api_request)
                    finally:
                        socket.setdefaulttimeout(old_timeout)
                else:
                    # the simple way
                    return urllib2.urlopen(api_request, timeout=self.Timeout)
            # Deliberately broad: every failure is offered to the user's
            # ErrorHandler, which decides whether to retry (truthy) or not.
            except:
                if not self.ErrorHandler:
                    raise
                exception = sys.exc_info()[1]  # works in Python 2 and 3
                err = {'exception': exception}
                err.update(err_env)
                if not self.ErrorHandler(err):
                    raise

    def __call__(self, **kwargs):
        """Perform the API call, honoring cache, throttle and error hooks."""
        if 'Style' in kwargs:
            raise AmazonError("The `Style` parameter has been discontinued by"
                              " AWS. Please remove all references to it and"
                              " reattempt your request.")
        cache_url = self.cache_url(**kwargs)
        if self.CacheReader:
            cached_response_text = self.CacheReader(cache_url)
            if cached_response_text is not None:
                return self._maybe_parse(cached_response_text)
        api_url = self.api_url(**kwargs)
        # throttle ourselves if need be
        if self.MaxQPS:
            last_query_time = self._last_query_time[0]
            if last_query_time:
                wait_time = 1 / self.MaxQPS - (time.time() - last_query_time)
                if wait_time > 0:
                    log.debug('Waiting %.3fs to call Amazon API' % wait_time)
                    time.sleep(wait_time)
            self._last_query_time[0] = time.time()
        # make the actual API call
        response = self._call_api(api_url,
                                  {'api_url': api_url, 'cache_url': cache_url})
        # decompress the response if need be
        # BUG FIX: get()/getheader() return None when the server sends no
        # Content-Encoding header, and `"gzip" in None` raises TypeError;
        # substitute "" so an absent header means "not compressed".
        if sys.version_info[0] == 3:
            if "gzip" in (response.info().get("Content-Encoding") or ""):
                response_text = gzip.decompress(response.read())
            else:
                response_text = response.read()
        else:
            if "gzip" in (response.info().getheader("Content-Encoding") or ""):
                gzipped_file = gzip.GzipFile(fileobj=StringIO(response.read()))
                response_text = gzipped_file.read()
            else:
                response_text = response.read()
        # write it back to the cache
        if self.CacheWriter:
            self.CacheWriter(cache_url, response_text)
        # parse and return it
        return self._maybe_parse(response_text)
class Amazon(AmazonCall):
    """User-facing entry point; configures and delegates to AmazonCall."""
    def __init__(self, AWSAccessKeyId=None, AWSSecretAccessKey=None,
                 AssociateTag=None, Operation=None, Version="2011-08-01",
                 Region="US", Timeout=None, MaxQPS=None, Parser=None,
                 CacheReader=None, CacheWriter=None, ErrorHandler=None):
        """Create an Amazon API object.

        AWSAccessKeyId: Your AWS Access Key, sent with API queries. If not
                        set, will be automatically read from the environment
                        variable $AWS_ACCESS_KEY_ID
        AWSSecretAccessKey: Your AWS Secret Key, used to sign API queries. If
                            not set, will be automatically read from the
                            environment variable $AWS_SECRET_ACCESS_KEY
        AssociateTag: Your "username" for the Amazon Affiliate program,
                      sent with API queries.
        Version: API version. The default should work
        Region: ccTLD you want to search for products on (e.g. 'UK'
                for amazon.co.uk). Must be uppercase. Default is 'US'.
        Timeout: optional timeout for queries
        MaxQPS: optional maximum queries per second. If we've made an API call
                on this object more recently that 1/MaxQPS, we'll wait
                before making the call. Useful for making batches of queries.
                You generally want to set this a little lower than the
                max (so 0.9, not 1.0).
        Parser: a function that takes the raw API response (XML in a
                bytestring) and returns a more convenient object of
                your choice; if set, API calls will pass the response through
                this
        CacheReader: Called before attempting to make an API call.
                     A function that takes a single argument, the URL that
                     would be passed to the API, minus auth information,
                     and returns a cached version of the (unparsed) response,
                     or None
        CacheWriter: Called after a successful API call. A function that
                     takes two arguments, the same URL passed to
                     CacheReader, and the (unparsed) API response.
        ErrorHandler: Called after an unsuccessful API call, with a
                      dictionary containing these values:
                          exception: the exception (an HTTPError or URLError)
                          api_url: the url called
                          cache_url: the url used for caching purposes
                              (see CacheReader above)
                      If this returns true, the call will be retried
                      (you generally want to wait some time before
                      returning, in this case)
        """
        # Operation is for internal use by AmazonCall.__getattr__()
        AmazonCall.__init__(self, AWSAccessKeyId, AWSSecretAccessKey,
                            AssociateTag, Operation, Version=Version,
                            Region=Region, Timeout=Timeout,
                            MaxQPS=MaxQPS, Parser=Parser,
                            CacheReader=CacheReader,
                            CacheWriter=CacheWriter,
                            ErrorHandler=ErrorHandler)
__all__ = ["Amazon", "AmazonError"]
|
{
"content_hash": "e7acd609501b9993677983c765f7962c",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 79,
"avg_line_length": 38.422360248447205,
"alnum_prop": 0.5590042030391206,
"repo_name": "AkihikoITOH/capybara",
"id": "60224a4fdfb29668da2f0207dc2303b904c0b589",
"size": "12372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "capybara/virtualenv/lib/python2.7/site-packages/bottlenose/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "44245"
},
{
"name": "CSS",
"bytes": "6111"
},
{
"name": "Groff",
"bytes": "89"
},
{
"name": "HTML",
"bytes": "530"
},
{
"name": "JavaScript",
"bytes": "6345"
},
{
"name": "Python",
"bytes": "4240093"
},
{
"name": "Shell",
"bytes": "3855"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
}
|
# Run guard placed at the top of the file: when executed as a script it
# imports main() from this same module (qtmpl) and exits with its result.
# NOTE(review): importing the module from itself re-runs the module-level
# setup below under the name `qtmpl` before main() is reached — verify this
# double-execution is intended.
if __name__ == '__main__':
    from qtmpl import main
    raise SystemExit(main())
from matplotlib import use
#force matplotlib to use wx with the use('wx') call before importing pyplot
use('wx')
from matplotlib import pyplot
from matplotlib.backends import backend_wx
from twisted.internet import wxreactor
#force Twisted to use wx with the wxreactor.install() call before importing reactor
wxreactor.install()
from twisted.internet import reactor, task
import numpy as np
import time
from matplotlib.pylab import subplots,close
from matplotlib import cm
def randomwalk(dims=(256, 256), n=20, sigma=5, alpha=0.95, seed=1):
    """A simple random walk with memory.

    Yields, forever, a (2, n) array of walker positions wrapped into the
    box given by `dims`. `alpha` blends each step with the previous one
    (momentum); `sigma` scales the step size; `seed` makes the walk
    deterministic.

    Note: the same array object is mutated and re-yielded each iteration.
    """
    r, c = dims
    gen = np.random.RandomState(seed)
    pos = gen.rand(2, n) * ((r,), (c,))
    old_delta = gen.randn(2, n) * sigma
    while 1:
        delta = (1. - alpha) * gen.randn(2, n) * sigma + alpha * old_delta
        pos += delta
        # `range` (not Py2-only `xrange`) keeps this working on Python 2 and 3.
        for ii in range(n):
            if not (0. <= pos[0, ii] < r):
                pos[0, ii] = abs(pos[0, ii] % r)
            if not (0. <= pos[1, ii] < c):
                pos[1, ii] = abs(pos[1, ii] % c)
        old_delta = delta
        yield pos
#replaced the call to pyplot.show() with a call to my own Show subclass with a mainloop
class TwistedWxShow(backend_wx.Show):
    """Show subclass whose mainloop starts the shared wx/Twisted reactor."""

    running = False

    def mainloop(self):
        # Guard clause: the combined event loop may only be started once.
        if self.running:
            return
        self.running = True
        # reactor.run() drives both the wx mainloop and the Twisted mainloop.
        reactor.run()
# Module-level animation state: timestamp of the previous frame, the figure
# and the walker generator shared (via `global`) with main()/proof().
prev_time = time.time()
fig, ax = subplots(1,1)
ax.set_aspect('equal')
ax.set_xlim(0,255)
ax.set_ylim(0,255)
# NOTE(review): Axes.hold() was removed in modern matplotlib — confirm the
# pinned matplotlib version still provides it.
ax.hold(True)
rw = randomwalk()
# NOTE(review): generator.next() is Python-2-only syntax (next(rw) on Py3).
x,y = rw.next()
fig.canvas.draw()
# cache the background
# NOTE(review): the cached-background line below is commented out, so any
# code that blit-restores `background` must define it itself.
#background = fig.canvas.copy_from_bbox(ax.bbox)
plt = ax.plot(x,y,'o')[0]
# `tic` and `random_gen` appear unused in the visible code — presumably
# leftovers from an earlier version; verify before removing.
tic = time.time()
random_gen = np.random.mtrand.RandomState(seed=127260)
def main():
def proof():
global prev_time, rw, fig, ax, plt
x,y = rw.next()
plt.set_data(x,y)
# restore background
fig.canvas.restore_region(background)
# redraw just the points
ax.draw_artist(plt)
# fill in the axes rectangle
fig.canvas.blit(ax.bbox)
now = time.time()
if prev_time:
print 'Twisted!', now - prev_time
prev_time = now
task.LoopingCall(proof).start(0.1)
TwistedWxShow()()
|
{
"content_hash": "35aee27702070683a1de73ccf883307f",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 87,
"avg_line_length": 24.806451612903224,
"alnum_prop": 0.6276549631556133,
"repo_name": "opikalo/pyfire",
"id": "3987c95024fc6d8eee9100bf5dc5cd637e65e125",
"size": "2436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/qtmpl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "5911954"
},
{
"name": "Python",
"bytes": "149683"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
}
|
# Names of user-configurable settings read elsewhere in the plugin
# (presumably Sublime Text settings keys — verify against the callers).
AUTO_SCROLL_TO_ERROR = "auto_scroll_to_error"
XML_CATALOG_FILES = "xml_catalog_files"
|
{
"content_hash": "5e367fd25a56453af3924a5193a4b5cf",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 45,
"avg_line_length": 43,
"alnum_prop": 0.7441860465116279,
"repo_name": "eerohele/exalt",
"id": "ce3f1084bde85c31aba512d67ea9d52307333d92",
"size": "86",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37583"
}
],
"symlink_target": ""
}
|
'''
Perform Granger based causality analysis using Generalized Partial Directed
Coherence on an example dataset.

Uses the data and example from mne-python combined with the Scot package
to perform the Granger Causality analysis.

Author: Praveen Sripad <pravsripad@gmail.com>
'''
import numpy as np
from scipy import stats
import mne
from mne.datasets import sample
from mne.minimum_norm import apply_inverse_epochs, read_inverse_operator
from jumeg.jumeg_utils import get_jumeg_path
from jumeg.connectivity.causality import (compute_order, do_mvar_evaluation,
                                          prepare_causality_matrix)
from jumeg.connectivity import (plot_grouped_connectivity_circle,
                                plot_grouped_causality_circle)
import scot
import scot.connectivity_statistics as scs
from scot.connectivity import connectivity
import yaml
import time
# Wall-clock timing for the whole pipeline (reported at the end).
t_start = time.time()
# NOTE(review): print((...)) with a tuple argument prints the tuple repr —
# looks like a 2to3 artifact; output formatting may not be as intended.
print(('Scot version -', scot.__version__))
yaml_fname = get_jumeg_path() + '/data/desikan_aparc_cortex_based_grouping.yaml'
labels_fname = get_jumeg_path() + '/data/desikan_label_names.yaml'
data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_raw = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
fname_event = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
# Load data
inverse_operator = read_inverse_operator(fname_inv)
raw = mne.io.read_raw_fif(fname_raw)
events = mne.read_events(fname_event)
# Add a bad channel
raw.info['bads'] += ['MEG 2443']
# Pick MEG channels
picks = mne.pick_types(raw.info, meg=True, eeg=False, stim=False, eog=True,
                       exclude='bads')
# Define epochs for left-auditory condition
event_id, tmin, tmax = 1, -0.2, 0.5
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=dict(mag=4e-12, grad=4000e-13,
                                                    eog=150e-6))
if not epochs.preload:
    epochs.load_data()
# parameters, lots of parameters
snr = 1.0
lambda2 = 1.0 / snr ** 2
method = "MNE"  # use MNE method (could also be MNE or sLORETA)
stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, method,
                            pick_ori="normal", return_generator=True)
# Get labels for FreeSurfer 'aparc' cortical parcellation with 34 labels/hemi
labels = mne.read_labels_from_annot('sample', parc='aparc',
                                    subjects_dir=subjects_dir)
label_colors = [label.color for label in labels]
# Average the source estimates within each label using sign-flips to reduce
# signal cancellations, also here we return a generator
src = inverse_operator['src']
label_ts = mne.extract_label_time_course(stcs, labels, src, mode='mean_flip',
                                         return_generator=False)
label_ts_ = np.array(label_ts)
bands = ['alpha']
freqs = [(8, 13)]
gcmethod = 'GPDC'
n_surr = 1  # number of surrogates
surr_thresh = 95  # percentile of surr threshold used
n_jobs = 1
nfft = 512
# normalize the representative ts
print('\nperform normalization using zscoring...')
label_ts = stats.zscore(label_ts_, axis=2)
morder = 15  # set fixed model order
# set this to find the optimal model order using the BIC criterion
# be advised, this takes a long time !!
# morder, bic = compute_order(label_ts, m_max=100)  # code provided by Qunxi
# print('the model order based on BIC is..', morder)
# evaluate the chosen model order
print(('\nShape of label_ts -', label_ts.shape))
# mvar needs (trials, channels, samples)
print(('\nRunning for model order - ', morder))
thr_cons, whit_min, whit_max = 0.8, 1., 3.
is_white, consistency, is_stable = do_mvar_evaluation(label_ts, morder,
                                                      whit_max, whit_min,
                                                      thr_cons)
print(('model_order, whiteness, consistency, stability: %d, %s, %f, %s\n'
       % (morder, str(is_white), consistency, str(is_stable))))
# compute the Granger Partial Directed Coherence values
print('computing GPDC connectivity...')
mvar = scot.var.VAR(morder)
# result : array, shape (`repeats`, n_channels, n_channels, nfft)
surr = scs.surrogate_connectivity(gcmethod, label_ts, mvar, nfft=nfft,
                                  n_jobs=n_jobs, repeats=n_surr)
mvar.fit(label_ts)
# mvar coefficients (n_channels, n_channels * model_order)
# mvar covariance matrix (n_channels, n_channels)
# result : array, shape (n_channels, n_channels, `nfft`)
cau = connectivity(gcmethod, mvar.coef, mvar.rescov, nfft=nfft)
# get the band averaged, thresholded connectivity matrix
caus, max_cons, max_surrs = prepare_causality_matrix(
    cau, surr, freqs, nfft=nfft,
    sfreq=epochs.info['sfreq'], surr_thresh=surr_thresh)
print(('Shape of causality matrix: ', caus.shape))
# read the label names used for plotting
# with open(labels_fname, 'r') as f:
#     label_names = pickle.load(f)
with open(labels_fname, 'r') as f:
    label_names = yaml.safe_load(f)['label_names']
plot_grouped_causality_circle(caus[0], yaml_fname, label_names, n_lines=10,
                              labels_mode=None, replacer_dict=None,
                              out_fname='causality_sample.png',
                              colormap='Blues', colorbar=True,
                              arrowstyle='->,head_length=1,head_width=1',
                              figsize=(10, 6), show=False)
t_end = time.time()
total_time_taken = t_end - t_start
print(('Total time taken in minutes: %f' % (total_time_taken / 60.)))
|
{
"content_hash": "53db733896b899742dee0fcd0a01b338",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 80,
"avg_line_length": 37.70469798657718,
"alnum_prop": 0.6571733713065148,
"repo_name": "pravsripad/jumeg",
"id": "c2c1b6e37beab82cdc7f906fd940525d074d88b6",
"size": "5642",
"binary": false,
"copies": "3",
"ref": "refs/heads/master_dev",
"path": "examples/causality/do_granger_causality.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2309512"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
class DynamicSettingConfig(AppConfig):
    """Django application configuration for the `dynamic_setting` app.

    `verbose_name` is the human-readable label shown by Django (e.g. in the
    admin interface).
    """
    name = 'dynamic_setting'
    verbose_name = 'Setting'
|
{
"content_hash": "ad57133ceacd539496d9c1f5fdc321a0",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 26.2,
"alnum_prop": 0.7557251908396947,
"repo_name": "koralarts/django-dynamic-settings",
"id": "1148b9a15c35728e13b6b36728805fd0d5637ec1",
"size": "131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dynamic_setting/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9950"
}
],
"symlink_target": ""
}
|
"""
Raw time-series data from BES detectors.
BES detectors generate differential signals such that "zero signal" or
"no light" corresponds to about -9.5 V. DC signal levels should be referenced
to the "zero signal" output. "No light" shots (due to failed shutters)
include 138545 and 138858.
BES detector channels **do not** correspond to permanent measurement locations.
BES sightlines observe fixed measurement locations, but sightline optical
fibers can be coupled into any detector channel based upon experimental needs.
Consequently, the measurement location of detector channels can change day to
day. That said, **most** BES data from 2010 adhered to a standard
configuration with channels 1-8 spanning the radial range R = 129-146 cm.
"""
# The imports below that remain commented out are currently-disabled parts
# of this subpackage's API; they are kept here for easy re-enabling.
#from .gui import gui
#from .fft import fft, plotfft, powerspectrum
#from .animation import animate
#from .movie import movie
#from .configuration import loadConfig
from .crosspower import plotcrosspower, plotcrossphase, plotcoherence
from .crosspower import crosssignal, plotcorrelation
#from .postprocess import postprocess as _postprocess
# Public API: only the cross-power/cross-phase helpers are exported.
__all__ = ['crosssignal',
           'plotcorrelation',
           'plotcrosspower',
           'plotcoherence',
           'plotcrossphase',
           # '_postprocess',
           ]
# Previous, larger export list kept for reference:
# __all__ = ['fft', 'plotfft', 'powerspectrum',
#            'animate', 'loadConfig', 'movie',
#            'gui',
#            'crosssignal', 'plotcrosspower', 'plotcoherence', 'plotcrossphase',
#            'plotcorrelation']
|
{
"content_hash": "2fab8a853e1c87b60c18a0f7a7df6b7b",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 79,
"avg_line_length": 39.60526315789474,
"alnum_prop": 0.7142857142857143,
"repo_name": "drsmith48/fdp",
"id": "55d6451227dfce402a2a33bffc9b228b1279a832",
"size": "1529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fdp/methods/nstxu/bes/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "4029"
},
{
"name": "Python",
"bytes": "176613"
}
],
"symlink_target": ""
}
|
"""Tests for server initialization and loop logic."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2017 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
import pytest
from cwmud.core.server import EVENTS, SERVER
# We need a dummy pid: the tests below boot/loop SERVER directly without a
# real server process, so give its recorded pid a placeholder value.
if SERVER._pid is None:
    SERVER._pid = 0
def test_boot():
    """Test that we can initiate and boot the server."""
    calls = []

    # Init is not pre-hookable, so this hook must never fire.
    @EVENTS.hook("server_init", pre=True)
    def _record_pre_init():
        calls.append(0)

    @EVENTS.hook("server_init")
    def _record_init():
        calls.append(1)

    @EVENTS.hook("server_boot")
    def _record_boot():
        calls.append(2)

    SERVER.boot()
    # Only the post-init and boot hooks should have run, in that order.
    assert calls == [1, 2]
def test_loop():
    """Test that we can loop through the server."""

    class _LoopInterrupt(Exception):
        """Raised from inside the loop so the test can escape it."""

    @EVENTS.hook("server_loop")
    def _raise_interrupt():
        raise _LoopInterrupt()

    # The loop runs forever, so reaching our exception proves it iterated.
    with pytest.raises(_LoopInterrupt):
        SERVER.loop()
|
{
"content_hash": "e70568d1d4c8c11a0f1db9e473f516d8",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 73,
"avg_line_length": 21.725490196078432,
"alnum_prop": 0.6308664259927798,
"repo_name": "whutch/atria",
"id": "e3d4f731b53635e41c040af8a3fd4b80b5cbe615",
"size": "1132",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/core/test_server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "637"
},
{
"name": "Python",
"bytes": "405976"
}
],
"symlink_target": ""
}
|
import sys
try:
from collections import OrderedDict
except ImportError:
# python 2.6 or earlier, use backport
from ordereddict import OrderedDict
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PYPY = hasattr(sys, 'pypy_translation_info')
# No-op used where a Python-2 shim needs a transformation but Python 3
# does not (and vice versa).
_identity = lambda x: x
if PY3:
    # --- Python 3: native names, with aliases matching the Py2 vocabulary.
    def b(s):
        return s.encode('latin-1')
    def u(s):
        return s
    unichr = chr
    range_type = range
    text_type = str
    builtin_str = str
    binary_type = bytes
    string_types = (str,)
    integer_types = (int,)
    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())
    import pickle
    from io import BytesIO, StringIO
    NativeStringIO = StringIO
    def reraise(tp, value, tb=None):
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    from functools import reduce
    ifilter = filter
    imap = map
    izip = zip
    intern = sys.intern
    implements_iterator = _identity
    implements_to_string = _identity
    encode_filename = _identity
    get_next = lambda x: x.__next__
    from urllib.parse import urlparse
    from urllib.request import urlopen
else:
    # --- Python 2: map the same vocabulary onto the Py2 builtins/modules.
    def b(s):
        return s
    # Workaround for standalone backslash
    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), 'unicode_escape')
    unichr = unichr
    range_type = xrange
    text_type = unicode
    builtin_str = str
    binary_type = str
    string_types = (basestring, str, unicode)
    integer_types = (int, long)
    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()
    import cPickle as pickle
    from cStringIO import StringIO as BytesIO, StringIO
    NativeStringIO = BytesIO
    # `raise tp, value, tb` is a syntax error on Py3, so it is hidden
    # inside exec() to keep this module importable there.
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
    reduce = reduce
    from itertools import imap, izip, ifilter
    intern = intern
    def implements_iterator(cls):
        cls.next = cls.__next__
        del cls.__next__
        return cls
    def implements_to_string(cls):
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls
    get_next = lambda x: x.next
    def encode_filename(filename):
        if isinstance(filename, unicode):
            return filename.encode('utf-8')
        return filename
    from urlparse import urlparse
    from urllib import urlopen
def to_native_string(string, encoding='ascii'):
    """
    Given a string object, regardless of type, returns a representation of that
    string in the native string type, encoding and decoding where necessary.
    This assumes ASCII unless told otherwise.
    """
    if isinstance(string, builtin_str):
        return string
    # On Python 2 the native str is bytes, so encode text; on Python 3 the
    # native str is text, so decode bytes.
    return string.encode(encoding) if PY2 else string.decode(encoding)
def with_metaclass(meta, *bases):
    """Create a base class with metaclass `meta`, usable on Python 2 and 3."""
    # This requires a bit of explanation: the basic idea is to make a
    # dummy metaclass for one level of class instanciation that replaces
    # itself with the actual metaclass. Because of internal type checks
    # we also need to make sure that we downgrade the custom metaclass
    # for one level to something closer to type (that's why __call__ and
    # __init__ comes back from type etc.).
    #
    # This has the advantage over six.with_metaclass in that it does not
    # introduce dummy classes into the final MRO.
    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__
        def __new__(cls, name, this_bases, d):
            # First call (this_bases is None): build the temporary class.
            # Second call (real subclassing): hand off to the real metaclass.
            if this_bases is None:
                return type.__new__(cls, name, (), d)
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})
|
{
"content_hash": "1697d2eede0ac8fc43b50b9dff065aba",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 79,
"avg_line_length": 28.66176470588235,
"alnum_prop": 0.6285274499743458,
"repo_name": "moonrabbit/flask-jsonrpc",
"id": "8fe4ab029b9b189cf1bf4996891286b113078bed",
"size": "5591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flask_jsonrpc/_compat.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "24223"
},
{
"name": "HTML",
"bytes": "14257"
},
{
"name": "JavaScript",
"bytes": "103580"
},
{
"name": "Python",
"bytes": "113827"
}
],
"symlink_target": ""
}
|
import hashlib
import os
import posixpath
from dmunit import DeviceManagerTestCase
class Cat2TestCase(DeviceManagerTestCase):
    """Checks that a binary file (> 64K) pushed to the device can be read
    back via ``catFile`` without corruption, by comparing MD5 digests."""

    def runTest(self):
        """Push mybinary.zip to the device and verify the round-tripped MD5."""
        testroot = posixpath.join(self.dm.getDeviceRoot(), 'infratest')
        self.dm.removeDir(testroot)
        self.dm.mkDir(testroot)
        local_path = os.path.join('test-files', 'mybinary.zip')
        # Fix: close the local file deterministically instead of leaking the
        # handle returned by open(...).read().
        with open(local_path, 'rb') as fh:
            origFile = fh.read()
        self.dm.pushFile(local_path,
                         posixpath.join(testroot, 'mybinary.zip'))
        resultFile = self.dm.catFile(posixpath.join(testroot, 'mybinary.zip'))
        self.assertEqual(hashlib.md5(origFile).hexdigest(),
                         hashlib.md5(resultFile).hexdigest())
|
{
"content_hash": "f039dd036a234ba791c0ea4c39fa48ca",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 80,
"avg_line_length": 37.130434782608695,
"alnum_prop": 0.6194379391100703,
"repo_name": "sergecodd/FireFox-OS",
"id": "59681268971b9f3006b0b56b0866cf9b3c49822c",
"size": "1054",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "B2G/gecko/testing/mozbase/mozdevice/sut_tests/test_cat2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "443"
},
{
"name": "ApacheConf",
"bytes": "85"
},
{
"name": "Assembly",
"bytes": "5123438"
},
{
"name": "Awk",
"bytes": "46481"
},
{
"name": "Batchfile",
"bytes": "56250"
},
{
"name": "C",
"bytes": "101720951"
},
{
"name": "C#",
"bytes": "38531"
},
{
"name": "C++",
"bytes": "148896543"
},
{
"name": "CMake",
"bytes": "23541"
},
{
"name": "CSS",
"bytes": "2758664"
},
{
"name": "DIGITAL Command Language",
"bytes": "56757"
},
{
"name": "Emacs Lisp",
"bytes": "12694"
},
{
"name": "Erlang",
"bytes": "889"
},
{
"name": "FLUX",
"bytes": "34449"
},
{
"name": "GLSL",
"bytes": "26344"
},
{
"name": "Gnuplot",
"bytes": "710"
},
{
"name": "Groff",
"bytes": "447012"
},
{
"name": "HTML",
"bytes": "43343468"
},
{
"name": "IDL",
"bytes": "1455122"
},
{
"name": "Java",
"bytes": "43261012"
},
{
"name": "JavaScript",
"bytes": "46646658"
},
{
"name": "Lex",
"bytes": "38358"
},
{
"name": "Logos",
"bytes": "21054"
},
{
"name": "Makefile",
"bytes": "2733844"
},
{
"name": "Matlab",
"bytes": "67316"
},
{
"name": "Max",
"bytes": "3698"
},
{
"name": "NSIS",
"bytes": "421625"
},
{
"name": "Objective-C",
"bytes": "877657"
},
{
"name": "Objective-C++",
"bytes": "737713"
},
{
"name": "PHP",
"bytes": "17415"
},
{
"name": "Pascal",
"bytes": "6780"
},
{
"name": "Perl",
"bytes": "1153180"
},
{
"name": "Perl6",
"bytes": "1255"
},
{
"name": "PostScript",
"bytes": "1139"
},
{
"name": "PowerShell",
"bytes": "8252"
},
{
"name": "Protocol Buffer",
"bytes": "26553"
},
{
"name": "Python",
"bytes": "8453201"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3481"
},
{
"name": "Ruby",
"bytes": "5116"
},
{
"name": "Scilab",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "3383832"
},
{
"name": "SourcePawn",
"bytes": "23661"
},
{
"name": "TeX",
"bytes": "879606"
},
{
"name": "WebIDL",
"bytes": "1902"
},
{
"name": "XSLT",
"bytes": "13134"
},
{
"name": "Yacc",
"bytes": "112744"
}
],
"symlink_target": ""
}
|
"""
Miscellaneous functions that don't fit anywhere else.
"""
from __future__ import division
import os
import sys
import re
import math
import imp
import string
import threading
import signal
import pkgutil
import traceback
import logging
import random
import hashlib
import subprocess
from subprocess import CalledProcessError
from datetime import datetime, timedelta
from operator import mul, itemgetter
from StringIO import StringIO
from itertools import cycle, groupby
from functools import partial
from distutils.spawn import find_executable
import yaml
from dateutil import tz
# ABI --> architectures list
# Maps a canonical Android ABI name to the architecture strings that should
# be treated as equivalent to it.
ABI_MAP = {
    'armeabi': ['armeabi', 'armv7', 'armv7l', 'armv7el', 'armv7lh', 'armeabi-v7a'],
    'arm64': ['arm64', 'armv8', 'arm64-v8a', 'aarch64'],
}
def preexec_function():
    """subprocess ``preexec_fn``: detach the child from SIGINT and give it
    its own process group so it and its children can be killed as a unit."""
    # Ignore the SIGINT signal by setting the handler to the standard
    # signal handler SIG_IGN.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # Change process group in case we have to kill the subprocess and all of
    # its children later.
    # TODO: this is Unix-specific; would be good to find an OS-agnostic way
    # to do this in case we wanna port WA to Windows.
    os.setpgrp()
# Module-level logger used by check_output()'s timeout callback.
check_output_logger = logging.getLogger('check_output')
# Defined here rather than in wlauto.exceptions due to module load dependencies
# Defined here rather than in wlauto.exceptions due to module load dependencies
class TimeoutError(Exception):
    """Raised when a subprocess command times out. This is basically a ``WAError``-derived version
    of ``subprocess.CalledProcessError``, the thinking being that while a timeout could be due to
    programming error (e.g. not setting long enough timers), it is often due to some failure in the
    environment, and therefore should be classed as a "user error"."""

    def __init__(self, command, output):
        super(TimeoutError, self).__init__('Timed out: {}'.format(command))
        self.command = command  # the command that timed out
        self.output = output    # whatever the child produced before the kill

    def __str__(self):
        # NOTE(review): Exception.message exists on Python 2 only; on
        # Python 3 this attribute access would raise AttributeError.
        return '\n'.join([self.message, 'OUTPUT:', self.output or ''])
class CalledProcessErrorWithStderr(CalledProcessError):
    """``CalledProcessError`` variant that also carries the captured stderr.

    ``output`` and ``error`` must be supplied as keyword arguments; all other
    arguments are forwarded to ``CalledProcessError``.
    """

    def __init__(self, *args, **kwargs):
        output = kwargs.pop("output")
        error = kwargs.pop("error")
        super(CalledProcessErrorWithStderr, self).__init__(*args, **kwargs)
        # Fix: assign *after* the super() call -- CalledProcessError.__init__
        # sets self.output itself (to its default None here, since the kwarg
        # was popped), which used to clobber the value assigned beforehand.
        self.output = output
        self.error = error

    def __str__(self):
        return '{}\nSTDOUT: {}\nSTDERR:{}'.format(CalledProcessError.__str__(self),
                                                  self.output, self.error)

    __repr__ = __str__
def check_output(command, timeout=None, ignore=None, **kwargs):
    """This is a version of subprocess.check_output that adds a timeout parameter to kill
    the subprocess if it does not return within the specified time.

    :command: command to run, as for ``subprocess.Popen``.
    :timeout: seconds to wait before SIGKILLing the child's process group.
    :ignore: exit code(s) to tolerate: an int, a list of ints, or 'all'.
    :returns: ``(stdout, stderr)`` of the child.
    :raises TimeoutError: if the child exited with -9 (assumed killed by the
        timeout callback).
    :raises CalledProcessErrorWithStderr: on other non-ignored non-zero exits.
    """
    # pylint: disable=too-many-branches
    if ignore is None:
        ignore = []
    elif isinstance(ignore, int):
        ignore = [ignore]
    elif not isinstance(ignore, list) and ignore != 'all':
        message = 'Invalid value for ignore parameter: "{}"; must be an int or a list'
        raise ValueError(message.format(ignore))
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')

    def callback(pid):
        # Timer callback: kill the child's whole process group (the child
        # moved itself into a fresh group via preexec_function).
        try:
            check_output_logger.debug('{} timed out; sending SIGKILL'.format(pid))
            os.killpg(pid, signal.SIGKILL)
        except OSError:
            pass  # process may have already terminated.

    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                               preexec_fn=preexec_function, **kwargs)

    if timeout:
        timer = threading.Timer(timeout, callback, [process.pid, ])
        timer.start()
    try:
        output, error = process.communicate()
    finally:
        # Always cancel the timer so it cannot fire after the child exited.
        if timeout:
            timer.cancel()

    retcode = process.poll()
    if retcode:
        if retcode == -9:  # killed, assume due to timeout callback
            raise TimeoutError(command, output='\n'.join([output, error]))
        elif ignore != 'all' and retcode not in ignore:
            raise CalledProcessErrorWithStderr(retcode, command, output=output, error=error)
    return output, error
def walk_modules(path):
    """
    Given package name, return a list of all modules (including submodules, etc)
    in that package.
    """
    root = __import__(path, {}, {}, [''])
    modules = [root]
    for _, child_name, is_package in pkgutil.iter_modules(root.__path__):
        child_path = '{}.{}'.format(path, child_name)
        if is_package:
            # Recurse into sub-packages.
            modules.extend(walk_modules(child_path))
        else:
            modules.append(__import__(child_path, {}, {}, ['']))
    return modules
def ensure_directory_exists(dirpath):
"""A filter for directory paths to ensure they exist."""
if not os.path.isdir(dirpath):
os.makedirs(dirpath)
return dirpath
def ensure_file_directory_exists(filepath):
    """
    Ensure the parent directory of ``filepath`` exists (creating it if
    needed) and return ``filepath`` unchanged.  The file itself is *not*
    created.
    """
    parent = os.path.dirname(filepath)
    ensure_directory_exists(parent)
    return filepath
def diff_tokens(before_token, after_token):
    """
    Creates a diff of two tokens.

    Whitespace tokens compare equal irrespective of their exact characters;
    numeric tokens yield the numeric difference as a string; identical tokens
    are returned as-is; anything else produces "[before -> after]".
    """
    changed = "[%s -> %s]" % (before_token, after_token)
    if before_token.isspace() and after_token.isspace():
        return after_token
    if before_token.isdigit() and after_token.isdigit():
        try:
            return str(int(after_token) - int(before_token))
        except ValueError:
            return changed
    if before_token == after_token:
        return after_token
    return changed
def prepare_table_rows(rows):
    """Given a list of lists, make sure they are prepared to be formatted into a table
    by making sure each row has the same number of columns and stringifying all values.

    Returns a new list of rows; each row is a list of strings, padded with ''
    to the width of the longest row.
    """
    # list(map(...)) instead of bare map()/xrange so the rows are indexable
    # and sized on Python 3 as well; behaviour on Python 2 is unchanged.
    rows = [list(map(str, row)) for row in rows]
    max_cols = max(len(row) for row in rows)
    for row in rows:
        row.extend([''] * (max_cols - len(row)))
    return rows
def write_table(rows, wfh, align='>', headers=None):  # pylint: disable=R0914
    """Write a column-aligned table to the specified file object.

    :rows: list of rows (lists of cell values).
    :wfh: writable file-like object.
    :align: format alignment character(s); cycled across columns.
    :headers: optional list of column headers, underlined with dashes.
    """
    if not rows:
        return
    rows = prepare_table_rows(rows)
    num_cols = len(rows[0])
    # cycle specified alignments until we have num_cols of them. This is
    # consitent with how such cases are handled in R, pandas, etc.
    # Fix: next(it)/range() instead of Python-2-only it.next()/xrange();
    # behaviour on Python 2 is unchanged.
    it = cycle(align)
    align = [next(it) for _ in range(num_cols)]
    cols = zip(*rows)
    col_widths = [max(map(len, c)) for c in cols]
    row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
    row_format += '\n'
    if headers:
        wfh.write(row_format.format(*headers))
        underlines = ['-' * len(h) for h in headers]
        wfh.write(row_format.format(*underlines))
    for row in rows:
        wfh.write(row_format.format(*row))
def get_null():
    """Returns the correct null sink based on the OS."""
    if os.name == 'nt':
        return 'NUL'
    return '/dev/null'
def get_traceback(exc=None):
    """
    Return the formatted traceback for ``exc`` (a ``sys.exc_info()``-style
    triple), or for the current exception if ``exc`` is not specified.
    Returns ``None`` when no exception information is available.
    """
    if exc is None:
        exc = sys.exc_info()
    if not exc:
        return None
    tb = exc[2]
    buf = StringIO()
    traceback.print_tb(tb, file=buf)
    # Drop the traceback reference explicitly, see:
    # http://docs.python.org/2/library/sys.html#sys.exc_info
    del tb
    return buf.getvalue()
def merge_dicts(*args, **kwargs):
    """Merge two or more dicts left-to-right via ``_merge_two_dicts``;
    keyword arguments are forwarded to it.

    :raises ValueError: if fewer than two dicts are supplied.
    """
    if len(args) < 2:
        raise ValueError('Must specify at least two dicts to merge.')
    merge_pair = partial(_merge_two_dicts, **kwargs)
    return reduce(merge_pair, args)
def _merge_two_dicts(base, other, list_duplicates='all', match_types=False,  # pylint: disable=R0912,R0914
                     dict_type=dict, should_normalize=True, should_merge_lists=True):
    """Merge dicts normalizing their keys.

    ``other`` wins for scalar values present in both dicts; nested dicts,
    lists and sets are merged recursively according to the flags.

    :list_duplicates: forwarded to ``_merge_two_lists`` for list values.
    :match_types: raise ``ValueError`` when both dicts hold a key whose
        (non-None) values have different types.
    :dict_type: mapping type used for the result.
    :should_normalize: normalize keys/values via ``normalize``.
    :should_merge_lists: when False, ``other``'s list replaces ``base``'s.
    """
    merged = dict_type()
    base_keys = base.keys()
    other_keys = other.keys()
    # Identity fallback when normalization is disabled.
    norm = normalize if should_normalize else lambda x, y: x

    # Partition keys into base-only / other-only / both, preserving order.
    base_only = []
    other_only = []
    both = []
    union = []
    for k in base_keys:
        if k in other_keys:
            both.append(k)
        else:
            base_only.append(k)
        union.append(k)
    for k in other_keys:
        if k in base_keys:
            # NOTE(review): keys present in both dicts were already appended
            # to ``union`` by the loop above, so they are processed twice by
            # the loop below; the second pass recomputes the same value
            # (redundant but harmless).
            union.append(k)
        else:
            union.append(k)
            other_only.append(k)

    for k in union:
        if k in base_only:
            merged[k] = norm(base[k], dict_type)
        elif k in other_only:
            merged[k] = norm(other[k], dict_type)
        elif k in both:
            base_value = base[k]
            other_value = other[k]
            base_type = type(base_value)
            other_type = type(other_value)
            if (match_types and (base_type != other_type) and
                    (base_value is not None) and (other_value is not None)):
                raise ValueError('Type mismatch for {} got {} ({}) and {} ({})'.format(k, base_value, base_type,
                                                                                      other_value, other_type))
            if isinstance(base_value, dict):
                merged[k] = _merge_two_dicts(base_value, other_value, list_duplicates, match_types, dict_type)
            elif isinstance(base_value, list):
                if should_merge_lists:
                    merged[k] = _merge_two_lists(base_value, other_value, list_duplicates, dict_type)
                else:
                    merged[k] = _merge_two_lists([], other_value, list_duplicates, dict_type)
            elif isinstance(base_value, set):
                merged[k] = norm(base_value.union(other_value), dict_type)
            else:
                merged[k] = norm(other_value, dict_type)
        else:  # Should never get here
            raise AssertionError('Unexpected merge key: {}'.format(k))

    return merged
def merge_lists(*args, **kwargs):
    """Merge two or more lists left-to-right via ``_merge_two_lists``;
    keyword arguments are forwarded to it.

    :raises ValueError: if fewer than two lists are supplied.
    """
    if len(args) < 2:
        raise ValueError('Must specify at least two lists to merge.')
    merge_pair = partial(_merge_two_lists, **kwargs)
    return reduce(merge_pair, args)
def _merge_two_lists(base, other, duplicates='all', dict_type=dict):  # pylint: disable=R0912
    """
    Merge lists, normalizing their entries.

    parameters:

        :base, other: the two lists to be merged. ``other`` will be merged on
                      top of base.
        :duplicates: Indicates the strategy of handling entries that appear
                     in both lists. ``all`` will keep occurrences from both
                     lists; ``first`` will only keep occurrences from
                     ``base``; ``last`` will only keep occurrences from
                     ``other``;

                     .. note:: duplicate entries that appear in the *same* list
                               will never be removed.

    String entries prefixed with '~' act as removal markers: they delete the
    un-prefixed entry from the merged list (see ``_check_remove_item``).
    """
    # Scalars are promoted to single-element lists.
    if not isiterable(base):
        base = [base]
    if not isiterable(other):
        other = [other]
    if duplicates == 'all':
        merged_list = []
        for v in normalize(base, dict_type) + normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    elif duplicates == 'first':
        base_norm = normalize(base, dict_type)
        merged_list = normalize(base, dict_type)
        # Apply any removal markers contained in base itself.
        for v in base_norm:
            _check_remove_item(merged_list, v)
        for v in normalize(other, dict_type):
            if not _check_remove_item(merged_list, v):
                if v not in base_norm:
                    merged_list.append(v)  # pylint: disable=no-member
        return merged_list
    elif duplicates == 'last':
        other_norm = normalize(other, dict_type)
        merged_list = []
        for v in normalize(base, dict_type):
            if not _check_remove_item(merged_list, v):
                # Skip base entries that reappear in other (other wins).
                if v not in other_norm:
                    merged_list.append(v)
        for v in other_norm:
            if not _check_remove_item(merged_list, v):
                merged_list.append(v)
        return merged_list
    else:
        raise ValueError('Unexpected value for list duplicates argument: {}. '.format(duplicates) +
                         'Must be in {"all", "first", "last"}.')
def _check_remove_item(the_list, item):
    """Helper function for merge_lists that implements checking whether an item
    should be removed from the list and doing so if needed. Returns ``True`` if
    the item has been removed and ``False`` otherwise.

    An item of the form ``'~entry'`` requests removal of ``'entry'``.
    """
    # NOTE(review): ``basestring`` is Python 2 only.
    if not isinstance(item, basestring):
        return False
    if not item.startswith('~'):
        return False
    actual_item = item[1:]
    # Only the first occurrence is removed.
    if actual_item in the_list:
        del the_list[the_list.index(actual_item)]
    return True
def normalize(value, dict_type=dict):
    """Normalize values. Recursively normalizes dict keys to be lower case,
    no surrounding whitespace, underscore-delimited strings.

    Lists and tuples are normalized element-wise; other values pass through.
    """
    if isinstance(value, dict):
        normalized = dict_type()
        # NOTE(review): iteritems()/basestring are Python 2 only.
        for k, v in value.iteritems():
            if isinstance(k, basestring):
                k = k.strip().lower().replace(' ', '_')
            normalized[k] = normalize(v, dict_type)
        return normalized
    elif isinstance(value, list):
        return [normalize(v, dict_type) for v in value]
    elif isinstance(value, tuple):
        return tuple([normalize(v, dict_type) for v in value])
    else:
        return value
VALUE_REGEX = re.compile(r'(\d+(?:\.\d+)?)\s*(\w*)')

UNITS_MAP = {
    's': 'seconds',
    'ms': 'milliseconds',
    'us': 'microseconds',
    'ns': 'nanoseconds',
    'V': 'volts',
    'A': 'amps',
    'mA': 'milliamps',
    'J': 'joules',
}


def parse_value(value_string):
    """Parse a string containing a numeric value.

    Returns a ``(value, units)`` tuple where ``value`` is an ``int`` or
    ``float``, and ``units`` is the expanded unit name (or the raw suffix if
    it is not in ``UNITS_MAP``).  If no number is found, the original string
    is returned with ``None`` units.
    """
    match = VALUE_REGEX.search(value_string)
    if not match:
        return (value_string, None)
    number_text, unit_text = match.groups()
    if '.' in number_text:
        number = float(number_text)
    else:
        number = int(number_text)
    return (number, UNITS_MAP.get(unit_text, unit_text))
def get_meansd(values):
    """Returns mean and (population) standard deviation of the specified
    values; ``(nan, nan)`` for an empty sequence."""
    if not values:
        return float('nan'), float('nan')
    count = len(values)
    mean = sum(values) / count
    variance = sum((v - mean) ** 2 for v in values) / count
    return mean, math.sqrt(variance)
def geomean(values):
    """Returns the geometric mean of the values."""
    # Explicit product loop instead of the Python-2-only bare ``reduce``
    # builtin; results are identical on Python 2 and this also runs on 3.
    product = 1
    for v in values:
        product *= v
    return product ** (1.0 / len(values))
def capitalize(text):
    """Capitalise *text*: first letter upper case, all subsequent letters
    lower case.  Returns '' for empty input."""
    if not text:
        return ''
    head, tail = text[0], text[1:]
    return head.upper() + tail.lower()
def convert_new_lines(text):
    """Normalise Windows (CRLF) and old-Mac (CR) line endings to LF."""
    without_crlf = text.replace('\r\n', '\n')
    return without_crlf.replace('\r', '\n')
def escape_quotes(text):
    """Escape quotes, and escaped quotes, in the specified text."""
    # Double up existing backslash-quote escapes first, then backslash-escape
    # the remaining bare quotes of either kind.
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    doubled = doubled.replace('\'', '\\\'')
    return doubled.replace('\"', '\\\"')
def escape_single_quotes(text):
    """Escape single quotes, and escaped single quotes, in the specified text.

    Each ``'`` becomes ``'\\''``, suitable for embedding inside a
    single-quoted shell string.
    """
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\'', '\'\\\'\'')
def escape_double_quotes(text):
    """Escape double quotes, and escaped double quotes, in the specified text."""
    doubled = re.sub(r'\\("|\')', r'\\\\\1', text)
    return doubled.replace('\"', '\\\"')
def getch(count=1):
    """Read ``count`` characters from standard input.

    On Windows this uses msvcrt; elsewhere the terminal is temporarily put
    in raw mode so characters are returned without waiting for Enter.
    """
    if os.name == 'nt':
        import msvcrt  # pylint: disable=F0401
        # NOTE(review): xrange is Python 2 only.
        return ''.join([msvcrt.getch() for _ in xrange(count)])
    else:  # assume Unix
        import tty  # NOQA
        import termios  # NOQA
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(count)
        finally:
            # Always restore the previous terminal settings.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
def isiterable(obj):
    """Returns ``True`` if the specified object is iterable and
    *is not a string type*, ``False`` otherwise."""
    # Strings iterate but are deliberately excluded here.
    # NOTE(review): ``basestring`` is Python 2 only.
    return hasattr(obj, '__iter__') and not isinstance(obj, basestring)
def utc_to_local(dt):
    """Convert naive datetime to local time zone, assuming UTC."""
    aware = dt.replace(tzinfo=tz.tzutc())
    return aware.astimezone(tz.tzlocal())
def local_to_utc(dt):
    """Convert naive datetime to UTC, assuming local time zone."""
    aware = dt.replace(tzinfo=tz.tzlocal())
    return aware.astimezone(tz.tzutc())
def as_relative(path):
    """Convert *path* to a relative path by stripping the drive (if any,
    on platforms that have drives) and any leading path separators."""
    _, tail = os.path.splitdrive(path)
    return tail.lstrip(os.sep)
def get_cpu_mask(cores):
    """Return a string with the hex for the cpu mask for the specified core
    numbers.  Duplicate core numbers are harmless (each sets a single bit)."""
    mask = sum(1 << core for core in set(cores))
    return '0x{0:x}'.format(mask)
def load_class(classpath):
    """Loads the specified Python class. ``classpath`` must be a fully-qualified
    class name (i.e. namespaced under module/package).

    Fix: ``__import__('a.b.c')`` returns the *top-level* package ``a``, so
    looking the class up on its return value failed for classes more than
    one module deep.  Passing a non-empty ``fromlist`` makes ``__import__``
    return the leaf module itself.
    """
    modname, clsname = classpath.rsplit('.', 1)
    module = __import__(modname, {}, {}, [clsname])
    return getattr(module, clsname)
def get_pager():
    """Returns the name of the system pager program.

    Prefers the PAGER environment variable, then ``less``, then ``more``;
    may return ``None`` if no pager can be found.
    """
    pager = os.getenv('PAGER')
    if pager is None:
        pager = find_executable('less') or find_executable('more')
    return pager
def enum_metaclass(enum_param, return_name=False, start=0):
    """
    Returns a ``type`` subclass that may be used as a metaclass for
    an enum.

    Parameters:

        :enum_param: the name of class attribute that defines enum values.
                     The metaclass will add a class attribute for each value in
                     ``enum_param``. The value of the attribute depends on the type
                     of ``enum_param`` and on the values of ``return_name``. If
                     ``return_name`` is ``True``, then the value of the new attribute is
                     the name of that attribute; otherwise, if ``enum_param`` is a ``list``
                     or a ``tuple``, the value will be the index of that param in
                     ``enum_param``, optionally offset by ``start``, otherwise, it will
                     be assumed that ``enum_param`` implements a dict-like interface and
                     the value will be ``enum_param[attr_name]``.
        :return_name: If ``True``, the enum values will be the names of enum attributes. If
                      ``False``, the default, the values will depend on the type of
                      ``enum_param`` (see above).
        :start: If ``enum_param`` is a list or a tuple, and ``return_name`` is ``False``,
                this specifies an "offset" that will be added to the index of the attribute
                within ``enum_param`` to form the value.
    """
    class __EnumMeta(type):
        def __new__(mcs, clsname, bases, attrs):
            cls = type.__new__(mcs, clsname, bases, attrs)
            # Values are read off the constructed class so that subclasses
            # may supply (or inherit) the attribute named by enum_param.
            values = getattr(cls, enum_param, [])
            if return_name:
                for name in values:
                    setattr(cls, name, name)
            else:
                if isinstance(values, list) or isinstance(values, tuple):
                    for i, name in enumerate(values):
                        setattr(cls, name, i + start)
                else:  # assume dict-like
                    for name in values:
                        setattr(cls, name, values[name])
            return cls
    return __EnumMeta
def which(name):
    """Platform-independent version of UNIX which utility.

    Returns the full path to ``name`` on the PATH (trying PATHEXT
    extensions on Windows), or ``None`` if it cannot be found.
    """
    if os.name == 'nt':
        # NOTE(review): os.getenv() returns None when the variable is unset,
        # which would make these .split() calls raise AttributeError.
        paths = os.getenv('PATH').split(os.pathsep)
        exts = os.getenv('PATHEXT').split(os.pathsep)
        for path in paths:
            testpath = os.path.join(path, name)
            if os.path.isfile(testpath):
                return testpath
            for ext in exts:
                testpathext = testpath + ext
                if os.path.isfile(testpathext):
                    return testpathext
        return None
    else:  # assume UNIX-like
        try:
            # Delegates to the system `which`; check_output is this module's
            # wrapper and returns (stdout, stderr).
            return check_output(['which', name])[0].strip()
        except subprocess.CalledProcessError:
            return None
_bash_color_regex = re.compile('\x1b\[[0-9;]+m')


def strip_bash_colors(text):
    """Remove ANSI colour escape sequences (ESC[...m) from *text*."""
    return _bash_color_regex.sub('', text)
def format_duration(seconds, sep=' ', order=['day', 'hour', 'minute', 'second']):  # pylint: disable=dangerous-default-value
    """
    Formats the specified number of seconds into human-readable duration.

    :seconds: number of seconds, or a ``timedelta``.
    :sep: separator placed between components.
    :order: duration components to include, largest first.
    """
    if isinstance(seconds, timedelta):
        td = seconds
    else:
        td = timedelta(seconds=seconds)
    # Adding to year 1 lets the datetime fields act as day/hour/min counters.
    dt = datetime(1, 1, 1) + td
    result = []
    for item in order:
        value = getattr(dt, item, None)
        # Fix: compare with '==' -- the original used 'is' on a string,
        # which only worked by accident of CPython string interning.
        if item == 'day':
            value -= 1  # datetime days are 1-based
        if not value:
            continue
        suffix = '' if value == 1 else 's'
        result.append('{} {}{}'.format(value, item, suffix))
    return sep.join(result)
def get_article(word):
    """
    Returns the appropriate indefinite article for the word (ish).

    .. note:: Indefinite article assignment in English is based on sound
              rather than spelling, so this will not work correctly in all
              cases; e.g. this will return ``"a hour"``.
    """
    if word[0] in 'aoeiu':
        return 'an'
    return 'a'
def get_random_string(length):
    """Returns a random ASCII (letters + digits) string of the specified length."""
    # range() instead of the Python-2-only xrange(); behaviour is unchanged.
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
class LoadSyntaxError(Exception):
    """Raised when a config structure (Python or YAML) fails to parse;
    carries the offending file path and line number."""

    def __init__(self, message, filepath, lineno):
        super(LoadSyntaxError, self).__init__(message)
        self.filepath = filepath
        self.lineno = lineno

    def __str__(self):
        # NOTE(review): Exception.message is Python 2 only.
        message = 'Syntax Error in {}, line {}:\n\t{}'
        return message.format(self.filepath, self.lineno, self.message)
# Length of random module names generated for text-based config loads.
RAND_MOD_NAME_LEN = 30
# Characters that are invalid in Python identifiers.
BAD_CHARS = string.punctuation + string.whitespace
# NOTE(review): string.maketrans() is Python 2 only (Python 3 uses
# str.maketrans instead).
TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))


def to_identifier(text):
    """Converts text to a valid Python identifier by replacing all
    whitespace and punctuation."""
    # Collapse runs of underscores produced by consecutive bad characters.
    return re.sub('_+', '_', text.translate(TRANS_TABLE))
def load_struct_from_python(filepath=None, text=None):
    """Parses a config structure from a .py file. The structure should be composed
    of basic Python types (strings, ints, lists, dicts, etc.).

    Exactly one of ``filepath`` or ``text`` must be given; returns a dict of
    the resulting module's public (non-underscore) names.

    .. warning:: the file/text is *executed*; never load untrusted input.
    """
    if not (filepath or text) or (filepath and text):
        raise ValueError('Exactly one of filepath or text must be specified.')
    try:
        if filepath:
            # Module name derived from the path, so reloading the same file
            # reuses a stable module name.
            modname = to_identifier(filepath)
            mod = imp.load_source(modname, filepath)
        else:
            modname = get_random_string(RAND_MOD_NAME_LEN)
            while modname in sys.modules:  # highly unlikely, but...
                modname = get_random_string(RAND_MOD_NAME_LEN)
            mod = imp.new_module(modname)
            # NOTE(review): Python 2 exec-statement syntax (a SyntaxError on
            # Python 3); iteritems() and e.message below are also 2.x-only.
            exec text in mod.__dict__  # pylint: disable=exec-used
        return dict((k, v)
                    for k, v in mod.__dict__.iteritems()
                    if not k.startswith('_'))
    except SyntaxError as e:
        raise LoadSyntaxError(e.message, filepath, e.lineno)
def load_struct_from_yaml(filepath=None, text=None):
    """Parses a config structure from a .yaml file. The structure should be composed
    of basic Python types (strings, ints, lists, dicts, etc.).

    Exactly one of ``filepath`` or ``text`` must be given.

    .. warning:: uses ``yaml.load()`` without an explicit safe Loader, which
        can construct arbitrary Python objects; do not feed it untrusted
        input (consider ``yaml.safe_load``).
    """
    if not (filepath or text) or (filepath and text):
        raise ValueError('Exactly one of filepath or text must be specified.')
    try:
        if filepath:
            with open(filepath) as fh:
                return yaml.load(fh)
        else:
            return yaml.load(text)
    except yaml.YAMLError as e:
        lineno = None
        if hasattr(e, 'problem_mark'):
            lineno = e.problem_mark.line  # pylint: disable=no-member
        # NOTE(review): e.message is Python 2 only.
        raise LoadSyntaxError(e.message, filepath=filepath, lineno=lineno)
def load_struct_from_file(filepath):
    """
    Attempts to parse a Python structure consisting of basic types from the specified file.

    :raises ValueError: if the file has an unknown extension.
    :raises LoadSyntaxError: if there is an issue parsing the file.
    """
    extn = os.path.splitext(filepath)[1].lower()
    if extn in ('.py', '.pyc', '.pyo'):
        return load_struct_from_python(filepath)
    if extn == '.yaml':
        return load_struct_from_yaml(filepath)
    raise ValueError('Unknown format "{}": {}'.format(extn, filepath))
def unique(alist):
    """
    Returns a list containing only unique elements from the input list (but preserves
    order, unlike sets).
    """
    result = []
    for item in alist:
        if item in result:
            continue
        result.append(item)
    return result
def open_file(filepath):
    """
    Open the specified file path with the associated launcher in an OS-agnostic way.

    Returns ``None`` on Windows (``os.startfile``); the launcher's exit code
    elsewhere (``subprocess.call``).
    """
    if os.name == 'nt':  # Windows
        return os.startfile(filepath)  # pylint: disable=no-member
    elif sys.platform == 'darwin':  # Mac OSX
        return subprocess.call(['open', filepath])
    else:  # assume Linux or similar running a freedesktop-compliant GUI
        return subprocess.call(['xdg-open', filepath])
def ranges_to_list(ranges_string):
    """Converts a sysfs-style ranges string, e.g. ``"0,2-4"``, into a list, e.g. ``[0,2,3,4]``"""
    values = []
    for rg in ranges_string.split(','):
        if '-' in rg:
            first, last = map(int, rg.split('-'))
            # range() instead of the Python-2-only xrange(); same values.
            values.extend(range(first, last + 1))
        else:
            values.append(int(rg))
    return values
def list_to_ranges(values):
    """Converts a list, e.g ``[0,2,3,4]``, into a sysfs-style ranges string, e.g. ``"0,2-4"``

    Runs of consecutive integers are detected by grouping on
    ``index - value``, which is constant within such a run.
    """
    range_groups = []
    # Fix: the Python-2-only tuple-parameter lambda "lambda (i, x): i - x" is
    # a SyntaxError on Python 3; index the pair explicitly instead, and
    # materialise each group as a list so it can be indexed and measured.
    for _, group_iter in groupby(enumerate(values), lambda pair: pair[0] - pair[1]):
        range_groups.append([x for _, x in group_iter])
    range_strings = []
    for group in range_groups:
        if len(group) == 1:
            range_strings.append(str(group[0]))
        else:
            range_strings.append('{}-{}'.format(group[0], group[-1]))
    return ','.join(range_strings)
def list_to_mask(values, base=0x0):
    """Converts the specified list of integer values into a bit mask for
    those values.  Optionally, the bits are OR-ed on top of an existing
    ``base`` mask."""
    mask = base
    for value in values:
        mask |= 1 << value
    return mask
def mask_to_list(mask):
    """Converts the specified integer bitmask into a list of
    indexes of bits that are set in the mask, highest bit first."""
    size = len(bin(mask)) - 2  # because of "0b"
    # range() instead of the Python-2-only xrange(); same values.
    return [size - i - 1 for i in range(size)
            if mask & (1 << size - i - 1)]
def sha256(path, chunk=2048):
    """Calculates SHA256 hexdigest of the file at the specified path, reading
    it in ``chunk``-byte pieces so large files need not fit in memory."""
    digest = hashlib.sha256()
    with open(path, 'rb') as fh:
        for buf in iter(lambda: fh.read(chunk), b''):
            digest.update(buf)
    return digest.hexdigest()
def urljoin(*parts):
    """Join URL fragments with single '/' separators, trimming trailing
    slashes from every part (including the last)."""
    trimmed = [part.rstrip('/') for part in parts]
    return '/'.join(trimmed)
# Shared cache for all functions wrapped with @memoized.
__memo_cache = {}


def memoized(func):
    """A decorator for memoizing functions and methods.

    .. warning:: cache keys are built from ``id()`` of the positional
        arguments and ``str()`` of keyword values, so distinct objects that
        compare equal miss the cache, and ids may be recycled once an
        argument is garbage collected.  Best suited to long-lived arguments.
    """
    func_id = repr(func)

    def memoize_wrapper(*args, **kwargs):
        id_string = func_id + ','.join([str(id(a)) for a in args])
        # Fix: .items() instead of the Python-2-only .iteritems(); on
        # Python 2.7 the behaviour is identical.
        id_string += ','.join('{}={}'.format(k, v)
                              for k, v in kwargs.items())
        if id_string not in __memo_cache:
            __memo_cache[id_string] = func(*args, **kwargs)
        return __memo_cache[id_string]

    return memoize_wrapper
def commonprefix(file_list, sep=os.sep):
    """
    Find the lowest common base folder of a passed list of files.

    ``os.path.commonprefix`` works character-wise, so its result can end in
    the middle of a path component; the last component is trimmed whenever
    it does not match a full component of the first file.
    """
    char_prefix = os.path.commonprefix(file_list)
    prefix_parts = char_prefix.split(sep)
    reference_parts = file_list[0].split(sep)
    last = len(prefix_parts) - 1
    if prefix_parts[last] != reference_parts[last]:
        prefix_parts = prefix_parts[:-1]
    return sep.join(prefix_parts)
|
{
"content_hash": "1b4e2fbf37603d1de45eca55ef43c6d9",
"timestamp": "",
"source": "github",
"line_count": 847,
"max_line_length": 124,
"avg_line_length": 33.91263282172373,
"alnum_prop": 0.5996727475281994,
"repo_name": "bjackman/workload-automation",
"id": "a66e985beb6f573554290459d7516c521baa5dc5",
"size": "29311",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wlauto/utils/misc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "40003"
},
{
"name": "HTML",
"bytes": "243720"
},
{
"name": "Java",
"bytes": "226912"
},
{
"name": "JavaScript",
"bytes": "6578"
},
{
"name": "Jupyter Notebook",
"bytes": "1322"
},
{
"name": "Makefile",
"bytes": "430"
},
{
"name": "Python",
"bytes": "1555462"
},
{
"name": "Shell",
"bytes": "39222"
},
{
"name": "Vim script",
"bytes": "901"
}
],
"symlink_target": ""
}
|
from setuptools import setup
# Minimal packaging stub: registers only the project name and version; no
# packages, dependencies, or entry points are declared.
setup(
    name='learning-nodejs',
    version='0.1.0',
)
|
{
"content_hash": "5ae0f837e7e2b7e48741fa5e3957b1a9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 28,
"avg_line_length": 14.666666666666666,
"alnum_prop": 0.6590909090909091,
"repo_name": "imsardine/learning",
"id": "d39d4cdc4963630aced40063641c1d4679fb53d8",
"size": "88",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nodejs/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "5865"
},
{
"name": "HTML",
"bytes": "1033"
},
{
"name": "Java",
"bytes": "90086"
},
{
"name": "JavaScript",
"bytes": "2052"
},
{
"name": "Makefile",
"bytes": "17149"
},
{
"name": "Python",
"bytes": "328931"
},
{
"name": "Rust",
"bytes": "899"
},
{
"name": "Shell",
"bytes": "610"
},
{
"name": "Swift",
"bytes": "3004"
}
],
"symlink_target": ""
}
|
"""Add password reset expiry
Revision ID: 217af36c820
Revises: f9897e968a
Create Date: 2014-06-28 11:04:23.134422
"""
# revision identifiers, used by Alembic.
revision = '217af36c820'
down_revision = 'f9897e968a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable ``passwordResetExpiry`` datetime column to ``user``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('passwordResetExpiry', sa.DateTime(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Drop the ``passwordResetExpiry`` column from ``user`` (reverses upgrade)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'passwordResetExpiry')
    ### end Alembic commands ###
|
{
"content_hash": "b72cc0777ef4ec9b166229fdcde68d9e",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 89,
"avg_line_length": 24.384615384615383,
"alnum_prop": 0.6987381703470031,
"repo_name": "Kerbas-ad-astra/KerbalStuff",
"id": "0a702c8a9d7b77b9870d70ae47f0f16ea5505143",
"size": "634",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "alembic/versions/217af36c820_add_password_reset_expiry.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "117498"
},
{
"name": "CoffeeScript",
"bytes": "30971"
},
{
"name": "HTML",
"bytes": "137143"
},
{
"name": "JavaScript",
"bytes": "210471"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "138384"
}
],
"symlink_target": ""
}
|
"""Views for when a user first visits."""
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic import TemplateView
from open_connect.connect_core.utils.views import CommonViewMixin
class WelcomeView(CommonViewMixin, TemplateView):
    """Welcome page that redirects authenticated users to the right place:
    members of at least one group go to their threads, others to the group
    list; anonymous visitors see the welcome template."""
    template_name = 'welcome.html'
    title = "Welcome"

    def get(self, request, *args, **kwargs):
        """Process get request."""
        if not request.user.is_authenticated():
            # Anonymous: fall through to the normal template rendering.
            return super(WelcomeView, self).get(request, *args, **kwargs)
        if request.user.groups.all().exists():
            return HttpResponseRedirect(reverse('threads'))
        return HttpResponseRedirect(reverse('groups'))
|
{
"content_hash": "8144b88dd9be25d006eb76377d8c2992",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 69,
"avg_line_length": 37.04545454545455,
"alnum_prop": 0.6883435582822086,
"repo_name": "lpatmo/actionify_the_news",
"id": "3759e14ff745c0e2796ce50e0fd59b6c88780ff3",
"size": "815",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "open_connect/welcome/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "546928"
},
{
"name": "HTML",
"bytes": "151617"
},
{
"name": "JavaScript",
"bytes": "211965"
},
{
"name": "Python",
"bytes": "882989"
}
],
"symlink_target": ""
}
|
import json
from datetime import date, datetime
class CJsonEncoder(json.JSONEncoder):
    """JSON encoder that renders datetime/date objects as formatted strings."""

    def default(self, obj):
        # datetime is checked before date: datetime is a subclass of date.
        if isinstance(obj, datetime):
            return obj.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(obj, date):
            return obj.strftime('%Y-%m-%d')
        return json.JSONEncoder.default(self, obj)
def InvertDict(d):
    """Return a new dict mapping each value of *d* back to its key.

    Values must be hashable; if *d* contains duplicate values, one of the
    corresponding keys wins (the last one seen during iteration).

    The original implementation used ``itertools.izip`` with
    ``itervalues``/``iterkeys``, which only exists on Python 2; the dict
    comprehension below is equivalent and runs on both Python 2.7 and 3.
    """
    return {v: k for k, v in d.items()}
|
{
"content_hash": "9111e3120b5712e7cb6e8be788670a68",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 54,
"avg_line_length": 30.466666666666665,
"alnum_prop": 0.6258205689277899,
"repo_name": "Z2Y/CUIT-ACM-Website",
"id": "41fb6b131cfb499127e0ed2993a8ddb920a2ec5d",
"size": "457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "util/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "311196"
},
{
"name": "HTML",
"bytes": "1941814"
},
{
"name": "JavaScript",
"bytes": "2024851"
},
{
"name": "PHP",
"bytes": "1684"
},
{
"name": "Python",
"bytes": "142714"
}
],
"symlink_target": ""
}
|
from django.db.models import CharField
from django.utils.translation import ugettext_lazy as _
from localflavor.deprecation import DeprecatedPhoneNumberField
from .forms import USPhoneNumberField as USPhoneNumberFormField
from .forms import USSocialSecurityNumberField as USSocialSecurityNumberFieldFormField
from .forms import USZipCodeField as USZipCodeFormField
from .us_states import STATE_CHOICES, USPS_CHOICES
class USStateField(CharField):
    """
    A model field that stores the two-letter U.S. state abbreviation in the database.

    Forms represent it as a ``forms.USStateField`` field.
    """
    description = _("U.S. state (two uppercase letters)")

    def __init__(self, *args, **kwargs):
        # Impose the canonical choices and length regardless of caller input.
        kwargs.update(choices=STATE_CHOICES, max_length=2)
        super(USStateField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # 'choices' is always re-applied by __init__, so it is removed from
        # the deconstructed kwargs.
        name, path, args, kwargs = super(USStateField, self).deconstruct()
        kwargs.pop('choices')
        return name, path, args, kwargs
class USPostalCodeField(CharField):
    """
    A model field that stores the two-letter U.S. Postal Service abbreviation in the database.

    Forms represent it as a :class:`~localflavor.us.forms.USPSSelect`` field.

    .. note::
        If you are looking for a model field that validates U.S. ZIP codes
        please use :class:`~localflavor.us.models.USZipCodeField`.
    """
    description = _("U.S. postal code (two uppercase letters)")

    def __init__(self, *args, **kwargs):
        # Impose the canonical choices and length regardless of caller input.
        kwargs.update(choices=USPS_CHOICES, max_length=2)
        super(USPostalCodeField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # 'choices' is always re-applied by __init__, so it is removed from
        # the deconstructed kwargs.
        name, path, args, kwargs = super(USPostalCodeField, self).deconstruct()
        kwargs.pop('choices')
        return name, path, args, kwargs
class USZipCodeField(CharField):
    """
    A model field that stores the U.S. ZIP code in the database.

    Forms represent it as a :class:`~localflavor.us.forms.USZipCodeField` field.

    .. note::
        If you are looking for a model field with a list of U.S. Postal Service
        locations please use :class:`~localflavor.us.models.USPostalCodeField`.

    .. versionadded:: 1.1
    """
    description = _("U.S. ZIP code")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 10
        super(USZipCodeField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Callers may override form_class; otherwise the ZIP-code form field
        # is used.
        options = dict(kwargs)
        options.setdefault('form_class', USZipCodeFormField)
        return super(USZipCodeField, self).formfield(**options)
class PhoneNumberField(CharField, DeprecatedPhoneNumberField):
    """
    A :class:`~django.db.models.CharField` that checks that the value is a valid U.S.A.-style phone number.

    (in the format ``XXX-XXX-XXXX``).
    """
    description = _("Phone number")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 20
        super(PhoneNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Callers may override form_class; otherwise the U.S. phone-number
        # form field is used.
        options = dict(kwargs)
        options.setdefault('form_class', USPhoneNumberFormField)
        return super(PhoneNumberField, self).formfield(**options)
class USSocialSecurityNumberField(CharField):
    """
    A model field that stores the security number in the format ``XXX-XX-XXXX``.

    Forms represent it as ``forms.USSocialSecurityNumberField`` field.

    .. versionadded:: 1.1
    """
    description = _("Social security number")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = 11
        super(USSocialSecurityNumberField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # Callers may override form_class; otherwise the SSN form field is used.
        options = dict(kwargs)
        options.setdefault('form_class', USSocialSecurityNumberFieldFormField)
        return super(USSocialSecurityNumberField, self).formfield(**options)
|
{
"content_hash": "ff9a86db62ce548ef5149e00e69b58c4",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 107,
"avg_line_length": 31.859504132231404,
"alnum_prop": 0.6658884565499351,
"repo_name": "jieter/django-localflavor",
"id": "67d12c2b47fe99c1094a9979a97b1f560681e066",
"size": "3855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "localflavor/us/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "896597"
}
],
"symlink_target": ""
}
|
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this data, including any software or models in source or binary
# form, as well as any drawings, specifications, and documentation
# (collectively "the Data"), to deal in the Data without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Data, and to
# permit persons to whom the Data is furnished to do so, subject to the
# following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Data.
# THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
# =======================
# This version of the META tools is a fork of an original version produced
# by Vanderbilt University's Institute for Software Integrated Systems (ISIS).
# Their license statement:
# Copyright (C) 2011-2014 Vanderbilt University
# Developed with the sponsorship of the Defense Advanced Research Projects
# Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights
# as defined in DFARS 252.227-7013.
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this data, including any software or models in source or binary
# form, as well as any drawings, specifications, and documentation
# (collectively "the Data"), to deal in the Data without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Data, and to
# permit persons to whom the Data is furnished to do so, subject to the
# following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Data.
# THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA.
# .\_iFAB.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:de45d6eea5b439c583053bcc995a5a07ca6f002d
# Generated 2014-11-18 14:25:23.918000 by PyXB version 1.2.3
# Namespace iFAB [xmlns:iFAB]
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:0613818f-6f61-11e4-85c1-542696dd94ef')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.3'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
# NOTE: All namespace declarations are reserved within the binding
# Register the iFAB namespace and the binding categories this module
# populates ('typeBinding' / 'elementBinding').
Namespace = pyxb.namespace.NamespaceForURI(u'iFAB', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document.  This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser.  You might pass the URI from which
    the document was obtained.
    """
    # Fall back to the DOM code path when the runtime is not configured
    # for SAX-style parsing.
    if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = saxer.getContentHandler()
    xmld = xml_text
    # Python 2 only: encode unicode text to bytes before feeding the parser.
    if isinstance(xmld, unicode):
        xmld = xmld.encode(pyxb._InputEncoding)
    saxer.parse(io.BytesIO(xmld))
    instance = handler.rootObject()
    return instance
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.

    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: {iFAB}baseMaterial
class baseMaterial (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}baseMaterial; values: 'Al', 'Plain Carbon Steel'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'baseMaterial')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 101, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
baseMaterial._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=baseMaterial, enum_prefix=None)
baseMaterial.Al = baseMaterial._CF_enumeration.addEnumeration(unicode_value=u'Al', tag=u'Al')
baseMaterial.Plain_Carbon_Steel = baseMaterial._CF_enumeration.addEnumeration(unicode_value=u'Plain Carbon Steel', tag=u'Plain_Carbon_Steel')
baseMaterial._InitializeFacetMap(baseMaterial._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'baseMaterial', baseMaterial)
# Atomic simple type: {iFAB}fillerMaterial
class fillerMaterial (pyxb.binding.datatypes.string):
    """String simple type {iFAB}fillerMaterial (no facet restrictions)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'fillerMaterial')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 108, 2)
    _Documentation = None
fillerMaterial._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'fillerMaterial', fillerMaterial)
# Atomic simple type: {iFAB}fluxMaterial
class fluxMaterial (pyxb.binding.datatypes.string):
    """String simple type {iFAB}fluxMaterial (no facet restrictions)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'fluxMaterial')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 112, 2)
    _Documentation = None
fluxMaterial._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'fluxMaterial', fluxMaterial)
# Atomic simple type: {iFAB}glue
class glue (pyxb.binding.datatypes.string):
    """String simple type {iFAB}glue (no facet restrictions)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'glue')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 116, 2)
    _Documentation = None
glue._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', u'glue', glue)
# Atomic simple type: {iFAB}fasteningMethod
class fasteningMethod (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}fasteningMethod; values: 'Bolted', 'Bolted (blind)', 'Machined Screw', 'Press Fit', 'Snap Fit', 'Crimp/Clamp Fit'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'fasteningMethod')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 120, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
fasteningMethod._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=fasteningMethod, enum_prefix=None)
fasteningMethod.Bolted = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Bolted', tag=u'Bolted')
fasteningMethod.Bolted_blind = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Bolted (blind)', tag=u'Bolted_blind')
fasteningMethod.Machined_Screw = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Machined Screw', tag=u'Machined_Screw')
fasteningMethod.Press_Fit = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Press Fit', tag=u'Press_Fit')
fasteningMethod.Snap_Fit = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Snap Fit', tag=u'Snap_Fit')
fasteningMethod.CrimpClamp_Fit = fasteningMethod._CF_enumeration.addEnumeration(unicode_value=u'Crimp/Clamp Fit', tag=u'CrimpClamp_Fit')
fasteningMethod._InitializeFacetMap(fasteningMethod._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'fasteningMethod', fasteningMethod)
# Atomic simple type: {iFAB}jointType
class jointType (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}jointType; values: 'Butt', 'Corner', 'Edge', 'Lap', 'Tee'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'jointType')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 131, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
jointType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=jointType, enum_prefix=None)
jointType.Butt = jointType._CF_enumeration.addEnumeration(unicode_value=u'Butt', tag=u'Butt')
jointType.Corner = jointType._CF_enumeration.addEnumeration(unicode_value=u'Corner', tag=u'Corner')
jointType.Edge = jointType._CF_enumeration.addEnumeration(unicode_value=u'Edge', tag=u'Edge')
jointType.Lap = jointType._CF_enumeration.addEnumeration(unicode_value=u'Lap', tag=u'Lap')
jointType.Tee = jointType._CF_enumeration.addEnumeration(unicode_value=u'Tee', tag=u'Tee')
jointType._InitializeFacetMap(jointType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'jointType', jointType)
# Atomic simple type: {iFAB}weldType
class weldType (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}weldType; values: 'Seam', 'Stitch', 'Spot'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'weldType')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 141, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
weldType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=weldType, enum_prefix=None)
weldType.Seam = weldType._CF_enumeration.addEnumeration(unicode_value=u'Seam', tag=u'Seam')
weldType.Stitch = weldType._CF_enumeration.addEnumeration(unicode_value=u'Stitch', tag=u'Stitch')
weldType.Spot = weldType._CF_enumeration.addEnumeration(unicode_value=u'Spot', tag=u'Spot')
weldType._InitializeFacetMap(weldType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'weldType', weldType)
# Atomic simple type: {iFAB}weldPenetration
class weldPenetration (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}weldPenetration; values: 'Full', 'Partial'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'weldPenetration')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 149, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
weldPenetration._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=weldPenetration, enum_prefix=None)
weldPenetration.Full = weldPenetration._CF_enumeration.addEnumeration(unicode_value=u'Full', tag=u'Full')
weldPenetration.Partial = weldPenetration._CF_enumeration.addEnumeration(unicode_value=u'Partial', tag=u'Partial')
weldPenetration._InitializeFacetMap(weldPenetration._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'weldPenetration', weldPenetration)
# Atomic simple type: {iFAB}inspectionRequirement
class inspectionRequirement (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin):
    """Token enumeration {iFAB}inspectionRequirement; values: 'Visual', 'X-Ray'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'inspectionRequirement')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 156, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
inspectionRequirement._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=inspectionRequirement, enum_prefix=None)
inspectionRequirement.Visual = inspectionRequirement._CF_enumeration.addEnumeration(unicode_value=u'Visual', tag=u'Visual')
inspectionRequirement.X_Ray = inspectionRequirement._CF_enumeration.addEnumeration(unicode_value=u'X-Ray', tag=u'X_Ray')
inspectionRequirement._InitializeFacetMap(inspectionRequirement._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'inspectionRequirement', inspectionRequirement)
# Atomic simple type: {iFAB}guid
class guid (pyxb.binding.datatypes.normalizedString):
    """normalizedString simple type {iFAB}guid restricted by pattern to UUID form (8-4-4-4-12 hex digits)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'guid')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 12, 2)
    _Documentation = None
# Pattern facet and namespace registration (generated by PyXB).
guid._CF_pattern = pyxb.binding.facets.CF_pattern()
guid._CF_pattern.addPattern(pattern=u'[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}')
guid._InitializeFacetMap(guid._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'guid', guid)
# Atomic simple type: {iFAB}nonNegativeDecimal
class nonNegativeDecimal (pyxb.binding.datatypes.decimal):
    """Decimal simple type {iFAB}nonNegativeDecimal restricted to values >= 0 (minInclusive facet)."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'nonNegativeDecimal')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 18, 2)
    _Documentation = None
# minInclusive facet and namespace registration (generated by PyXB).
nonNegativeDecimal._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=nonNegativeDecimal, value=pyxb.binding.datatypes.decimal(0.0))
nonNegativeDecimal._InitializeFacetMap(nonNegativeDecimal._CF_minInclusive)
Namespace.addCategoryObject('typeBinding', u'nonNegativeDecimal', nonNegativeDecimal)
# Atomic simple type: {iFAB}currency
class currency (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}currency; the only value is 'USD'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'currency')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 122, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
currency._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=currency, enum_prefix=None)
currency.USD = currency._CF_enumeration.addEnumeration(unicode_value=u'USD', tag=u'USD')
currency._InitializeFacetMap(currency._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'currency', currency)
# Atomic simple type: {iFAB}timeUnit
class timeUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}timeUnit; values: 'week', 'day', 'hr', 'min', 'sec'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 128, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
timeUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=timeUnit, enum_prefix=None)
timeUnit.week = timeUnit._CF_enumeration.addEnumeration(unicode_value=u'week', tag=u'week')
timeUnit.day = timeUnit._CF_enumeration.addEnumeration(unicode_value=u'day', tag=u'day')
timeUnit.hr = timeUnit._CF_enumeration.addEnumeration(unicode_value=u'hr', tag=u'hr')
timeUnit.min = timeUnit._CF_enumeration.addEnumeration(unicode_value=u'min', tag=u'min')
timeUnit.sec = timeUnit._CF_enumeration.addEnumeration(unicode_value=u'sec', tag=u'sec')
timeUnit._InitializeFacetMap(timeUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'timeUnit', timeUnit)
# Atomic simple type: {iFAB}lengthUnit
class lengthUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}lengthUnit; values: 'mile', 'yd', 'ft', 'in', 'km', 'm', 'cm', 'mm', 'um', 'nm'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'lengthUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 138, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
# Note: the 'in' value gets tag 'in_' because 'in' is a Python keyword.
lengthUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=lengthUnit, enum_prefix=None)
lengthUnit.mile = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'mile', tag=u'mile')
lengthUnit.yd = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'yd', tag=u'yd')
lengthUnit.ft = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'ft', tag=u'ft')
lengthUnit.in_ = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'in', tag=u'in_')
lengthUnit.km = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'km', tag=u'km')
lengthUnit.m = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'm', tag=u'm')
lengthUnit.cm = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'cm', tag=u'cm')
lengthUnit.mm = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'mm', tag=u'mm')
lengthUnit.um = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'um', tag=u'um')
lengthUnit.nm = lengthUnit._CF_enumeration.addEnumeration(unicode_value=u'nm', tag=u'nm')
lengthUnit._InitializeFacetMap(lengthUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'lengthUnit', lengthUnit)
# Atomic simple type: {iFAB}areaUnit
class areaUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}areaUnit; values: 'in2', 'ft2', 'yd2', 'acre', 'mile2', 'mm2', 'cm2', 'm2', 'km2'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'areaUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 153, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
areaUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=areaUnit, enum_prefix=None)
areaUnit.in2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'in2', tag=u'in2')
areaUnit.ft2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'ft2', tag=u'ft2')
areaUnit.yd2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'yd2', tag=u'yd2')
areaUnit.acre = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'acre', tag=u'acre')
areaUnit.mile2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'mile2', tag=u'mile2')
areaUnit.mm2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'mm2', tag=u'mm2')
areaUnit.cm2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'cm2', tag=u'cm2')
areaUnit.m2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'm2', tag=u'm2')
areaUnit.km2 = areaUnit._CF_enumeration.addEnumeration(unicode_value=u'km2', tag=u'km2')
areaUnit._InitializeFacetMap(areaUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'areaUnit', areaUnit)
# Atomic simple type: {iFAB}volumeUnit
class volumeUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}volumeUnit; values: 'in3', 'ft3', 'fl oz', 'pt', 'gal', 'mL', 'L', 'mm3', 'cm3', 'm3'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'volumeUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 167, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
volumeUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=volumeUnit, enum_prefix=None)
volumeUnit.in3 = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'in3', tag=u'in3')
volumeUnit.ft3 = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'ft3', tag=u'ft3')
volumeUnit.fl_oz = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'fl oz', tag=u'fl_oz')
volumeUnit.pt = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'pt', tag=u'pt')
volumeUnit.gal = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'gal', tag=u'gal')
volumeUnit.mL = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'mL', tag=u'mL')
volumeUnit.L = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'L', tag=u'L')
volumeUnit.mm3 = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'mm3', tag=u'mm3')
volumeUnit.cm3 = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'cm3', tag=u'cm3')
volumeUnit.m3 = volumeUnit._CF_enumeration.addEnumeration(unicode_value=u'm3', tag=u'm3')
volumeUnit._InitializeFacetMap(volumeUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'volumeUnit', volumeUnit)
# Atomic simple type: {iFAB}massUnit
class massUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}massUnit; values: 'oz', 'lb', 'ton', 'mg', 'g', 'kg', 'tonne'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'massUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 182, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
massUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=massUnit, enum_prefix=None)
massUnit.oz = massUnit._CF_enumeration.addEnumeration(unicode_value=u'oz', tag=u'oz')
massUnit.lb = massUnit._CF_enumeration.addEnumeration(unicode_value=u'lb', tag=u'lb')
massUnit.ton = massUnit._CF_enumeration.addEnumeration(unicode_value=u'ton', tag=u'ton')
massUnit.mg = massUnit._CF_enumeration.addEnumeration(unicode_value=u'mg', tag=u'mg')
massUnit.g = massUnit._CF_enumeration.addEnumeration(unicode_value=u'g', tag=u'g')
massUnit.kg = massUnit._CF_enumeration.addEnumeration(unicode_value=u'kg', tag=u'kg')
massUnit.tonne = massUnit._CF_enumeration.addEnumeration(unicode_value=u'tonne', tag=u'tonne')
massUnit._InitializeFacetMap(massUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'massUnit', massUnit)
# Atomic simple type: {iFAB}forceUnit
class forceUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}forceUnit; the only value is 'N'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'forceUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 194, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
forceUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=forceUnit, enum_prefix=None)
forceUnit.N = forceUnit._CF_enumeration.addEnumeration(unicode_value=u'N', tag=u'N')
forceUnit._InitializeFacetMap(forceUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'forceUnit', forceUnit)
# Atomic simple type: {iFAB}torqueUnit
class torqueUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}torqueUnit; the only value is 'N m'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'torqueUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 200, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
torqueUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=torqueUnit, enum_prefix=None)
torqueUnit.N_m = torqueUnit._CF_enumeration.addEnumeration(unicode_value=u'N m', tag=u'N_m')
torqueUnit._InitializeFacetMap(torqueUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'torqueUnit', torqueUnit)
# Atomic simple type: {iFAB}temperatureUnit
class temperatureUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}temperatureUnit; values: 'F', 'C'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'temperatureUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 206, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
temperatureUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=temperatureUnit, enum_prefix=None)
temperatureUnit.F = temperatureUnit._CF_enumeration.addEnumeration(unicode_value=u'F', tag=u'F')
temperatureUnit.C = temperatureUnit._CF_enumeration.addEnumeration(unicode_value=u'C', tag=u'C')
temperatureUnit._InitializeFacetMap(temperatureUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'temperatureUnit', temperatureUnit)
# Atomic simple type: {iFAB}hardnessUnit
class hardnessUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}hardnessUnit; the only value is 'bh'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'hardnessUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 213, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
hardnessUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=hardnessUnit, enum_prefix=None)
hardnessUnit.bh = hardnessUnit._CF_enumeration.addEnumeration(unicode_value=u'bh', tag=u'bh')
hardnessUnit._InitializeFacetMap(hardnessUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'hardnessUnit', hardnessUnit)
# Atomic simple type: {iFAB}angleUnit
class angleUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}angleUnit; values: 'degrees', 'radians'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'angleUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 219, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
angleUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=angleUnit, enum_prefix=None)
angleUnit.degrees = angleUnit._CF_enumeration.addEnumeration(unicode_value=u'degrees', tag=u'degrees')
angleUnit.radians = angleUnit._CF_enumeration.addEnumeration(unicode_value=u'radians', tag=u'radians')
angleUnit._InitializeFacetMap(angleUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'angleUnit', angleUnit)
# Atomic simple type: {iFAB}pressureUnit
class pressureUnit (pyxb.binding.datatypes.normalizedString, pyxb.binding.basis.enumeration_mixin):
    """normalizedString enumeration {iFAB}pressureUnit; values: 'Pa', 'kPa', 'MPa', 'GPa', 'psi', 'ksi'."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'pressureUnit')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 226, 2)
    _Documentation = None
# Enumeration facets and namespace registration (generated by PyXB).
pressureUnit._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=pressureUnit, enum_prefix=None)
pressureUnit.Pa = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'Pa', tag=u'Pa')
pressureUnit.kPa = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'kPa', tag=u'kPa')
pressureUnit.MPa = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'MPa', tag=u'MPa')
pressureUnit.GPa = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'GPa', tag=u'GPa')
pressureUnit.psi = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'psi', tag=u'psi')
pressureUnit.ksi = pressureUnit._CF_enumeration.addEnumeration(unicode_value=u'ksi', tag=u'ksi')
pressureUnit._InitializeFacetMap(pressureUnit._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'pressureUnit', pressureUnit)
# Complex type {iFAB}assemblyDetails with content type ELEMENT_ONLY
class assemblyDetails_ (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}assemblyDetails (content type ELEMENT_ONLY); binds the ``assemblyDetail`` child element."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'assemblyDetails')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 14, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element assemblyDetail uses Python identifier assemblyDetail
    __assemblyDetail = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'assemblyDetail'), 'assemblyDetail', '__iFAB_assemblyDetails__assemblyDetail', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 16, 6), )
    # Public accessor for the (name-mangled) element declaration above.
    assemblyDetail = property(__assemblyDetail.value, __assemblyDetail.set, None, None)
    _ElementMap.update({
        __assemblyDetail.name() : __assemblyDetail
    })
    _AttributeMap.update({
    })
Namespace.addCategoryObject('typeBinding', u'assemblyDetails', assemblyDetails_)
# Complex type {iFAB}incidentalContact with content type EMPTY
class incidentalContact (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}incidentalContact (content type EMPTY): declares no child elements or attributes."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'incidentalContact')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 38, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
Namespace.addCategoryObject('typeBinding', u'incidentalContact', incidentalContact)
# Complex type {iFAB}mechanical with content type ELEMENT_ONLY
class mechanical (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}mechanical with content type ELEMENT_ONLY"""
    # PyXB-generated binding describing a mechanically fastened joint:
    # repeatable linkingPart plus single notes, fasteningMethod,
    # fasteningQuantity, torque and force child elements.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'mechanical')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 43, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linkingPart uses Python identifier linkingPart
    __linkingPart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'linkingPart'), 'linkingPart', '__iFAB_mechanical_linkingPart', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 45, 6), )
    linkingPart = property(__linkingPart.value, __linkingPart.set, None, None)
    # Element notes uses Python identifier notes
    __notes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'notes'), 'notes', '__iFAB_mechanical_notes', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 46, 6), )
    notes = property(__notes.value, __notes.set, None, None)
    # Element fasteningMethod uses Python identifier fasteningMethod
    __fasteningMethod = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fasteningMethod'), 'fasteningMethod', '__iFAB_mechanical_fasteningMethod', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 47, 6), )
    fasteningMethod = property(__fasteningMethod.value, __fasteningMethod.set, None, None)
    # Element fasteningQuantity uses Python identifier fasteningQuantity
    __fasteningQuantity = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fasteningQuantity'), 'fasteningQuantity', '__iFAB_mechanical_fasteningQuantity', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 48, 6), )
    fasteningQuantity = property(__fasteningQuantity.value, __fasteningQuantity.set, None, None)
    # Element torque uses Python identifier torque
    __torque = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'torque'), 'torque', '__iFAB_mechanical_torque', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 49, 6), )
    torque = property(__torque.value, __torque.set, None, None)
    # Element force uses Python identifier force
    __force = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'force'), 'force', '__iFAB_mechanical_force', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 50, 6), )
    force = property(__force.value, __force.set, None, None)
    _ElementMap.update({
        __linkingPart.name() : __linkingPart,
        __notes.name() : __notes,
        __fasteningMethod.name() : __fasteningMethod,
        __fasteningQuantity.name() : __fasteningQuantity,
        __torque.name() : __torque,
        __force.name() : __force
    })
    _AttributeMap.update({
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'mechanical', mechanical)
# Complex type {iFAB}welded with content type ELEMENT_ONLY
class welded (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}welded with content type ELEMENT_ONLY"""
    # PyXB-generated binding describing a welded joint: repeatable
    # linkingPart plus the weld's geometry (length, jointType, weldType,
    # weldPenetration, twoSided), inspection requirement, and the thickness
    # and material of the two joined parts.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'welded')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 54, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linkingPart uses Python identifier linkingPart
    __linkingPart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'linkingPart'), 'linkingPart', '__iFAB_welded_linkingPart', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 56, 6), )
    linkingPart = property(__linkingPart.value, __linkingPart.set, None, None)
    # Element notes uses Python identifier notes
    __notes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'notes'), 'notes', '__iFAB_welded_notes', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 57, 6), )
    notes = property(__notes.value, __notes.set, None, None)
    # Element length uses Python identifier length
    __length = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'length'), 'length', '__iFAB_welded_length', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 58, 6), )
    length = property(__length.value, __length.set, None, None)
    # Element jointType uses Python identifier jointType
    __jointType = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'jointType'), 'jointType', '__iFAB_welded_jointType', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 59, 6), )
    jointType = property(__jointType.value, __jointType.set, None, None)
    # Element weldType uses Python identifier weldType
    __weldType = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'weldType'), 'weldType', '__iFAB_welded_weldType', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 60, 6), )
    weldType = property(__weldType.value, __weldType.set, None, None)
    # Element weldPenetration uses Python identifier weldPenetration
    __weldPenetration = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'weldPenetration'), 'weldPenetration', '__iFAB_welded_weldPenetration', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 61, 6), )
    weldPenetration = property(__weldPenetration.value, __weldPenetration.set, None, None)
    # Element twoSided uses Python identifier twoSided
    __twoSided = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'twoSided'), 'twoSided', '__iFAB_welded_twoSided', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 62, 6), )
    twoSided = property(__twoSided.value, __twoSided.set, None, None)
    # Element inspectionRequirement uses Python identifier inspectionRequirement
    __inspectionRequirement = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'inspectionRequirement'), 'inspectionRequirement', '__iFAB_welded_inspectionRequirement', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 63, 6), )
    inspectionRequirement = property(__inspectionRequirement.value, __inspectionRequirement.set, None, None)
    # Element part1Thickness uses Python identifier part1Thickness
    __part1Thickness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part1Thickness'), 'part1Thickness', '__iFAB_welded_part1Thickness', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 64, 6), )
    part1Thickness = property(__part1Thickness.value, __part1Thickness.set, None, None)
    # Element part1Material uses Python identifier part1Material
    __part1Material = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part1Material'), 'part1Material', '__iFAB_welded_part1Material', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 65, 6), )
    part1Material = property(__part1Material.value, __part1Material.set, None, None)
    # Element part2Thickness uses Python identifier part2Thickness
    __part2Thickness = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part2Thickness'), 'part2Thickness', '__iFAB_welded_part2Thickness', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 66, 6), )
    part2Thickness = property(__part2Thickness.value, __part2Thickness.set, None, None)
    # Element part2Material uses Python identifier part2Material
    __part2Material = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part2Material'), 'part2Material', '__iFAB_welded_part2Material', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 67, 6), )
    part2Material = property(__part2Material.value, __part2Material.set, None, None)
    _ElementMap.update({
        __linkingPart.name() : __linkingPart,
        __notes.name() : __notes,
        __length.name() : __length,
        __jointType.name() : __jointType,
        __weldType.name() : __weldType,
        __weldPenetration.name() : __weldPenetration,
        __twoSided.name() : __twoSided,
        __inspectionRequirement.name() : __inspectionRequirement,
        __part1Thickness.name() : __part1Thickness,
        __part1Material.name() : __part1Material,
        __part2Thickness.name() : __part2Thickness,
        __part2Material.name() : __part2Material
    })
    _AttributeMap.update({
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'welded', welded)
# Complex type {iFAB}soldered with content type ELEMENT_ONLY
class soldered (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}soldered with content type ELEMENT_ONLY"""
    # PyXB-generated binding describing a soldered joint: repeatable
    # linkingPart plus notes, joint length, filler and flux materials.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'soldered')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 71, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linkingPart uses Python identifier linkingPart
    __linkingPart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'linkingPart'), 'linkingPart', '__iFAB_soldered_linkingPart', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 73, 6), )
    linkingPart = property(__linkingPart.value, __linkingPart.set, None, None)
    # Element notes uses Python identifier notes
    __notes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'notes'), 'notes', '__iFAB_soldered_notes', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 74, 6), )
    notes = property(__notes.value, __notes.set, None, None)
    # Element length uses Python identifier length
    __length = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'length'), 'length', '__iFAB_soldered_length', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 75, 6), )
    length = property(__length.value, __length.set, None, None)
    # Element fillerMaterial uses Python identifier fillerMaterial
    __fillerMaterial = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fillerMaterial'), 'fillerMaterial', '__iFAB_soldered_fillerMaterial', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 76, 6), )
    fillerMaterial = property(__fillerMaterial.value, __fillerMaterial.set, None, None)
    # Element fluxMaterial uses Python identifier fluxMaterial
    __fluxMaterial = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fluxMaterial'), 'fluxMaterial', '__iFAB_soldered_fluxMaterial', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 77, 6), )
    fluxMaterial = property(__fluxMaterial.value, __fluxMaterial.set, None, None)
    _ElementMap.update({
        __linkingPart.name() : __linkingPart,
        __notes.name() : __notes,
        __length.name() : __length,
        __fillerMaterial.name() : __fillerMaterial,
        __fluxMaterial.name() : __fluxMaterial
    })
    _AttributeMap.update({
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'soldered', soldered)
# Complex type {iFAB}brazed with content type ELEMENT_ONLY
class brazed (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}brazed with content type ELEMENT_ONLY"""
    # PyXB-generated binding describing a brazed joint; structurally
    # identical to 'soldered': repeatable linkingPart plus notes, joint
    # length, filler and flux materials.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'brazed')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 81, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linkingPart uses Python identifier linkingPart
    __linkingPart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'linkingPart'), 'linkingPart', '__iFAB_brazed_linkingPart', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 83, 6), )
    linkingPart = property(__linkingPart.value, __linkingPart.set, None, None)
    # Element notes uses Python identifier notes
    __notes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'notes'), 'notes', '__iFAB_brazed_notes', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 84, 6), )
    notes = property(__notes.value, __notes.set, None, None)
    # Element length uses Python identifier length
    __length = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'length'), 'length', '__iFAB_brazed_length', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 85, 6), )
    length = property(__length.value, __length.set, None, None)
    # Element fillerMaterial uses Python identifier fillerMaterial
    __fillerMaterial = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fillerMaterial'), 'fillerMaterial', '__iFAB_brazed_fillerMaterial', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 86, 6), )
    fillerMaterial = property(__fillerMaterial.value, __fillerMaterial.set, None, None)
    # Element fluxMaterial uses Python identifier fluxMaterial
    __fluxMaterial = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'fluxMaterial'), 'fluxMaterial', '__iFAB_brazed_fluxMaterial', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 87, 6), )
    fluxMaterial = property(__fluxMaterial.value, __fluxMaterial.set, None, None)
    _ElementMap.update({
        __linkingPart.name() : __linkingPart,
        __notes.name() : __notes,
        __length.name() : __length,
        __fillerMaterial.name() : __fillerMaterial,
        __fluxMaterial.name() : __fluxMaterial
    })
    _AttributeMap.update({
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'brazed', brazed)
# Complex type {iFAB}glued with content type ELEMENT_ONLY
class glued (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}glued with content type ELEMENT_ONLY"""
    # PyXB-generated binding describing a glued joint: repeatable
    # linkingPart plus notes, bond length, adhesive volume and material.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'glued')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 91, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element linkingPart uses Python identifier linkingPart
    __linkingPart = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'linkingPart'), 'linkingPart', '__iFAB_glued_linkingPart', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 93, 6), )
    linkingPart = property(__linkingPart.value, __linkingPart.set, None, None)
    # Element notes uses Python identifier notes
    __notes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'notes'), 'notes', '__iFAB_glued_notes', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 94, 6), )
    notes = property(__notes.value, __notes.set, None, None)
    # Element length uses Python identifier length
    __length = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'length'), 'length', '__iFAB_glued_length', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 95, 6), )
    length = property(__length.value, __length.set, None, None)
    # Element volume uses Python identifier volume
    __volume = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'volume'), 'volume', '__iFAB_glued_volume', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 96, 6), )
    volume = property(__volume.value, __volume.set, None, None)
    # Element material uses Python identifier material
    __material = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'material'), 'material', '__iFAB_glued_material', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 97, 6), )
    material = property(__material.value, __material.set, None, None)
    _ElementMap.update({
        __linkingPart.name() : __linkingPart,
        __notes.name() : __notes,
        __length.name() : __length,
        __volume.name() : __volume,
        __material.name() : __material
    })
    _AttributeMap.update({
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'glued', glued)
# Complex type {iFAB}assemblyDetail with content type ELEMENT_ONLY
class assemblyDetail (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}assemblyDetail with content type ELEMENT_ONLY"""
    # PyXB-generated binding for a single assembly step: identifying
    # elements (name, part1, part2, description), the joining-method
    # elements (incidentalContact / mechanical / welded / soldered /
    # brazed / glued — all declared repeatable), and a required
    # namespace-qualified 'id' attribute of type guid.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'assemblyDetail')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 20, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element name uses Python identifier name
    __name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'name'), 'name', '__iFAB_assemblyDetail_name', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 22, 6), )
    name = property(__name.value, __name.set, None, None)
    # Element part1 uses Python identifier part1
    __part1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part1'), 'part1', '__iFAB_assemblyDetail_part1', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 23, 6), )
    part1 = property(__part1.value, __part1.set, None, None)
    # Element part2 uses Python identifier part2
    __part2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'part2'), 'part2', '__iFAB_assemblyDetail_part2', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 24, 6), )
    part2 = property(__part2.value, __part2.set, None, None)
    # Element description uses Python identifier description
    __description = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'description'), 'description', '__iFAB_assemblyDetail_description', False, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 25, 6), )
    description = property(__description.value, __description.set, None, None)
    # Element incidentalContact uses Python identifier incidentalContact
    __incidentalContact = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'incidentalContact'), 'incidentalContact', '__iFAB_assemblyDetail_incidentalContact', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 27, 8), )
    incidentalContact = property(__incidentalContact.value, __incidentalContact.set, None, None)
    # Element mechanical uses Python identifier mechanical
    __mechanical = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'mechanical'), 'mechanical', '__iFAB_assemblyDetail_mechanical', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 28, 8), )
    mechanical = property(__mechanical.value, __mechanical.set, None, None)
    # Element welded uses Python identifier welded
    __welded = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'welded'), 'welded', '__iFAB_assemblyDetail_welded', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 29, 8), )
    welded = property(__welded.value, __welded.set, None, None)
    # Element soldered uses Python identifier soldered
    __soldered = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'soldered'), 'soldered', '__iFAB_assemblyDetail_soldered', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 30, 8), )
    soldered = property(__soldered.value, __soldered.set, None, None)
    # Element brazed uses Python identifier brazed
    __brazed = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'brazed'), 'brazed', '__iFAB_assemblyDetail_brazed', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 31, 8), )
    brazed = property(__brazed.value, __brazed.set, None, None)
    # Element glued uses Python identifier glued
    __glued = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'glued'), 'glued', '__iFAB_assemblyDetail_glued', True, pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 32, 8), )
    glued = property(__glued.value, __glued.set, None, None)
    # Attribute {iFAB}id uses Python identifier id
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, u'id'), 'id', '__iFAB_assemblyDetail_iFABid', guid, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 10, 2)
    __id._UseLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 35, 4)
    id = property(__id.value, __id.set, None, None)
    _ElementMap.update({
        __name.name() : __name,
        __part1.name() : __part1,
        __part2.name() : __part2,
        __description.name() : __description,
        __incidentalContact.name() : __incidentalContact,
        __mechanical.name() : __mechanical,
        __welded.name() : __welded,
        __soldered.name() : __soldered,
        __brazed.name() : __brazed,
        __glued.name() : __glued
    })
    _AttributeMap.update({
        __id.name() : __id
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'assemblyDetail', assemblyDetail)
# Complex type {iFAB}partReference with content type SIMPLE
class partReference (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}partReference with content type SIMPLE"""
    # PyXB-generated binding: string content (the referenced part's name)
    # plus a required namespace-qualified 'id' attribute of type guid.
    _TypeDefinition = pyxb.binding.datatypes.string
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'partReference')
    _XSDLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 163, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.string
    # Attribute {iFAB}id uses Python identifier id
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(Namespace, u'id'), 'id', '__iFAB_partReference_iFABid', guid, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 10, 2)
    __id._UseLocation = pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 166, 8)
    id = property(__id.value, __id.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __id.name() : __id
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'partReference', partReference)
# Complex type {iFAB}duration with content type SIMPLE
class duration (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}duration with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type timeUnit, default u'day').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'duration')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 26, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_duration_unit', timeUnit, unicode_default=u'day')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 29, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 29, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'duration', duration)
# Complex type {iFAB}price with content type SIMPLE
class price (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}price with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'currency' attribute (type currency, default u'USD').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'price')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 34, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute currency uses Python identifier currency
    __currency = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'currency'), 'currency', '__iFAB_price_currency', currency, unicode_default=u'USD')
    __currency._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 37, 8)
    __currency._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 37, 8)
    currency = property(__currency.value, __currency.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __currency.name() : __currency
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'price', price)
# Complex type {iFAB}length with content type SIMPLE
class length (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}length with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type lengthUnit, default u'mm').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'length')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 42, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_length_unit', lengthUnit, unicode_default=u'mm')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 45, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 45, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'length', length)
# Complex type {iFAB}area with content type SIMPLE
class area (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}area with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type areaUnit, default u'mm2').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'area')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 50, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_area_unit', areaUnit, unicode_default=u'mm2')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 53, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 53, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'area', area)
# Complex type {iFAB}volume with content type SIMPLE
class volume (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}volume with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type volumeUnit, default u'mm3').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'volume')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 58, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_volume_unit', volumeUnit, unicode_default=u'mm3')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 61, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 61, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'volume', volume)
# Complex type {iFAB}weight with content type SIMPLE
class weight (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}weight with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type massUnit, default u'g').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'weight')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 66, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_weight_unit', massUnit, unicode_default=u'g')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 69, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 69, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'weight', weight)
# Complex type {iFAB}force with content type SIMPLE
class force (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}force with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type forceUnit, default u'N').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'force')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 74, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_force_unit', forceUnit, unicode_default=u'N')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 77, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 77, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'force', force)
# Complex type {iFAB}torque with content type SIMPLE
class torque (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}torque with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type torqueUnit, default u'N m').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'torque')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 82, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_torque_unit', torqueUnit, unicode_default=u'N m')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 85, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 85, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'torque', torque)
# Complex type {iFAB}hardness with content type SIMPLE
class hardness (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}hardness with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type hardnessUnit, default u'bh').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'hardness')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 90, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_hardness_unit', hardnessUnit, unicode_default=u'bh')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 93, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 93, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'hardness', hardness)
# Complex type {iFAB}angle with content type SIMPLE
class angle (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}angle with content type SIMPLE"""
    # PyXB-generated binding: nonNegativeDecimal content with an optional
    # 'unit' attribute (type angleUnit, default u'degrees').
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'angle')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 98, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_angle_unit', angleUnit, unicode_default=u'degrees')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 101, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 101, 8)
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
# Register the binding under its schema name in the iFAB namespace.
Namespace.addCategoryObject('typeBinding', u'angle', angle)
# Complex type {iFAB}temperature with content type SIMPLE
class temperature (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}temperature with content type SIMPLE"""
    # PyXB-generated binding: simple content whose value is a plain decimal
    # (negative temperatures allowed, unlike the nonNegativeDecimal-based
    # types in this file), extended with a single 'unit' attribute.
    _TypeDefinition = pyxb.binding.datatypes.decimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'temperature')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 106, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.decimal
    # Attribute unit uses Python identifier unit
    # The attribute's value type is temperatureUnit; defaults to u'C' (Celsius).
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_temperature_unit', temperatureUnit, unicode_default=u'C')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 109, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 109, 8)
    # Public accessor for the 'unit' attribute value.
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
Namespace.addCategoryObject('typeBinding', u'temperature', temperature)
# Complex type {iFAB}pressure with content type SIMPLE
class pressure (pyxb.binding.basis.complexTypeDefinition):
    """Complex type {iFAB}pressure with content type SIMPLE"""
    # PyXB-generated binding: simple content whose value is a nonNegativeDecimal,
    # extended with a single 'unit' attribute (see common.xsd lines 114-117).
    _TypeDefinition = nonNegativeDecimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'pressure')
    _XSDLocation = pyxb.utils.utility.Location(u'common.xsd', 114, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is nonNegativeDecimal
    # Attribute unit uses Python identifier unit
    # The attribute's value type is pressureUnit; defaults to u'Pa' (pascals).
    __unit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'unit'), 'unit', '__iFAB_pressure_unit', pressureUnit, unicode_default=u'Pa')
    __unit._DeclarationLocation = pyxb.utils.utility.Location(u'common.xsd', 117, 8)
    __unit._UseLocation = pyxb.utils.utility.Location(u'common.xsd', 117, 8)
    # Public accessor for the 'unit' attribute value.
    unit = property(__unit.value, __unit.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __unit.name() : __unit
    })
Namespace.addCategoryObject('typeBinding', u'pressure', pressure)
# Module-level binding for the document element {iFAB}assemblyDetails,
# backed by the assemblyDetails_ complex type, and registration of its
# repeating 'assemblyDetail' child element.
assemblyDetails = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, u'assemblyDetails'), assemblyDetails_, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 12, 2))
Namespace.addCategoryObject('elementBinding', assemblyDetails.name().localName(), assemblyDetails)
assemblyDetails_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'assemblyDetail'), assemblyDetail, scope=assemblyDetails_, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 16, 6)))
def _BuildAutomaton ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 15, 4))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(assemblyDetails_._UseForTag(pyxb.namespace.ExpandedName(None, u'assemblyDetail')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 16, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
assemblyDetails_._Automaton = _BuildAutomaton()
# Child element declarations for the 'mechanical' joining-method type
# (schema: AssemblyDetails.xsd lines 45-50).  Note fasteningQuantity
# carries a schema default of u'1'.
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'linkingPart'), partReference, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 45, 6)))
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'notes'), pyxb.binding.datatypes.string, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 46, 6)))
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fasteningMethod'), fasteningMethod, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 47, 6)))
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fasteningQuantity'), pyxb.binding.datatypes.positiveInteger, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 48, 6), unicode_default=u'1'))
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'torque'), torque, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 49, 6)))
mechanical._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'force'), force, scope=mechanical, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 50, 6)))
def _BuildAutomaton_ ():
    # Remove this helper function from the namespace after it is invoked
    # Builds the content automaton for the 'mechanical' complex type.
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    # Counter conditions mirror the schema occurrence bounds: linkingPart is
    # unbounded (cc_0); notes, fasteningQuantity, torque and force each occur
    # at most once (cc_1..cc_4).  fasteningMethod (no counter) is required.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 45, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 46, 6))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 48, 6))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 49, 6))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 50, 6))
    counters.add(cc_4)
    # One fac.State per element use.  A None final_update means the state
    # cannot end the content model; a set (possibly with counter-update
    # obligations) marks it as a potential accepting state.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'linkingPart')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 45, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'notes')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 46, 6))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'fasteningMethod')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 47, 6))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'fasteningQuantity')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 48, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'torque')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 49, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(mechanical._UseForTag(pyxb.namespace.ExpandedName(None, u'force')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 50, 6))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    # Transitions encode the permitted element orderings between states.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    transitions.append(fac.Transition(st_4, [
        ]))
    transitions.append(fac.Transition(st_5, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, True) ]))
    st_5._set_transitionSet(transitions)
    # Third argument False: empty content is not accepted (fasteningMethod
    # is mandatory).
    return fac.Automaton(states, counters, False, containing_state=None)
mechanical._Automaton = _BuildAutomaton_()
# Child element declarations for the 'welded' joining-method type
# (schema: AssemblyDetails.xsd lines 56-67).
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'linkingPart'), partReference, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 56, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'notes'), pyxb.binding.datatypes.string, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 57, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'length'), length, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 58, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'jointType'), jointType, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 59, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'weldType'), weldType, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 60, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'weldPenetration'), weldPenetration, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 61, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'twoSided'), pyxb.binding.datatypes.boolean, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 62, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'inspectionRequirement'), inspectionRequirement, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 63, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part1Thickness'), length, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 64, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part1Material'), baseMaterial, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 65, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part2Thickness'), length, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 66, 6)))
welded._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part2Material'), baseMaterial, scope=welded, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 67, 6)))
def _BuildAutomaton_2 ():
    # Remove this helper function from the namespace after it is invoked
    # Builds the content automaton for the 'welded' complex type.
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    # Counters: linkingPart is unbounded (cc_0); notes occurs at most once
    # (cc_1).  The remaining elements form a required fixed sequence and
    # need no counters.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 56, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 57, 6))
    counters.add(cc_1)
    # One fac.State per element use; only the terminal part2Material state
    # gets a (empty) final_update set, so it alone can end the content model.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'linkingPart')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 56, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'notes')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 57, 6))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'length')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 58, 6))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'jointType')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 59, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'weldType')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 60, 6))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'weldPenetration')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 61, 6))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'twoSided')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 62, 6))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'inspectionRequirement')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 63, 6))
    st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'part1Thickness')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 64, 6))
    st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'part1Material')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 65, 6))
    st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'part2Thickness')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 66, 6))
    st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_10)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(welded._UseForTag(pyxb.namespace.ExpandedName(None, u'part2Material')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 67, 6))
    st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_11)
    # Transitions: the optional repeating prefix (linkingPart*, notes?) feeds
    # into the strict chain length -> jointType -> ... -> part2Material.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_8, [
        ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_9, [
        ]))
    st_8._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_10, [
        ]))
    st_9._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_11, [
        ]))
    st_10._set_transitionSet(transitions)
    transitions = []
    st_11._set_transitionSet(transitions)
    # Third argument False: empty content is not accepted.
    return fac.Automaton(states, counters, False, containing_state=None)
welded._Automaton = _BuildAutomaton_2()
# Child element declarations for the 'soldered' joining-method type
# (schema: AssemblyDetails.xsd lines 73-77).
soldered._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'linkingPart'), partReference, scope=soldered, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 73, 6)))
soldered._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'notes'), pyxb.binding.datatypes.string, scope=soldered, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 74, 6)))
soldered._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'length'), length, scope=soldered, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 75, 6)))
soldered._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fillerMaterial'), fillerMaterial, scope=soldered, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 76, 6)))
soldered._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fluxMaterial'), fluxMaterial, scope=soldered, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 77, 6)))
def _BuildAutomaton_3 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_3
del _BuildAutomaton_3
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 73, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 74, 6))
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(soldered._UseForTag(pyxb.namespace.ExpandedName(None, u'linkingPart')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 73, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(soldered._UseForTag(pyxb.namespace.ExpandedName(None, u'notes')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 74, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(soldered._UseForTag(pyxb.namespace.ExpandedName(None, u'length')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 75, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(soldered._UseForTag(pyxb.namespace.ExpandedName(None, u'fillerMaterial')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 76, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
symbol = pyxb.binding.content.ElementUse(soldered._UseForTag(pyxb.namespace.ExpandedName(None, u'fluxMaterial')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 77, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
]))
st_3._set_transitionSet(transitions)
transitions = []
st_4._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
soldered._Automaton = _BuildAutomaton_3()
# Child element declarations for the 'brazed' joining-method type
# (schema: AssemblyDetails.xsd lines 83-87).
brazed._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'linkingPart'), partReference, scope=brazed, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 83, 6)))
brazed._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'notes'), pyxb.binding.datatypes.string, scope=brazed, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 84, 6)))
brazed._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'length'), length, scope=brazed, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 85, 6)))
brazed._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fillerMaterial'), fillerMaterial, scope=brazed, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 86, 6)))
brazed._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'fluxMaterial'), fluxMaterial, scope=brazed, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 87, 6)))
def _BuildAutomaton_4 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_4
del _BuildAutomaton_4
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 83, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 84, 6))
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(brazed._UseForTag(pyxb.namespace.ExpandedName(None, u'linkingPart')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 83, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(brazed._UseForTag(pyxb.namespace.ExpandedName(None, u'notes')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 84, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(brazed._UseForTag(pyxb.namespace.ExpandedName(None, u'length')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 85, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(brazed._UseForTag(pyxb.namespace.ExpandedName(None, u'fillerMaterial')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 86, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
symbol = pyxb.binding.content.ElementUse(brazed._UseForTag(pyxb.namespace.ExpandedName(None, u'fluxMaterial')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 87, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
]))
st_3._set_transitionSet(transitions)
transitions = []
st_4._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
brazed._Automaton = _BuildAutomaton_4()
# Child element declarations for the 'glued' joining-method type
# (schema: AssemblyDetails.xsd lines 93-97).  The 'material' child is
# bound to the glue type.
glued._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'linkingPart'), partReference, scope=glued, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 93, 6)))
glued._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'notes'), pyxb.binding.datatypes.string, scope=glued, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 94, 6)))
glued._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'length'), length, scope=glued, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 95, 6)))
glued._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'volume'), volume, scope=glued, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 96, 6)))
glued._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'material'), glue, scope=glued, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 97, 6)))
def _BuildAutomaton_5 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_5
del _BuildAutomaton_5
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 93, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 94, 6))
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(glued._UseForTag(pyxb.namespace.ExpandedName(None, u'linkingPart')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 93, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(glued._UseForTag(pyxb.namespace.ExpandedName(None, u'notes')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 94, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(glued._UseForTag(pyxb.namespace.ExpandedName(None, u'length')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 95, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(glued._UseForTag(pyxb.namespace.ExpandedName(None, u'volume')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 96, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
symbol = pyxb.binding.content.ElementUse(glued._UseForTag(pyxb.namespace.ExpandedName(None, u'material')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 97, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
]))
st_3._set_transitionSet(transitions)
transitions = []
st_4._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
glued._Automaton = _BuildAutomaton_5()
# Child element declarations for the 'assemblyDetail' type: name, part1,
# part2, description, followed by the joining-method elements
# (incidentalContact / mechanical / welded / soldered / brazed / glued).
# The column-8 locations suggest the latter group sits in a nested schema
# particle (likely a choice) — confirm against AssemblyDetails.xsd lines 26-32.
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'name'), pyxb.binding.datatypes.string, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 22, 6)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part1'), partReference, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 23, 6)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'part2'), partReference, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 24, 6)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'description'), pyxb.binding.datatypes.string, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 25, 6)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'incidentalContact'), incidentalContact, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 27, 8)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'mechanical'), mechanical, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 28, 8)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'welded'), welded, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 29, 8)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'soldered'), soldered, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 30, 8)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'brazed'), brazed, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 31, 8)))
assemblyDetail._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'glued'), glued, scope=assemblyDetail, location=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 32, 8)))
def _BuildAutomaton_6 ():
    """Build the content-model automaton for the ``assemblyDetail`` type.

    PyXB-generated helper: constructs a finite automaton with counters
    (FAC) that validates the order and multiplicity of assemblyDetail's
    child elements as declared in AssemblyDetails.xsd.  The helper removes
    itself from the module namespace after its single invocation.

    :return: a ``pyxb.utils.fac.Automaton`` for the complex type.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_6
    del _BuildAutomaton_6
    import pyxb.utils.fac as fac
    counters = set()
    # FIX: the generated code used Python 2 long literals (``min=0L``),
    # which are a SyntaxError under Python 3; plain ints behave identically.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 22, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 25, 6))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 26, 6))
    counters.add(cc_2)
    # One state per recognizable child element, in schema order.
    states = []
    final_update = None
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'name')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 22, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = None
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'part1')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 23, 6))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'part2')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 24, 6))
    st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'description')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 25, 6))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'incidentalContact')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 27, 8))
    st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'mechanical')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 28, 8))
    st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'welded')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 29, 8))
    st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'soldered')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 30, 8))
    st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'brazed')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 31, 8))
    st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_8)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(assemblyDetail._UseForTag(pyxb.namespace.ExpandedName(None, u'glued')), pyxb.utils.utility.Location(u'AssemblyDetails.xsd', 32, 8))
    st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_9)
    # Transition sets, one per state, with counter updates.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
         ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
         ]))
    transitions.append(fac.Transition(st_4, [
         ]))
    transitions.append(fac.Transition(st_5, [
         ]))
    transitions.append(fac.Transition(st_6, [
         ]))
    transitions.append(fac.Transition(st_7, [
         ]))
    transitions.append(fac.Transition(st_8, [
         ]))
    transitions.append(fac.Transition(st_9, [
         ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_7._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_8._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_8, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_9, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_9._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
assemblyDetail._Automaton = _BuildAutomaton_6()
|
{
"content_hash": "af2da780d9069f14c6480b2edcab23bb",
"timestamp": "",
"source": "github",
"line_count": 2054,
"max_line_length": 263,
"avg_line_length": 52.34664070107108,
"alnum_prop": 0.7158110119047619,
"repo_name": "pombredanne/metamorphosys-desktop",
"id": "271401ac0cfe09b7f6bbb15851745c85f5b5221d",
"size": "107571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metamorphosys/META/meta/DesignDataPackage/lib/python/iFAB/__init__.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "10683"
},
{
"name": "Assembly",
"bytes": "117345"
},
{
"name": "Awk",
"bytes": "3591"
},
{
"name": "Batchfile",
"bytes": "228118"
},
{
"name": "BitBake",
"bytes": "4526"
},
{
"name": "C",
"bytes": "3613212"
},
{
"name": "C#",
"bytes": "11617773"
},
{
"name": "C++",
"bytes": "51448188"
},
{
"name": "CMake",
"bytes": "3055"
},
{
"name": "CSS",
"bytes": "109563"
},
{
"name": "Clojure",
"bytes": "37831"
},
{
"name": "Eagle",
"bytes": "3782687"
},
{
"name": "Emacs Lisp",
"bytes": "8514"
},
{
"name": "GAP",
"bytes": "49124"
},
{
"name": "Groff",
"bytes": "2178"
},
{
"name": "Groovy",
"bytes": "7686"
},
{
"name": "HTML",
"bytes": "4025250"
},
{
"name": "Inno Setup",
"bytes": "35715"
},
{
"name": "Java",
"bytes": "489537"
},
{
"name": "JavaScript",
"bytes": "167454"
},
{
"name": "Lua",
"bytes": "1660"
},
{
"name": "Makefile",
"bytes": "97209"
},
{
"name": "Mathematica",
"bytes": "26"
},
{
"name": "Matlab",
"bytes": "80874"
},
{
"name": "Max",
"bytes": "78198"
},
{
"name": "Modelica",
"bytes": "44541139"
},
{
"name": "Objective-C",
"bytes": "34004"
},
{
"name": "Perl",
"bytes": "19285"
},
{
"name": "PostScript",
"bytes": "400254"
},
{
"name": "PowerShell",
"bytes": "19749"
},
{
"name": "Processing",
"bytes": "1477"
},
{
"name": "Prolog",
"bytes": "3121"
},
{
"name": "Protocol Buffer",
"bytes": "58995"
},
{
"name": "Python",
"bytes": "5517835"
},
{
"name": "Ruby",
"bytes": "4483"
},
{
"name": "Shell",
"bytes": "956773"
},
{
"name": "Smarty",
"bytes": "37892"
},
{
"name": "TeX",
"bytes": "4183594"
},
{
"name": "Visual Basic",
"bytes": "22546"
},
{
"name": "XSLT",
"bytes": "332312"
}
],
"symlink_target": ""
}
|
# Code-generation options for this estimator: verbose output plus the
# model-extension classes wired into the generated estimator.
options = {
    "verbose": True,
    "model_extensions": [
        "h2o.model.extensions.ScoringHistoryTrees",
        "h2o.model.extensions.VariableImportance",
        "h2o.model.extensions.Trees",
    ],
}
deprecated_params = dict(offset_column=None)
# Docstring fragments consumed by the estimator code generator; the
# "__class__" entry becomes the generated class's docstring.
doc = {
    "__class__": """
Builds a Distributed Random Forest (DRF) on a parsed dataset, for regression or
classification.
"""
}
examples = dict(
training_frame="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8],
... seed=1234)
>>> cars_drf = H2ORandomForestEstimator(seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.auc(valid=True)
""",
validation_frame="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8],
... seed=1234)
>>> cars_drf = H2ORandomForestEstimator(seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.auc(valid=True)
""",
nfolds="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> folds = 5
>>> cars_drf = H2ORandomForestEstimator(nfolds=folds,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=cars)
>>> cars_drf.auc(xval=True)
""",
keep_cross_validation_models="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(keep_cross_validation_models=True,
... nfolds=5,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train)
>>> cars_drf.auc()
""",
keep_cross_validation_predictions="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(keep_cross_validation_predictions=True,
... nfolds=5,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train)
>>> cars_drf.cross_validation_predictions()
""",
keep_cross_validation_fold_assignment="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(keep_cross_validation_fold_assignment=True,
... nfolds=5,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train)
>>> cars_drf.cross_validation_fold_assignment()
""",
score_each_iteration="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(score_each_iteration=True,
... ntrees=55,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame = valid)
>>> cars_drf.scoring_history()
""",
score_tree_interval="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(score_tree_interval=5,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.scoring_history()
""",
fold_assignment="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> assignment_type = "Random"
>>> cars_drf = H2ORandomForestEstimator(fold_assignment=assignment_type,
... nfolds=5,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=cars)
>>> cars_drf.auc(xval=True)
""",
fold_column="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> fold_numbers = cars.kfold_column(n_folds=5, seed=1234)
>>> fold_numbers.set_names(["fold_numbers"])
>>> cars = cars.cbind(fold_numbers)
>>> print(cars['fold_numbers'])
>>> cars_drf = H2ORandomForestEstimator(seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=cars,
... fold_column="fold_numbers")
>>> cars_drf.auc(xval=True)
""",
ignore_const_cols="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> cars["const_1"] = 6
>>> cars["const_2"] = 7
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(seed=1234,
... ignore_const_cols=True)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.auc(valid=True)
""",
weights_column="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8],
... seed=1234)
>>> cars_drf = H2ORandomForestEstimator(seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid,
... weights_column="weight")
>>> cars_drf.auc(valid=True)
""",
balance_classes="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8], seed=1234)
>>> cov_drf = H2ORandomForestEstimator(balance_classes=True,
... seed=1234)
>>> cov_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('logloss', cov_drf.logloss(valid=True))
""",
class_sampling_factors="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8], seed=1234)
>>> print(covtype[54].table())
>>> sample_factors = [1., 0.5, 1., 1., 1., 1., 1.]
>>> cov_drf = H2ORandomForestEstimator(balance_classes=True,
... class_sampling_factors=sample_factors,
... seed=1234)
>>> cov_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('logloss', cov_drf.logloss(valid=True))
""",
max_after_balance_size="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8], seed=1234)
>>> print(covtype[54].table())
>>> max = .85
>>> cov_drf = H2ORandomForestEstimator(balance_classes=True,
... max_after_balance_size=max,
... seed=1234)
>>> cov_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('logloss', cov_drf.logloss(valid=True))
""",
ntrees="""
>>> titanic = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> titanic['survived'] = titanic['survived'].asfactor()
>>> predictors = titanic.columns
>>> del predictors[1:3]
>>> response = 'survived'
>>> train, valid = titanic.split_frame(ratios=[.8],
... seed=1234)
>>> tree_num = [20, 50, 80, 110,
... 140, 170, 200]
>>> label = ["20", "50", "80", "110",
... "140", "170", "200"]
>>> for key, num in enumerate(tree_num):
# Input an integer for 'num' and 'key'
>>> titanic_drf = H2ORandomForestEstimator(ntrees=num,
... seed=1234)
>>> titanic_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(label[key], 'training score',
... titanic_drf.auc(train=True))
>>> print(label[key], 'validation score',
... titanic_drf.auc(valid=True))
""",
max_depth="""
>>> df = h2o.import_file(path = "http://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/titanic.csv")
>>> response = "survived"
>>> df[response] = df[response].asfactor()
>>> predictors = df.columns
>>> del predictors[1:3]
>>> train, valid, test = df.split_frame(ratios=[0.6,0.2],
... seed=1234,
... destination_frames=
... ['train.hex','valid.hex','test.hex'])
>>> drf = H2ORandomForestEstimator()
>>> drf.train(x=predictors,
... y=response,
... training_frame=train)
>>> perf = drf.model_performance(valid)
>>> print(perf.auc())
""",
min_rows="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(min_rows=16,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cars_drf.auc(valid=True))
""",
nbins="""
>>> eeg = h2o.import_file("https://h2o-public-test-data.s3.amazonaws.com/smalldata/eeg/eeg_eyestate.csv")
>>> eeg['eyeDetection'] = eeg['eyeDetection'].asfactor()
>>> predictors = eeg.columns[:-1]
>>> response = 'eyeDetection'
>>> train, valid = eeg.split_frame(ratios=[.8], seed=1234)
>>> bin_num = [16, 32, 64, 128, 256, 512]
>>> label = ["16", "32", "64", "128", "256", "512"]
>>> for key, num in enumerate(bin_num):
# Insert integer for 'num' and 'key'
>>> eeg_drf = H2ORandomForestEstimator(nbins=num, seed=1234)
>>> eeg_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(label[key], 'training score',
... eeg_drf.auc(train=True))
>>> print(label[key], 'validation score',
...          eeg_drf.auc(valid=True))
""",
nbins_top_level="""
>>> eeg = h2o.import_file("https://h2o-public-test-data.s3.amazonaws.com/smalldata/eeg/eeg_eyestate.csv")
>>> eeg['eyeDetection'] = eeg['eyeDetection'].asfactor()
>>> predictors = eeg.columns[:-1]
>>> response = 'eyeDetection'
>>> train, valid = eeg.split_frame(ratios=[.8],
... seed=1234)
>>> bin_num = [32, 64, 128, 256, 512,
... 1024, 2048, 4096]
>>> label = ["32", "64", "128", "256",
... "512", "1024", "2048", "4096"]
>>> for key, num in enumerate(bin_num):
# Insert integer for 'num' and 'key'
>>> eeg_drf = H2ORandomForestEstimator(nbins_top_level=32,
... seed=1234)
>>> eeg_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(label[key], 'training score',
...          eeg_drf.auc(train=True))
>>> print(label[key], 'validation score',
...          eeg_drf.auc(valid=True))
""",
nbins_cats="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> bin_num = [8, 16, 32, 64, 128, 256,
... 512, 1024, 2048, 4096]
>>> label = ["8", "16", "32", "64", "128",
... "256", "512", "1024", "2048", "4096"]
>>> for key, num in enumerate(bin_num):
# Insert integer for 'num' and 'key'
>>> airlines_drf = H2ORandomForestEstimator(nbins_cats=num,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(label[key], 'training score',
...          airlines_drf.auc(train=True))
>>> print(label[key], 'validation score',
...          airlines_drf.auc(valid=True))
""",
stopping_rounds="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_drf.auc(valid=True)
""",
stopping_metric="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_drf.auc(valid=True)
""",
stopping_tolerance="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(stopping_metric="auc",
... stopping_rounds=3,
... stopping_tolerance=1e-2,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_drf.auc(valid=True)
""",
max_runtime_secs="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(max_runtime_secs=10,
... ntrees=10000,
... max_depth=10,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.auc(valid = True)
""",
seed="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> drf_w_seed_1 = H2ORandomForestEstimator(seed=1234)
>>> drf_w_seed_1.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('auc for the 1st model build with a seed:',
... drf_w_seed_1.auc(valid=True))
""",
build_tree_one_node="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(build_tree_one_node=True,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.auc(valid=True)
""",
mtries="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8], seed=1234)
>>> cov_drf = H2ORandomForestEstimator(mtries=30, seed=1234)
>>> cov_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('logloss', cov_drf.logloss(valid=True))
""",
sample_rate="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8],
... seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(sample_rate=.7,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_drf.auc(valid=True))
""",
sample_rate_per_class="""
>>> covtype = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/covtype/covtype.20k.data")
>>> covtype[54] = covtype[54].asfactor()
>>> predictors = covtype.columns[0:54]
>>> response = 'C55'
>>> train, valid = covtype.split_frame(ratios=[.8],
... seed=1234)
>>> print(train[response].table())
>>> rate_per_class_list = [1, .4, 1, 1, 1, 1, 1]
>>> cov_drf = H2ORandomForestEstimator(sample_rate_per_class=rate_per_class_list,
... seed=1234)
>>> cov_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('logloss', cov_drf.logloss(valid=True))
""",
binomial_double_trees="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(binomial_double_trees=False,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('without binomial_double_trees:',
... cars_drf.auc(valid=True))
>>> cars_drf_2 = H2ORandomForestEstimator(binomial_double_trees=True,
... seed=1234)
>>> cars_drf_2.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print('with binomial_double_trees:', cars_drf_2.auc(valid=True))
""",
checkpoint="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8],
... seed=1234)
>>> cars_drf = H2ORandomForestEstimator(ntrees=1,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cars_drf.auc(valid=True))
""",
col_sample_rate_change_per_level="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(col_sample_rate_change_per_level=.9,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_drf.auc(valid=True))
""",
col_sample_rate_per_tree="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(col_sample_rate_per_tree=.7,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_drf.auc(valid=True))
""",
min_split_improvement="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> cars["economy_20mpg"] = cars["economy_20mpg"].asfactor()
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "economy_20mpg"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(min_split_improvement=1e-3,
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(cars_drf.auc(valid=True))
""",
histogram_type="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> airlines_drf = H2ORandomForestEstimator(histogram_type="UniformAdaptive",
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> print(airlines_drf.auc(valid=True))
""",
categorical_encoding="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip")
>>> airlines["Year"] = airlines["Year"].asfactor()
>>> airlines["Month"] = airlines["Month"].asfactor()
>>> airlines["DayOfWeek"] = airlines["DayOfWeek"].asfactor()
>>> airlines["Cancelled"] = airlines["Cancelled"].asfactor()
>>> airlines['FlightNum'] = airlines['FlightNum'].asfactor()
>>> predictors = ["Origin", "Dest", "Year", "UniqueCarrier",
... "DayOfWeek", "Month", "Distance", "FlightNum"]
>>> response = "IsDepDelayed"
>>> train, valid= airlines.split_frame(ratios=[.8], seed=1234)
>>> encoding = "one_hot_explicit"
>>> airlines_drf = H2ORandomForestEstimator(categorical_encoding=encoding,
... seed=1234)
>>> airlines_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> airlines_drf.auc(valid=True)
""",
calibrate_model="""
>>> ecology = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/ecology_model.csv")
>>> ecology['Angaus'] = ecology['Angaus'].asfactor()
>>> from h2o.estimators.random_forest import H2ORandomForestEstimator
>>> response = 'Angaus'
>>> predictors = ecology.columns[3:13]
>>> train, calib = ecology.split_frame(seed=12354)
>>> w = h2o.create_frame(binary_fraction=1,
... binary_ones_fraction=0.5,
... missing_fraction=0,
... rows=744, cols=1)
>>> w.set_names(["weight"])
>>> train = train.cbind(w)
>>> ecology_drf = H2ORandomForestEstimator(ntrees=10,
... max_depth=5,
... min_rows=10,
... distribution="multinomial",
... weights_column="weight",
... calibrate_model=True,
... calibration_frame=calib)
>>> ecology_drf.train(x=predictors,
... y="Angaus",
... training_frame=train)
>>> predicted = ecology_drf.predict(calib)
""",
calibration_frame="""
>>> ecology = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/ecology_model.csv")
>>> ecology['Angaus'] = ecology['Angaus'].asfactor()
>>> response = 'Angaus'
>>> predictors = ecology.columns[3:13]
>>> train, calib = ecology.split_frame(seed = 12354)
>>> w = h2o.create_frame(binary_fraction=1,
... binary_ones_fraction=0.5,
... missing_fraction=0,
... rows=744, cols=1)
>>> w.set_names(["weight"])
>>> train = train.cbind(w)
>>> ecology_drf = H2ORandomForestEstimator(ntrees=10,
... max_depth=5,
... min_rows=10,
... distribution="multinomial",
... calibrate_model=True,
... calibration_frame=calib)
>>> ecology_drf.train(x=predictors,
...                       y="Angaus",
... training_frame=train,
... weights_column="weight")
>>> predicted = ecology_drf.predict(train)
""",
distribution="""
>>> cars = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv")
>>> predictors = ["displacement","power","weight","acceleration","year"]
>>> response = "cylinders"
>>> train, valid = cars.split_frame(ratios=[.8], seed=1234)
>>> cars_drf = H2ORandomForestEstimator(distribution="poisson",
... seed=1234)
>>> cars_drf.train(x=predictors,
... y=response,
... training_frame=train,
... validation_frame=valid)
>>> cars_drf.mse(valid=True)
""",
export_checkpoints_dir="""
>>> import tempfile
>>> from os import listdir
>>> from h2o.grid.grid_search import H2OGridSearch
>>> airlines = h2o.import_file("http://s3.amazonaws.com/h2o-public-test-data/smalldata/airlines/allyears2k_headers.zip", destination_frame="air.hex")
>>> predictors = ["DayofMonth", "DayOfWeek"]
>>> response = "IsDepDelayed"
>>> hyper_parameters = {'ntrees': [5,10]}
>>> search_crit = {'strategy': "RandomDiscrete",
... 'max_models': 5,
... 'seed': 1234,
... 'stopping_rounds': 3,
... 'stopping_metric': "AUTO",
... 'stopping_tolerance': 1e-2}
>>> checkpoints_dir = tempfile.mkdtemp()
>>> air_grid = H2OGridSearch(H2ORandomForestEstimator,
... hyper_params=hyper_parameters,
... search_criteria=search_crit)
>>> air_grid.train(x=predictors,
... y=response,
... training_frame=airlines,
... distribution="bernoulli",
... max_depth=3,
... export_checkpoints_dir=checkpoints_dir)
>>> num_files = len(listdir(checkpoints_dir))
>>> num_files
""",
check_constant_response="""
>>> train = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/iris/iris_train.csv")
>>> train["constantCol"] = 1
>>> my_drf = H2ORandomForestEstimator(check_constant_response=False)
>>> my_drf.train(x=list(range(1,5)),
... y="constantCol",
... training_frame=train)
""",
gainslift_bins="""
>>> airlines= h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/testng/airlines_train.csv")
>>> model = H2ORandomForestEstimator(ntrees=1, gainslift_bins=20)
>>> model.train(x=["Origin", "Distance"],
... y="IsDepDelayed",
... training_frame=airlines)
>>> model.gains_lift()
"""
)
|
{
"content_hash": "59b5b395577629129db99fdb98ce7279",
"timestamp": "",
"source": "github",
"line_count": 766,
"max_line_length": 149,
"avg_line_length": 46.265013054830284,
"alnum_prop": 0.5736335675385874,
"repo_name": "michalkurka/h2o-3",
"id": "34799dd1b039c0e4c9a16575cfb49e0e2cb0f332",
"size": "35439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-bindings/bin/custom/python/gen_drf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12629"
},
{
"name": "CSS",
"bytes": "231770"
},
{
"name": "CoffeeScript",
"bytes": "7550"
},
{
"name": "Dockerfile",
"bytes": "10302"
},
{
"name": "Emacs Lisp",
"bytes": "2226"
},
{
"name": "Groovy",
"bytes": "166480"
},
{
"name": "HCL",
"bytes": "15007"
},
{
"name": "HTML",
"bytes": "251906"
},
{
"name": "HiveQL",
"bytes": "3965"
},
{
"name": "Java",
"bytes": "11932863"
},
{
"name": "JavaScript",
"bytes": "89484"
},
{
"name": "Jupyter Notebook",
"bytes": "13867219"
},
{
"name": "Makefile",
"bytes": "50635"
},
{
"name": "Python",
"bytes": "6801044"
},
{
"name": "R",
"bytes": "3223113"
},
{
"name": "Ruby",
"bytes": "3506"
},
{
"name": "Scala",
"bytes": "33647"
},
{
"name": "Shell",
"bytes": "186559"
},
{
"name": "TeX",
"bytes": "634412"
}
],
"symlink_target": ""
}
|
__author__ = "Vasyl Khomenko"
__copyright__ = "Copyright 2013, Qubell.com"
__license__ = "Apache"
__version__ = "1.0.1"
__email__ = "vkhomenko@qubell.com"
import os
from qubell.api.testing import *
def manifest(name):
    """Return the absolute (real) path of *name* located next to this module."""
    here = os.path.dirname(__file__)
    return os.path.realpath(os.path.join(here, name))
@environment({
    # Two test environments: "default" and "custom"; each pins the VM image
    # and identity via provisionVms policies.
    'default': {
        'policies': [{'action': 'provisionVms', 'parameter': 'imageId', 'value': 'reg/ami-777'},
                     {'action': 'provisionVms', 'parameter': 'vmIdentity', 'value': 'ubuntu7'}]
    },
    'custom': {
        'policies': [{'action': 'provisionVms', 'parameter': 'imageId', 'value': 'reg/ami-888'},
                     {'action': 'provisionVms', 'parameter': 'vmIdentity', 'value': 'ubuntu8'}],
        'markers': ['test-marker'],
        'properties': [{'name': 'testprop', 'type': 'string', 'value': 'test-prop value'}]
    }})
class SandboxClassTest(BaseComponentTestCase):
    """Sandbox smoke test: launches the application declared in ``apps`` and
    checks that the environments declared in the decorator are visible with
    their policies, markers and properties."""
    name = 'SelfSandboxTest'
    apps = [{"name": name,
             "file": manifest('default.yml'),
             "settings": {"destroyInterval": 300000},
             "parameters": {
                 "in.app_input": "dddd"}
             }]
    # noinspection PyShadowingNames
    @instance(byApplication=name)
    def test_instance(self, instance):
        # NOTE(review): the @instance decorator presumably injects the
        # launched instance for this application -- confirm in qubell docs.
        assert 'Active' == instance.status
        app = self.organization.applications[self.name]
        assert instance in app.instances
    def test_env(self):
        # Verify the decorator's environment definitions were applied.
        default_env = self.organization.environments['default'].json()
        custom_env = self.organization.environments['custom'].json()
        assert 'reg/ami-777' in [x['value'] for x in default_env['policies']]
        assert 'ubuntu7' in [x['value'] for x in default_env['policies']]
        assert 'ubuntu8' in [x['value'] for x in custom_env['policies']]
        assert 'reg/ami-888' in [x['value'] for x in custom_env['policies']]
        assert 'test-marker' in [x['name'] for x in custom_env['markers']]
        assert 'test-prop value' in [x['value'] for x in custom_env['properties']]
|
{
"content_hash": "1a65c116ae48d5e4f274ada4d933d042",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 96,
"avg_line_length": 38.60377358490566,
"alnum_prop": 0.5899315738025416,
"repo_name": "vasichkin/contrib-python-qubell-client",
"id": "e54e55ab8d5470cdaea5e76a476a0a00ca6ce281",
"size": "2643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integration_tests/test_sandbox.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "355841"
},
{
"name": "Shell",
"bytes": "153"
}
],
"symlink_target": ""
}
|
"""Fake LDAP server for test harness.
This class does very little error checking, and knows nothing about ldap
class definitions. It implements the minimum emulation of the python ldap
library to work with nova.
"""
import fnmatch
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
class Store(object):
    """Singleton access point for the fake LDAP backing store (_StorageDict)."""
    def __init__(self):
        # Refuse direct construction once the singleton exists; callers are
        # expected to use Store.instance() instead.
        # NOTE(review): a Store() created *before* instance() is first called
        # slips through this guard -- confirm no caller relies on that.
        if hasattr(self.__class__, '_instance'):
            raise Exception(_('Attempted to instantiate singleton'))
    @classmethod
    def instance(cls):
        # Lazily create the shared _StorageDict on first access.
        if not hasattr(cls, '_instance'):
            cls._instance = _StorageDict()
        return cls._instance
class _StorageDict(dict):
    """Dict with a small redis-like interface: glob key listing plus
    hash-per-key operations (hget/hset/hgetall/hmset)."""

    def keys(self, pat=None):
        """Return all keys, optionally filtered by glob pattern *pat*."""
        found = super(_StorageDict, self).keys()
        return fnmatch.filter(found, pat) if pat is not None else found

    def delete(self, key):
        """Remove *key*; missing keys are silently ignored."""
        self.pop(key, None)

    def flushdb(self):
        """Drop every stored key."""
        self.clear()

    def hgetall(self, key):
        """Return the hash stored at *key*, creating it if absent."""
        return self.setdefault(key, {})

    def hget(self, key, field):
        """Return one field of the hash at *key*.

        A missing field is created as an empty dict placeholder, matching
        the original try/except behavior.
        """
        return self.hgetall(key).setdefault(field, {})

    def hset(self, key, field, val):
        """Set a single field of the hash at *key*."""
        self.hgetall(key)[field] = val

    def hmset(self, key, value_dict):
        """Merge every field of *value_dict* into the hash at *key*."""
        self.hgetall(key).update(value_dict)
# Search-scope constants mirroring the real python-ldap module.
SCOPE_BASE = 0
SCOPE_ONELEVEL = 1 # Not implemented
SCOPE_SUBTREE = 2
# Modification opcodes for modify_s(), mirroring python-ldap.
MOD_ADD = 0
MOD_DELETE = 1
MOD_REPLACE = 2
class NO_SUCH_OBJECT(Exception):  # pylint: disable=C0103
    """Duplicate exception class from real LDAP module.

    Raised by FakeLDAP.search_s/modrdn_s when the dn matches nothing.
    """
    pass
class OBJECT_CLASS_VIOLATION(Exception):  # pylint: disable=C0103
    """Duplicate exception class from real LDAP module.

    NOTE(review): not raised anywhere in this fake; kept for API parity.
    """
    pass
class SERVER_DOWN(Exception):  # pylint: disable=C0103
    """Duplicate exception class from real LDAP module.

    Raised by every FakeLDAP method while the module-level ``server_fail``
    flag is set, to simulate an unreachable server.
    """
    pass
def initialize(_uri):
    """Opens a fake connection with an LDAP server.

    The uri argument is accepted for API compatibility but ignored.
    """
    return FakeLDAP()
def _match_query(query, attrs):
    """Match an ldap query to an attribute dictionary.
    The characters &, |, and ! are supported in the query. No syntax checking
    is performed, so malformed queries will not work correctly.
    """
    # cut off the parentheses
    inner = query[1:-1]
    if inner.startswith('&'):
        # cut off the &
        # NOTE(review): assumes exactly two parenthesized subterms -- a
        # three-term (&(a)(b)(c)) filter would be handled incorrectly.
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) and _match_query(r, attrs)
    if inner.startswith('|'):
        # cut off the |
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) or _match_query(r, attrs)
    if inner.startswith('!'):
        # cut off the ! and the nested parentheses
        return not _match_query(query[2:-1], attrs)
    # Plain "(key=value)" leaf: delegate to the attribute matcher.
    (k, _sep, v) = inner.partition('=')
    return _match(k, v, attrs)
def _paren_groups(source):
    """Split a string into top-level parenthesized groups.

    "(a)(b)" yields ["(a)", "(b)"]; nested parentheses stay inside their
    enclosing group.
    """
    depth = 0
    start = 0
    groups = []
    for pos, char in enumerate(source):
        if char == '(':
            if depth == 0:
                start = pos
            depth += 1
        if char == ')':
            depth -= 1
            if depth == 0:
                groups.append(source[start:pos + 1])
    return groups
def _match(key, value, attrs):
    """Match a given key and value against an attribute list."""
    if key not in attrs:
        return False
    # "*" is an all-or-nothing wildcard: any present attribute matches.
    if value == "*":
        return True
    if key != "objectclass":
        return value in attrs[key]
    # objectclass comparisons must also consider known subclasses.
    return any(sub in attrs[key] for sub in _subs(value))
def _subs(value):
    """Returns a list of subclass strings.

    The strings represent the ldap object class plus any subclasses that
    inherit from it. Fakeldap doesn't know about the ldap object structure,
    so subclasses need to be defined manually in the dictionary below.
    """
    known_subclasses = {'groupOfNames': ['novaProject']}
    return [value] + known_subclasses.get(value, [])
def _from_json(encoded):
    """Convert attribute values from json representation.
    Args:
    encoded -- a json encoded string
    Returns a list of strings
    """
    # Values are coerced with str() so callers always see plain strings.
    return [str(x) for x in jsonutils.loads(encoded)]
def _to_json(unencoded):
    """Convert attribute values into json representation.
    Args:
    unencoded -- an unencoded string or list of strings. If it
    is a single string, it will be converted into a list.
    Returns a json string
    """
    # list() on a bare string yields its characters -- that is the
    # "converted into a list" behavior described above.
    return jsonutils.dumps(list(unencoded))
# Global failure switch: while True, every FakeLDAP method raises SERVER_DOWN.
server_fail = False
class FakeLDAP(object):
    """Fake LDAP connection.

    Objects are stored in the Store singleton as one hash per dn, keyed by
    "ldap:<dn>"; attribute values are json-encoded lists (see _to_json).
    Python 2 only: uses dict.iteritems().
    """
    def simple_bind_s(self, dn, password):
        """This method is ignored, but provided for compatibility."""
        if server_fail:
            raise SERVER_DOWN()
        pass
    def unbind_s(self):
        """This method is ignored, but provided for compatibility."""
        if server_fail:
            raise SERVER_DOWN()
        pass
    def add_s(self, dn, attr):
        """Add an object with the specified attributes at dn."""
        if server_fail:
            raise SERVER_DOWN()
        key = "%s%s" % (self.__prefix, dn)
        # attr is a list of (attribute, value-list) pairs; values are stored
        # json-encoded.
        value_dict = dict([(k, _to_json(v)) for k, v in attr])
        Store.instance().hmset(key, value_dict)
    def delete_s(self, dn):
        """Remove the ldap object at specified dn."""
        if server_fail:
            raise SERVER_DOWN()
        Store.instance().delete("%s%s" % (self.__prefix, dn))
    def modify_s(self, dn, attrs):
        """Modify the object at dn using the attribute list.
        :param dn: a dn
        :param attrs: a list of tuples in the following form::
            ([MOD_ADD | MOD_DELETE | MOD_REPACE], attribute, value)
        """
        if server_fail:
            raise SERVER_DOWN()
        store = Store.instance()
        key = "%s%s" % (self.__prefix, dn)
        for cmd, k, v in attrs:
            values = _from_json(store.hget(key, k))
            if cmd == MOD_ADD:
                values.append(v)
            elif cmd == MOD_REPLACE:
                values = [v]
            else:
                # MOD_DELETE (and anything else) removes the value.
                values.remove(v)
            values = store.hset(key, k, _to_json(values))
    def modrdn_s(self, dn, newrdn):
        # Rename: copy the old object's attributes to the new dn, then
        # delete the old entry.
        oldobj = self.search_s(dn, SCOPE_BASE)
        if not oldobj:
            raise NO_SUCH_OBJECT()
        newdn = "%s,%s" % (newrdn, dn.partition(',')[2])
        newattrs = oldobj[0][1]
        modlist = []
        for attrtype in newattrs.keys():
            modlist.append((attrtype, newattrs[attrtype]))
        self.add_s(newdn, modlist)
        self.delete_s(dn)
    def search_s(self, dn, scope, query=None, fields=None):
        """Search for all matching objects under dn using the query.
        Args:
        dn -- dn to search under
        scope -- only SCOPE_BASE and SCOPE_SUBTREE are supported
        query -- query to filter objects by
        fields -- fields to return. Returns all fields if not specified
        """
        if server_fail:
            raise SERVER_DOWN()
        if scope != SCOPE_BASE and scope != SCOPE_SUBTREE:
            raise NotImplementedError(str(scope))
        store = Store.instance()
        if scope == SCOPE_BASE:
            pattern = "%s%s" % (self.__prefix, dn)
            keys = store.keys(pattern)
        else:
            # SUBTREE: glob-match any key whose dn ends with the given dn.
            keys = store.keys("%s*%s" % (self.__prefix, dn))
        if not keys:
            raise NO_SUCH_OBJECT()
        objects = []
        for key in keys:
            # get the attributes from the store
            attrs = store.hgetall(key)
            # turn the values from the store into lists
            # pylint: disable=E1103
            attrs = dict([(k, _from_json(v))
                          for k, v in attrs.iteritems()])
            # filter the objects by query
            if not query or _match_query(query, attrs):
                # filter the attributes by fields
                attrs = dict([(k, v) for k, v in attrs.iteritems()
                              if not fields or k in fields])
                objects.append((key[len(self.__prefix):], attrs))
        return objects
    @property
    def __prefix(self):  # pylint: disable=R0201
        """Get the prefix to use for all keys."""
        return 'ldap:'
|
{
"content_hash": "5944466bc759b73f5d152ae3353856d2",
"timestamp": "",
"source": "github",
"line_count": 314,
"max_line_length": 77,
"avg_line_length": 27.85031847133758,
"alnum_prop": 0.5726700971983991,
"repo_name": "cloudbau/nova",
"id": "ddaef8a5f8878ef61dd9ab269e66c53bfcb81bda",
"size": "9521",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "nova/tests/fake_ldap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13426937"
},
{
"name": "Shell",
"bytes": "17194"
}
],
"symlink_target": ""
}
|
import sys
import _parjtag
VERSION = "1.3"
DEBUG = 0 #disable debug messages by default
#frame specific consts
# Erase mode codes passed to _parjtag.memerase()
ERASE_MASS = 2
ERASE_MAIN = 1
ERASE_SGMT = 0
#states
FREERUNNING = 0
STOPPED = 1
#Configurations of the MSP430 driver (option ids for _parjtag.configure)
VERIFICATION_MODE = 0 #Verify data downloaded to FLASH memories.
RAMSIZE_OPTION = 1 #Change RAM used to download and program flash blocks
DEBUG_OPTION = 2 #Set debug level. Enables debug outputs.
#enumeration of output formats for uploads
HEX = 0
INTELHEX = 1
BINARY = 2
#exceptions
# Raised by the JTAG action methods on erase-check, program or verify failures.
class JTAGException(Exception): pass
#for the use with memread
def hexdump( (adr, memstr) ):
    """Print a hex dump of data collected with memread
    arg1: tuple with address, memory string
    return None"""
    # Python 2 only: tuple parameter unpacking and print statements.
    count = 0
    ascii = ''
    for value in map(ord, memstr):
        # 16 bytes per row; each row starts with its address.
        if not count: print "%04x: " % adr,
        print "%02x" % value,
        # printable ASCII shown on the right, '.' for everything else
        ascii += (32 <= value < 128) and chr(value) or '.'
        count += 1
        adr += 1
        if count == 16:
            count = 0
            print " ", ascii
            ascii = ''
    # NOTE(review): the trailer pads one space per missing byte, but each hex
    # column is 3 characters wide -- ASCII column alignment looks off; confirm.
    if count < 16: print " "*(16-count), " ", ascii
def makeihex( (address, data) ):
    """Work through the data and output lines in Intel hex format.
    An end tag is appended."""
    # Python 2 only: tuple parameter unpacking.
    start = 0
    while start<len(data):
        # Emit records of at most 16 data bytes each.
        end = start + 16
        if end > len(data): end = len(data)
        _ihexline(address, [ord(x) for x in data[start:end]])
        start += 16
        address += 16
    _ihexline(address, [], type=1) #append no data but an end line
def _ihexline(address, buffer, type=0):
    """Write one Intel-HEX record for *buffer* to stdout, with checksum.

    :param address: 16-bit record start address (masked to 0xffff)
    :param buffer: sequence of byte values (ints); None entries become 0
    :param type: record type field (0 = data, 1 = end-of-file)
    """
    sys.stdout.write(':%02X%04X%02X' % (len(buffer), address & 0xffff, type))
    # Running total for the final two's-complement checksum byte.
    # (renamed from "sum" to avoid shadowing the builtin)
    checksum = len(buffer) + ((address >> 8) & 255) + (address & 255)
    for b in buffer:
        if b is None:  # substitute nonexistent values with zero ("is", not "==")
            b = 0
        sys.stdout.write('%02X' % (b & 255))
        checksum += b & 255
    sys.stdout.write('%02X\n' % ((-checksum) & 255))
class Segment:
    """A chunk of memory contents paired with its start address."""

    def __init__(self, startaddress=0, data=None):
        # A missing data argument means an empty segment.
        self.startaddress = startaddress
        self.data = data if data is not None else ''

    def __getitem__(self, index):
        return self.data[index]

    def __len__(self):
        return len(self.data)

    def __repr__(self):
        return "Segment(startaddress = 0x%04x, data=%r)" % (
            self.startaddress, self.data)
class Memory:
    """Represent memory contents as a list of Segment objects.

    Provides loaders for Intel-HEX, TI-Text and ELF files; the format is
    chosen from the file extension (see loadFile).
    """
    def __init__(self, filename=None):
        # Segments are kept in parse order; overlapping segments not merged.
        self.segments = []
        if filename:
            self.filename = filename
            self.loadFile(filename)
    def append(self, seg):
        """Append a Segment to this memory image."""
        self.segments.append(seg)
    def __getitem__(self, index):
        return self.segments[index]
    def __len__(self):
        return len(self.segments)
    def loadIHex(self, file):
        """load data from a (opened) file in Intel-HEX format"""
        segmentdata = []
        currentAddr = 0
        startAddr = 0
        lines = file.readlines()
        for l in lines:
            if not l.strip(): continue #skip empty lines
            if l[0] != ':': raise Exception("File Format Error\n")
            l = l.strip() #fix CR-LF issues...
            # Record layout: ':' llaaaatt<data>cc (length, address, type, checksum)
            length = int(l[1:3],16)
            address = int(l[3:7],16)
            type = int(l[7:9],16)
            check = int(l[-2:],16)
            # NOTE(review): the checksum byte is parsed but never verified.
            if type == 0x00:
                # Data record: start a new segment on any address discontinuity.
                if currentAddr != address:
                    if segmentdata:
                        self.segments.append( Segment(startAddr, ''.join(segmentdata)) )
                    startAddr = currentAddr = address
                    segmentdata = []
                for i in range(length):
                    segmentdata.append( chr(int(l[9+2*i:11+2*i],16)) )
                currentAddr = length + currentAddr
            elif type == 0x01:
                # End-of-file record; pending data is flushed after the loop.
                pass
            else:
                sys.stderr.write("Ignored unknown field (type 0x%02x) in ihex file.\n" % type)
        if segmentdata:
            self.segments.append( Segment(startAddr, ''.join(segmentdata)) )
    def loadTIText(self, file):
        """load data from a (opened) file in TI-Text format"""
        next = 1
        currentAddr = 0
        startAddr = 0
        segmentdata = []
        #Convert data for MSP430, TXT-File is parsed line by line
        while next >= 1:
            #Read one line
            l = file.readline()
            if not l: break #EOF
            l = l.strip()
            if l[0] == 'q': break
            elif l[0] == '@': #if @ => new address => send frame and set new addr.
                #create a new segment
                if segmentdata:
                    self.segments.append( Segment(startAddr, ''.join(segmentdata)) )
                startAddr = currentAddr = int(l[1:],16)
                segmentdata = []
            else:
                # Plain line: whitespace-separated hex byte values.
                for i in l.split():
                    segmentdata.append(chr(int(i,16)))
        if segmentdata:
            self.segments.append( Segment(startAddr, ''.join(segmentdata)) )
    def loadELF(self, file):
        """load data from a (opened) file in ELF object format.
        File must be seekable"""
        # Deferred import: the elf module is only needed for this format.
        import elf
        obj = elf.ELFObject()
        obj.fromFile(file)
        if obj.e_type != elf.ELFObject.ET_EXEC:
            raise Exception("No executable")
        for section in obj.getSections():
            if DEBUG:
                sys.stderr.write("ELF section %s at 0x%04x %d bytes\n" % (section.name, section.lma, len(section.data)))
            if len(section.data):
                self.segments.append( Segment(section.lma, section.data) )
    def loadFile(self, filename):
        """fill memory with the contents of a file. file type is determined from extension"""
        #TODO: do a contents based detection
        if filename[-4:].lower() == '.txt':
            self.loadTIText(open(filename, "rb"))
        elif filename[-4:].lower() in ('.a43', '.hex'):
            self.loadIHex(open(filename, "rb"))
        else:
            self.loadELF(open(filename, "rb"))
    def getMemrange(self, fromadr, toadr):
        """get a range of bytes from the memory. unavailable values are filled with 0xff."""
        res = ''
        toadr = toadr + 1 #python indxes are excluding end, so include it
        while fromadr < toadr:
            for seg in self.segments:
                segend = seg.startaddress + len(seg.data)
                if seg.startaddress <= fromadr and fromadr < segend:
                    if toadr > segend: #not all data in segment
                        catchlength = segend-fromadr
                    else:
                        catchlength = toadr-fromadr
                    res = res + seg.data[fromadr-seg.startaddress : fromadr-seg.startaddress+catchlength]
                    fromadr = fromadr + catchlength #adjust start
                    if len(res) >= toadr-fromadr:
                        break #return res
                else: #undefined memory is filled with 0xff
                    # NOTE(review): this else pairs with the inner if, so a
                    # 0xff byte is appended once per NON-matching segment --
                    # confirm against upstream (a for/else variant exists).
                    res = res + chr(255)
                    fromadr = fromadr + 1 #adjust start
        return res
class JTAG:
    """High level wrapper around the _parjtag extension module.

    NOTE(review): the action* methods read ``self.data`` (a Memory instance)
    which is assigned externally before use (see main()) -- it is not set
    in __init__.
    """
    def __init__(self):
        # Progress output disabled unless enabled via -S/--progress.
        self.showprogess = 0
    def connect(self, lpt=None):
        """connect to specified or default port"""
        if lpt is None:
            _parjtag.connect()
        else:
            _parjtag.connect(lpt)
    def close(self):
        """release JTAG"""
        _parjtag.release()
    def uploadData(self, startaddress, size):
        """upload a datablock"""
        if DEBUG > 1: sys.stderr.write("* uploadData()\n")
        return _parjtag.memread(startaddress, size)
    def actionMassErase(self):
        """Erase the flash memory completely (with mass erase command)"""
        sys.stderr.write("Mass Erase...\n")
        _parjtag.memerase(ERASE_MASS)
    def actionMainErase(self):
        """Erase the MAIN flash memory, leave the INFO mem"""
        sys.stderr.write("Erase Main Flash...\n")
        _parjtag.memerase(ERASE_MAIN, 0xfffe)
    def makeActionSegmentErase(self, address):
        """Selective segment erase"""
        # Returns a callable so the erase can be queued and run later,
        # like the bound action* methods collected in main().
        class SegmentEraser:
            def __init__(self, segaddr):
                self.address = segaddr
            def __call__(self):
                sys.stderr.write("Erase Segment @ 0x%04x...\n" % self.address)
                _parjtag.memerase(ERASE_SGMT, self.address)
        return SegmentEraser(address)
    def actionEraseCheck(self):
        """check the erasure of required flash cells."""
        sys.stderr.write("Erase Check by file ...\n")
        if self.data is not None:
            # Every byte covered by the file's segments must read back 0xff.
            for seg in self.data:
                data = _parjtag.memread(seg.startaddress, len(seg.data))
                if data != '\xff'*len(seg.data): raise JTAGException("Erase check failed")
        else:
            raise JTAGException("cannot do erase check against data with not knowing the actual data")
    def progess_update(self, count, total):
        # Callback for _parjtag.set_flash_callback: rewrite one status line.
        # NOTE: integer percentage (Python 2 integer division).
        sys.stderr.write("\r%d%%" % (100*count/total))
    def actionProgram(self):
        """program data into flash memory."""
        if self.data is not None:
            sys.stderr.write("Program ...\n")
            if self.showprogess:
                _parjtag.set_flash_callback(self.progess_update)
            bytes = 0
            for seg in self.data:
                _parjtag.memwrite(seg.startaddress, seg.data)
                bytes += len(seg.data)
            if self.showprogess:
                sys.stderr.write("\r")
            sys.stderr.write("%i bytes programmed.\n" % bytes)
        else:
            raise JTAGException("programming without data not possible")
    def actionVerify(self):
        """Verify programmed data"""
        if self.data is not None:
            sys.stderr.write("Verify ...\n")
            for seg in self.data:
                data = _parjtag.memread(seg.startaddress, len(seg.data))
                if data != seg.data: raise JTAGException("Verify failed")
        else:
            raise JTAGException("verify without data not possible")
    def actionReset(self):
        """perform a reset"""
        sys.stderr.write("Reset device ...\n")
        _parjtag.reset(0, 0)
    def actionRun(self, address):
        """start program at specified address"""
        # Not supported by this backend.
        raise NotImplementedError
        #sys.stderr.write("Load PC with 0x%04x ...\n" % address)
    def funclet(self):
        """download and start funclet"""
        sys.stderr.write("Download and execute of funclet...\n")
        if len(self.data) > 1:
            raise JTAGException("don't know how to handle multiple segments in funclets")
        _parjtag.funclet(self.data[0].data)
        sys.stderr.write("Funclet OK.\n")
def usage():
    """Print the command line help text to stderr."""
    sys.stderr.write("""
USAGE: %s [options] [file]
Version: %s
If "-" is specified as file the data is read from the stdinput.
A file ending with ".txt" is considered to be in TIText format all
other filenames are considered IntelHex.
General options:
-h, --help Show this help screen.
-l, --lpt=name Specify an other parallel port.
(defaults to LPT1 (/dev/parport0 on unix)
-D, --debug Increase level of debug messages. This won't be
very useful for the average user...
-I, --intelhex Force fileformat to IntelHex
-T, --titext Force fileformat to be TIText
-f, --funclet The given file is a funclet (a small program to
be run in RAM)
-R, --ramsize Specify the amont of RAM to be used to program
flash (default 256).
Program Flow Specifiers:
-e, --masserase Mass Erase (clear all flash memory)
-m, --mainerase Erase main flash memory only
--eraseinfo Erase info flash memory only (0x1000-0x10ff)
--erase=address Selectively erase segment at the specified address
-E, --erasecheck Erase Check by file
-p, --program Program file
-v, --verify Verify by file
The order of the above options matters! The table is ordered by normal
execution order. For the options "Epv" a file must be specified.
Program flow specifiers default to "p" if a file is given.
Don't forget to specify "e" or "eE" when programming flash!
"p" already verifies the programmed data, "v" adds an additional
verification though uploading the written data for a 1:1 compare.
No default action is taken if "p" and/or "v" is given, say specifying
only "v" does a check by file of a programmed device.
Data retreiving:
-u, --upload=addr Upload a datablock (see also: -s).
-s, --size=num Size of the data block do upload. (Default is 2)
-x, --hex Show a hexadecimal display of the uploaded data.
(Default)
-b, --bin Get binary uploaded data. This can be used
to redirect the output into a file.
-i, --ihex Uploaded data is output in Intel HEX format.
This can be used to clone a device.
Do before exit:
-g, --go=address Start programm execution at specified address.
This implies option "w" (wait)
-r, --reset Reset connected MSP430. Starts application.
This is a normal device reset and will start
the programm that is specified in the reset
interrupt vector. (see also -g)
-w, --wait Wait for <ENTER> before closing parallel port.
""" % (sys.argv[0], VERSION))
def main():
global DEBUG
import getopt
filetype = None
filename = None
reset = 0
wait = 0
goaddr = None
jtag = JTAG()
toinit = []
todo = []
startaddr = None
size = 2
outputformat= HEX
lpt = None
funclet = None
ramsize = None
sys.stderr.write("MSP430 parallel JTAG programmer Version: %s\n" % VERSION)
try:
opts, args = getopt.getopt(sys.argv[1:],
"hl:weEmpvrg:Du:d:s:xbiITfR:S",
["help", "lpt=", "wait"
"masserase", "erasecheck", "mainerase", "program",
"erase=", "eraseinfo",
"verify", "reset", "go=", "debug",
"upload=", "download=", "size=", "hex", "bin", "ihex",
"intelhex", "titext", "funclet", "ramsize=", "progress"]
)
except getopt.GetoptError:
# print help information and exit:
usage()
sys.exit(2)
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-l", "--lpt"):
lpt = a
elif o in ("-w", "--wait"):
wait = 1
elif o in ("-e", "--masserase"):
toinit.append(jtag.actionMassErase) #Erase Flash
elif o in ("-E", "--erasecheck"):
toinit.append(jtag.actionEraseCheck) #Erase Check (by file)
elif o in ("-m", "--mainerase"):
toinit.append(jtag.actionMainErase) #Erase main Flash
elif o == "--erase":
try:
seg = int(a, 0)
toinit.append(jtag.makeActionSegmentErase(seg))
except ValueError:
sys.stderr.write("segment address must be a valid number in dec, hex or octal\n")
sys.exit(2)
elif o == "--eraseinfo":
toinit.append(jtag.makeActionSegmentErase(0x1000))
toinit.append(jtag.makeActionSegmentErase(0x1080))
elif o in ("-p", "--program"):
todo.append(jtag.actionProgram) #Program file
elif o in ("-v", "--verify"):
todo.append(jtag.actionVerify) #Verify file
elif o in ("-r", "--reset"):
reset = 1
elif o in ("-g", "--go"):
try:
goaddr = int(a, 0) #try to convert decimal
except ValueError:
sys.stderr.write("upload address must be a valid number in dec, hex or octal\n")
sys.exit(2)
elif o in ("-D", "--debug"):
DEBUG = DEBUG + 1
elif o in ("-u", "--upload"):
try:
startaddr = int(a, 0) #try to convert number of any base
except ValueError:
sys.stderr.write("upload address must be a valid number in dec, hex or octal\n")
sys.exit(2)
elif o in ("-s", "--size"):
try:
size = int(a, 0)
except ValueError:
sys.stderr.write("upload address must be a valid number in dec, hex or octal\n")
sys.exit(2)
#outut formats
elif o in ("-x", "--hex"):
outputformat = HEX
elif o in ("-b", "--bin"):
outputformat = BINARY
elif o in ("-i", "--ihex"):
outputformat = INTELHEX
#input formats
elif o in ("-I", "--intelhex"):
filetype = 0
elif o in ("-T", "--titext"):
filetype = 1
#others
elif o in ("-f", "--funclet"):
funclet = 1
elif o in ("-R", "--ramsize"):
try:
ramsize = int(a, 0)
except ValueError:
sys.stderr.write("ramsize must be a valid number in dec, hex or octal\n")
sys.exit(2)
elif o in ("-S", "--progress"):
jtag.showprogess = 1
if len(args) == 0:
sys.stderr.write("Use -h for help\n")
elif len(args) == 1: #a filename is given
if not funclet:
if not todo: #if there are no actions yet
todo.extend([ #add some useful actions...
jtag.actionProgram,
])
filename = args[0]
else: #number of args is wrong
usage()
sys.exit(2)
if DEBUG: #debug infos
sys.stderr.write("debug level set to %d\n" % DEBUG)
_parjtag.configure(DEBUG_OPTION, DEBUG)
sys.stderr.write("python version: %s\n" % sys.version)
#sanity check of options
if goaddr and reset:
sys.stderr.write("Warning: option --reset ignored as --go is specified!\n")
reset = 0
if startaddr and reset:
sys.stderr.write("Warning: option --reset ignored as --upload is specified!\n")
reset = 0
#prepare data to download
jtag.data = Memory() #prepare downloaded data
if filetype is not None: #if the filetype is given...
if filename is None:
raise ValueError("no filename but filetype specified")
if filename == '-': #get data from stdin
file = sys.stdin
else:
file = open(filename,"rb") #or from a file
if filetype == 0: #select load function
jtag.data.loadIHex(file) #intel hex
elif filetype == 1:
jtag.data.loadTIText(file) #TI's format
else:
raise ValueError("illegal filetype specified")
else: #no filetype given...
if filename == '-': #for stdin:
jtag.data.loadIHex(sys.stdin) #assume intel hex
elif filename:
jtag.data.loadFile(filename) #autodetect otherwise
if DEBUG > 5: sys.stderr.write("File: %r\n" % filename)
try:
jtag.connect(lpt) #try to open port
except IOError:
raise #do not handle here
else: #continue if open was successful
if ramsize is not None:
_parjtag.configure(RAMSIZE_OPTION, ramsize)
#initialization list
if toinit: #erase and erase check
if DEBUG: sys.stderr.write("Preparing device ...\n")
for f in toinit: f()
#work list
if todo:
if DEBUG > 0: #debug
#show a nice list of sheduled actions
sys.stderr.write("TODO list:\n")
for f in todo:
try:
sys.stderr.write(" %s\n" % f.func_name)
except AttributeError:
sys.stderr.write(" %r\n" % f)
for f in todo: f() #work through todo list
if reset: #reset device first if desired
jtag.actionReset()
if funclet is not None: #download and start funclet
jtag.funclet()
if goaddr is not None: #start user programm at specified address
jtag.actionRun(goaddr) #load PC and execute
#upload datablock and output
if startaddr is not None:
if goaddr: #if a program was started...
raise NotImplementedError
#TODO:
#sys.stderr.write("Waiting to device for reconnect for upload: ")
data = jtag.uploadData(startaddr, size) #upload data
if outputformat == HEX: #depending on output format
hexdump( (startaddr, data) ) #print a hex display
elif outputformat == INTELHEX:
makeihex( (startaddr, data) ) #ouput a intel-hex file
else:
sys.stdout.write(data) #binary output w/o newline!
wait = 0 #wait makes no sense as after the upload the device is still stopped
if wait: #wait at the end if desired
sys.stderr.write("Press <ENTER> ...\n") #display a prompt
raw_input() #wait for newline
_parjtag.reset(1, 1) #reset and release target
#~ jtag.actionReset()
jtag.close() #Release communication port
if __name__ == '__main__':
try:
main()
except SystemExit:
raise #let pass exit() calls
except KeyboardInterrupt:
if DEBUG: raise #show full trace in debug mode
sys.stderr.write("user abort.\n") #short messy in user mode
sys.exit(1) #set errorlevel for script usage
except Exception, msg: #every Exception is caught and displayed
if DEBUG: raise #show full trace in debug mode
sys.stderr.write("\nAn error occoured:\n%s\n" % msg) #short messy in user mode
sys.exit(1) #set errorlevel for script usage
|
{
"content_hash": "c2b2663e71dd03a13dfd3d133d23f771",
"timestamp": "",
"source": "github",
"line_count": 596,
"max_line_length": 120,
"avg_line_length": 39.48993288590604,
"alnum_prop": 0.5212440516655337,
"repo_name": "jcook/crazyIoT",
"id": "0cb556a91ded9a253ef1960f26ba0ad2af0a24b0",
"size": "23791",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "src/contiki-sensinode-cc-ports/platform/msb430/buildscripts/jtag/pyjtag/jtag.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "88132"
},
{
"name": "Awk",
"bytes": "95"
},
{
"name": "Batchfile",
"bytes": "56"
},
{
"name": "C",
"bytes": "11688262"
},
{
"name": "C#",
"bytes": "26815"
},
{
"name": "C++",
"bytes": "1582986"
},
{
"name": "CSS",
"bytes": "10705"
},
{
"name": "HTML",
"bytes": "19837"
},
{
"name": "Java",
"bytes": "2565443"
},
{
"name": "JavaScript",
"bytes": "13137"
},
{
"name": "Logos",
"bytes": "2053"
},
{
"name": "Makefile",
"bytes": "89570"
},
{
"name": "NSIS",
"bytes": "8364"
},
{
"name": "Objective-C",
"bytes": "1837"
},
{
"name": "PHP",
"bytes": "1528"
},
{
"name": "Perl",
"bytes": "87490"
},
{
"name": "Prolog",
"bytes": "116"
},
{
"name": "Python",
"bytes": "502167"
},
{
"name": "Scala",
"bytes": "368"
},
{
"name": "Shell",
"bytes": "4834"
},
{
"name": "Smalltalk",
"bytes": "7024"
},
{
"name": "XSLT",
"bytes": "4947"
}
],
"symlink_target": ""
}
|
import re
import sys
import unittest
import metricbeat
import getpass
import os
# Expected field sets per metricset.  Each test compares these lists
# (usually after de_dot translation) against the keys of the event the
# beat actually emitted, so they must match the output schema exactly.
SYSTEM_CPU_FIELDS = ["idle.pct", "iowait.pct", "irq.pct", "nice.pct",
                     "softirq.pct", "steal.pct", "system.pct", "user.pct"]
# Superset of SYSTEM_CPU_FIELDS produced when the cpu_ticks option is on.
SYSTEM_CPU_FIELDS_ALL = ["idle.pct", "idle.ticks", "iowait.pct", "iowait.ticks", "irq.pct", "irq.ticks", "nice.pct", "nice.ticks",
                         "softirq.pct", "softirq.ticks", "steal.pct", "steal.ticks", "system.pct", "system.ticks", "user.pct", "user.ticks"]
SYSTEM_LOAD_FIELDS = ["1", "5", "15", "norm.1", "norm.5", "norm.15"]
SYSTEM_CORE_FIELDS = ["id", "idle.pct", "iowait.pct", "irq.pct", "nice.pct",
                      "softirq.pct", "steal.pct", "system.pct", "user.pct"]
SYSTEM_CORE_FIELDS_ALL = SYSTEM_CORE_FIELDS + ["idle.ticks", "iowait.ticks", "irq.ticks", "nice.ticks",
                                               "softirq.ticks", "steal.ticks", "system.ticks", "user.ticks"]
SYSTEM_DISKIO_FIELDS = ["name", "read.count", "write.count", "read.bytes",
                        "write.bytes", "read.time", "write.time", "io.time"]
SYSTEM_FILESYSTEM_FIELDS = ["available", "device_name", "files", "free",
                            "free_files", "mount_point", "total", "used.bytes",
                            "used.pct"]
SYSTEM_FSSTAT_FIELDS = ["count", "total_files", "total_size"]
SYSTEM_MEMORY_FIELDS = ["swap", "actual.free", "free", "total", "used.bytes", "used.pct", "actual.used.bytes",
                        "actual.used.pct"]
# NOTE(review): "in.dropeed" looks like a typo for "in.dropped", but the list
# must mirror what the beat actually emits — verify against the metricset
# output before changing the spelling here.
SYSTEM_NETWORK_FIELDS = ["name", "out.bytes", "in.bytes", "out.packets",
                         "in.packets", "in.error", "out.error", "in.dropeed", "out.dropped"]
# cmdline is also part of the system process fields, but it may not be present
# for some kernel level processes.
SYSTEM_PROCESS_FIELDS = ["cpu", "memory", "name", "pid", "ppid", "pgid", "state",
                         "username"]
class SystemTest(metricbeat.BaseTest):
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_cpu(self):
"""
Test cpu system output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["cpu"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertEqual(len(output), 1)
evt = output[0]
self.assert_fields_are_documented(evt)
cpu = evt["system"]["cpu"]
self.assertItemsEqual(self.de_dot(SYSTEM_CPU_FIELDS), cpu.keys())
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_cpu_ticks_option(self):
"""
Test cpu_ticks configuration option.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["cpu"],
"period": "5s",
"extras": {
"cpu_ticks": True,
},
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
cpuStats = evt["system"]["cpu"]
self.assertItemsEqual(self.de_dot(SYSTEM_CPU_FIELDS_ALL), cpuStats.keys())
@unittest.skipUnless(re.match("(?i)linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_core(self):
"""
Test core system output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["core"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
core = evt["system"]["core"]
self.assertItemsEqual(self.de_dot(SYSTEM_CORE_FIELDS), core.keys())
@unittest.skipUnless(re.match("(?i)linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_core_with_cpu_ticks(self):
"""
Test core system output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["core"],
"period": "5s",
"extras": {
"cpu_ticks": True,
},
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
core = evt["system"]["core"]
self.assertItemsEqual(self.de_dot(SYSTEM_CORE_FIELDS_ALL), core.keys())
@unittest.skipUnless(re.match("(?i)linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_load(self):
"""
Test system load.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["load"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertEqual(len(output), 1)
evt = output[0]
self.assert_fields_are_documented(evt)
cpu = evt["system"]["load"]
self.assertItemsEqual(self.de_dot(SYSTEM_LOAD_FIELDS), cpu.keys())
@unittest.skipUnless(re.match("(?i)win|linux|freebsd", sys.platform), "os")
def test_diskio(self):
"""
Test system/diskio output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["diskio"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
diskio = evt["system"]["diskio"]
self.assertItemsEqual(self.de_dot(SYSTEM_DISKIO_FIELDS), diskio.keys())
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_filesystem(self):
"""
Test system/filesystem output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["filesystem"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
filesystem = evt["system"]["filesystem"]
self.assertItemsEqual(self.de_dot(SYSTEM_FILESYSTEM_FIELDS), filesystem.keys())
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_fsstat(self):
"""
Test system/fsstat output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["fsstat"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertEqual(len(output), 1)
evt = output[0]
self.assert_fields_are_documented(evt)
fsstat = evt["system"]["fsstat"]
self.assertItemsEqual(SYSTEM_FSSTAT_FIELDS, fsstat.keys())
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd|openbsd", sys.platform), "os")
def test_memory(self):
"""
Test system memory output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["memory"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertEqual(len(output), 1)
evt = output[0]
self.assert_fields_are_documented(evt)
memory = evt["system"]["memory"]
self.assertItemsEqual(self.de_dot(SYSTEM_MEMORY_FIELDS), memory.keys())
# Check that percentages are calculated.
mem = memory
if mem["total"] != 0:
used_p = float(mem["used"]["bytes"]) / mem["total"]
self.assertAlmostEqual(mem["used"]["pct"], used_p, places=4)
swap = memory["swap"]
if swap["total"] != 0:
used_p = float(swap["used"]["bytes"]) / swap["total"]
self.assertAlmostEqual(swap["used"]["pct"], used_p, places=4)
@unittest.skipUnless(re.match("(?i)darwin|win|linux|freebsd", sys.platform), "os")
def test_network(self):
"""
Test system/network output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["network"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
for evt in output:
self.assert_fields_are_documented(evt)
network = evt["system"]["network"]
self.assertItemsEqual(self.de_dot(SYSTEM_NETWORK_FIELDS), network.keys())
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd", sys.platform), "os")
def test_process(self):
"""
Test system/process output.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["process"],
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
# Ensure no errors or warnings exist in the log.
log = self.get_log()
self.assertNotRegexpMatches(log, "ERR|WARN")
output = self.read_output_json()
self.assertGreater(len(output), 0)
found_cmdline = False
for evt in output:
self.assert_fields_are_documented(evt)
process = evt["system"]["process"]
cmdline = process.pop("cmdline", None)
if cmdline is not None:
found_cmdline = True
self.assertItemsEqual(SYSTEM_PROCESS_FIELDS, process.keys())
self.assertTrue(found_cmdline, "cmdline not found in any process events")
@unittest.skipUnless(re.match("(?i)win|linux|darwin|freebsd", sys.platform), "os")
def test_process_metricbeat(self):
"""
Checks that the per proc stats are found in the output and
have the expected types.
"""
self.render_config_template(modules=[{
"name": "system",
"metricsets": ["process"],
"period": "5s",
"processes": ["(?i)metricbeat.test"]
}])
metricbeat = self.start_beat()
self.wait_until(lambda: self.output_count(lambda x: x >= 1))
metricbeat.check_kill_and_wait()
output = self.read_output()[0]
assert re.match("(?i)metricbeat.test(.exe)?", output["system.process.name"])
assert re.match("(?i).*metricbeat.test(.exe)? -systemTest", output["system.process.cmdline"])
assert isinstance(output["system.process.state"], basestring)
assert isinstance(output["system.process.cpu.start_time"], basestring)
self.check_username(output["system.process.username"])
def check_username(self, observed, expected = None):
if expected == None:
expected = getpass.getuser()
if os.name == 'nt':
parts = observed.split("\\", 2)
assert len(parts) == 2, "Expected proc.username to be of form DOMAIN\username, but was %s" % observed
observed = parts[1]
assert expected == observed, "proc.username = %s, but expected %s" % (observed, expected)
|
{
"content_hash": "138de1688f7cab9d189af7834df0ec90",
"timestamp": "",
"source": "github",
"line_count": 387,
"max_line_length": 136,
"avg_line_length": 35.974160206718345,
"alnum_prop": 0.564574055451803,
"repo_name": "phenomenes/varnishbeat",
"id": "ce1c29b3ed9b4788eae6615d1966910eded905db",
"size": "13922",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/github.com/elastic/beats/metricbeat/tests/system/test_system.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Go",
"bytes": "3820"
},
{
"name": "Makefile",
"bytes": "930"
},
{
"name": "Python",
"bytes": "825"
}
],
"symlink_target": ""
}
|
"""
Stub file to work around django bug: https://code.djangoproject.com/ticket/7198
"""
|
{
"content_hash": "3fccbb8f29b0d7cc00bc3546dfd8377a",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 79,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.7272727272727273,
"repo_name": "dreamhost/akanda-horizon",
"id": "dfb074ed6022b367452fc45c2569b80ff1c8be7a",
"size": "696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "akanda/horizon/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "151"
},
{
"name": "JavaScript",
"bytes": "122"
},
{
"name": "Python",
"bytes": "141454"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Merge migration: joins the 0026 and 0027 branches of the
    # form_processor migration graph.  It intentionally performs no
    # schema operations of its own.

    dependencies = [
        ('form_processor', '0026_xforminstancesql_initial_processing_complete'),
        ('form_processor', '0027_allow_null_form_uuid_in_case_transaction'),
    ]
    operations = [
    ]
|
{
"content_hash": "e17e12ed7d4ea4eb5090a9ce8b7a7355",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 80,
"avg_line_length": 23.357142857142858,
"alnum_prop": 0.6819571865443425,
"repo_name": "qedsoftware/commcare-hq",
"id": "538e0a9b18ed56a42c9349668f525f5ca4a2c213",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/form_processor/migrations/0028_merge.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
}
|
"""Install boost headers into a list of toolchains, in such a way that it gets
pulled into the SDK installer. Note that this script only installs boost
headers, and does not build any of the boost libraries that require building.
"""
import build_utils
import os
import shutil
import sys
import tarfile
import tempfile
import urllib
from optparse import OptionParser
# The original boost distro can be found here:
# http://sourceforge.net/projects/boost/files/boost/1.47.0/\
# boost_1_47_0.tar.gz/download
BOOST_URL = ('http://commondatastorage.googleapis.com/nativeclient-mirror'
'/nacl/boost_1_47_0.tar.gz')
BOOST_PATH = 'boost_1_47_0'
def DownloadAndExtract(working_dir, url, path):
boost_path = os.path.abspath(os.path.join(working_dir, path))
print 'Download: %s' % url
try:
(tgz_file, headers) = urllib.urlretrieve(url, '%s.tgz' % boost_path)
tar = None
try:
tar = tarfile.open(tgz_file)
tar.extractall(working_dir)
finally:
if tar:
tar.close()
except (URLError, ContentTooShortError):
print 'Error retrieving %s' % url
raise
# Install the boost headers into the toolchains.
def InstallBoost(options):
# Create a temporary working directory. This is where all the tar files go
# and where the packages get built prior to installation in the toolchain.
working_dir = tempfile.mkdtemp(prefix='boost')
try:
DownloadAndExtract(working_dir, BOOST_URL, BOOST_PATH)
except:
print "Error in download"
return 1
boost_include = options.third_party_dir
build_utils.ForceMakeDirs(boost_include)
boost_path = os.path.abspath(os.path.join(working_dir, BOOST_PATH))
# Copy the headers.
print 'Installing boost headers into %s...' % boost_include
dst_include_dir = os.path.join(boost_include, 'boost')
shutil.rmtree(dst_include_dir, ignore_errors=True)
shutil.copytree(os.path.join(boost_path, 'boost'),
dst_include_dir,
symlinks=True)
# Copy the license file.
print 'Installing boost license...'
shutil.copy(os.path.join(boost_path, 'LICENSE_1_0.txt'), dst_include_dir)
# Clean up.
shutil.rmtree(working_dir, ignore_errors=True)
return 0
# Parse the command-line args and set up the options object. There is one
# command-line switch:
# --toolchain=<path to the platform-specific toolchain>
# e.g.: --toolchain=../toolchain/mac-x86
# default is 'toolchain'.
# --toolchain can appear more than once, the Boost library is
# installed into each toolchain listed.
def main(argv):
parser = OptionParser()
parser.add_option(
'-t', '--toolchain', dest='toolchains',
action='append',
type='string',
help='NaCl toolchain directory')
parser.add_option(
'--third-party', dest='third_party_dir',
type='string',
default='third_party',
help='location of third_party directory')
(options, args) = parser.parse_args(argv)
if args:
print 'WARNING: unrecognized argument: %s' % str(args)
parser.print_help()
if not options.toolchains:
options.toolchains = [build_utils.TOOLCHAIN_AUTODETECT]
options.toolchains = [build_utils.NormalizeToolchain(tc)
for tc in options.toolchains]
print "Installing boost into %s" % str(options.third_party_dir)
return InstallBoost(options)
if __name__ == '__main__':
  # BUG FIX: propagate main()'s return value as the process exit status so
  # build scripts can detect an installation failure (it used to be dropped,
  # making the script always exit 0).
  sys.exit(main(sys.argv[1:]))
|
{
"content_hash": "cc931ed76e0f489668995b1ebb75e225",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 78,
"avg_line_length": 32.98076923076923,
"alnum_prop": 0.6807580174927114,
"repo_name": "aYukiSekiguchi/ACCESS-Chromium",
"id": "7252e281648fc8fdc5ebf3cd33d61c2d578c2fe8",
"size": "3617",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "native_client_sdk/src/build_tools/install_boost/install_boost.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1174606"
},
{
"name": "C",
"bytes": "65916105"
},
{
"name": "C++",
"bytes": "113472993"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Go",
"bytes": "10440"
},
{
"name": "Java",
"bytes": "11354"
},
{
"name": "JavaScript",
"bytes": "8864255"
},
{
"name": "Objective-C",
"bytes": "8990130"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "Perl",
"bytes": "903036"
},
{
"name": "Python",
"bytes": "5269405"
},
{
"name": "R",
"bytes": "524"
},
{
"name": "Shell",
"bytes": "4123452"
},
{
"name": "Tcl",
"bytes": "277077"
}
],
"symlink_target": ""
}
|
import json
import logging
from django.conf import settings
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from desktop import appmanager
from desktop.lib.django_util import render, login_notrequired
from desktop.log.access import access_log_level
from desktop.models import Settings
from desktop.views import collect_usage
@login_notrequired
@access_log_level(logging.DEBUG)
def admin_wizard(request):
  """Render the admin wizard page.

  Superusers see the installed apps (ordered by menu index); everyone else
  gets an empty app list.
  """
  apps = appmanager.get_apps(request.user) if request.user.is_superuser else []
  ordered_apps = sorted(apps, key=lambda app: app.menu_index)
  app_names = [app.name for app in ordered_apps]
  return render('admin_wizard.mako', request, {
    'version': settings.HUE_DESKTOP_VERSION,
    'apps': dict([(app.name, app) for app in apps]),
    'app_names': app_names,
    'tours_and_tutorials': Settings.get_settings().tours_and_tutorials,
    'collect_usage': collect_usage(),
  })
def update_preferences(request):
response = {'status': -1, 'data': ''}
if request.method == 'POST':
try:
settings = Settings.get_settings()
settings.tours_and_tutorials = request.POST.get('tours_and_tutorials', False)
settings.collect_usage = request.POST.get('collect_usage', False)
settings.save()
response['status'] = 0
response['tours_and_tutorials'] = settings.tours_and_tutorials
response['collect_usage'] = settings.collect_usage
except Exception, e:
response['data'] = str(e)
else:
response['data'] = _('POST request required.')
return HttpResponse(json.dumps(response), mimetype="application/json")
|
{
"content_hash": "6e7431186bf653067a6e6ae66afdd2cb",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 83,
"avg_line_length": 31.78846153846154,
"alnum_prop": 0.7047791893526921,
"repo_name": "yongshengwang/builthue",
"id": "ed980f85632a73a7d4b947400a8cd318f28ac005",
"size": "2446",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/about/src/about/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "207947"
},
{
"name": "C",
"bytes": "10774013"
},
{
"name": "C++",
"bytes": "184593"
},
{
"name": "CSS",
"bytes": "655282"
},
{
"name": "Emacs Lisp",
"bytes": "14875"
},
{
"name": "GAP",
"bytes": "11337"
},
{
"name": "Java",
"bytes": "3080564"
},
{
"name": "JavaScript",
"bytes": "2418037"
},
{
"name": "Makefile",
"bytes": "86977"
},
{
"name": "Perl",
"bytes": "161801"
},
{
"name": "PigLatin",
"bytes": "282"
},
{
"name": "Prolog",
"bytes": "4590"
},
{
"name": "Python",
"bytes": "29990389"
},
{
"name": "Shell",
"bytes": "38643"
},
{
"name": "TeX",
"bytes": "129526"
},
{
"name": "Thrift",
"bytes": "99710"
},
{
"name": "XSLT",
"bytes": "367778"
}
],
"symlink_target": ""
}
|
"""
Constructs a data source for the ga4gh server by downloading data from
authoritative remote servers.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import gzip
import os
import requests
import shutil
import subprocess
import tempfile
import urllib2
import pysam
import utils
utils.ga4ghImportGlue()
# We need to turn off QA because of the import glue
import ga4gh.datarepo as datarepo # NOQA
import ga4gh.datamodel.references as references # NOQA
import ga4gh.datamodel.datasets as datasets # NOQA
import ga4gh.datamodel.variants as variants # NOQA
import ga4gh.datamodel.reads as reads # NOQA
class ChromMinMax(object):
    """
    A container class for storing the min and max position seen
    for every chromosome
    """
    # Sentinels chosen so the first recorded position always wins.
    defaultMinPos = 2**30
    defaultMaxPos = 0

    class MinMax(object):
        """Running min/max pair for a single chromosome."""
        def __init__(self):
            self.minPos = ChromMinMax.defaultMinPos
            self.maxPos = ChromMinMax.defaultMaxPos

    def __init__(self):
        self.chromMap = {}

    def addPos(self, chrom, position):
        """Fold position into the running min/max for chrom."""
        try:
            entry = self.chromMap[chrom]
        except KeyError:
            entry = self.chromMap[chrom] = self.MinMax()
        entry.minPos = min(entry.minPos, position)
        entry.maxPos = max(entry.maxPos, position)

    def getMinPos(self, chrom):
        """Return the smallest position recorded for chrom."""
        return self.chromMap[chrom].minPos

    def getMaxPos(self, chrom):
        """Return the largest position recorded for chrom."""
        return self.chromMap[chrom].maxPos
def _fetchSequence(ac, startIndex=None, endIndex=None):
    """Fetch a sequence for accession ac from NCBI efetch.

    When both startIndex and endIndex are given, only that interbase
    interval is requested (efetch uses 1-based inclusive coordinates, so
    startIndex is shifted by one).  Returns the response as a list of
    already line-wrapped sequence lines, FASTA header removed.
    """
    baseUrl = (
        "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?"
        "db=nucleotide&id={ac}&rettype=fasta&retmode=text")
    if startIndex is not None and endIndex is not None:
        rangedUrl = baseUrl + "&seq_start={start}&seq_stop={stop}"
        url = rangedUrl.format(ac=ac, start=startIndex + 1, stop=endIndex)
    else:
        url = baseUrl.format(ac=ac)
    response = requests.get(url)
    response.raise_for_status()
    # Drop the FASTA header line; keep the rest as-is.
    lines = response.content.splitlines()[1:]
    print("{ac}[{s},{e}) => {n} lines ({u})".format(
        ac=ac, s=startIndex, e=endIndex, n=len(lines), u=url))
    return lines
class AbstractFileDownloader(object):
"""
Base class for individual site genome file downloaders
"""
    def __init__(self, args):
        """Capture download options from the parsed command line and set up
        the destination directory plus a temporary workspace."""
        self.excludeReferenceMin = args.exclude_reference_min
        self.maxVariants = args.num_variants
        self.maxReads = args.num_reads
        # Comma-separated sample list, e.g. "HG00096,HG00533" -> list.
        self.samples = args.samples.split(',')
        self.tempDir = tempfile.mkdtemp(prefix="ga4gh-download")
        self.numChromosomes = args.num_chromosomes
        # Chromosome names "1".."numChromosomes" (autosomes only).
        self.chromosomes = [str(j + 1) for j in range(self.numChromosomes)]
        self.dirName = args.destination
        self.datasetName = '1kg-p3-subset'
        self.variantSetName = 'mvncall'
        self.referenceSetName = 'GRCh37-subset'
        self.chromMinMax = ChromMinMax()
        # GRCh37 GenBank accessions for chromosomes 1-22.
        self.accessions = {
            '1': 'CM000663.1',
            '2': 'CM000664.1',
            '3': 'CM000665.1',
            '4': 'CM000666.1',
            '5': 'CM000667.1',
            '6': 'CM000668.1',
            '7': 'CM000669.1',
            '8': 'CM000670.1',
            '9': 'CM000671.1',
            '10': 'CM000672.1',
            '11': 'CM000673.1',
            '12': 'CM000674.1',
            '13': 'CM000675.1',
            '14': 'CM000676.1',
            '15': 'CM000677.1',
            '16': 'CM000678.1',
            '17': 'CM000679.1',
            '18': 'CM000680.1',
            '19': 'CM000681.1',
            '20': 'CM000682.1',
            '21': 'CM000683.1',
            '22': 'CM000684.1',
        }
        # 1000genomes population (study) per supported sample; used to build
        # the BAM file names in _downloadBam.
        self.studyMap = {
            'HG00096': 'GBR',
            'HG00533': 'CHS',
            'HG00534': 'CHS',
        }
        self.vcfFilePaths = []
        self.bamFilePaths = []
        self.fastaFilePath = None
        # NOTE(review): if the destination exists and --force was not given,
        # the os.mkdir below raises OSError — presumably intentional, but
        # there is no friendly message; confirm.
        if os.path.exists(self.dirName) and args.force:
            shutil.rmtree(self.dirName)
        os.mkdir(self.dirName)
        self.repoPath = os.path.join(self.dirName, "repo.db")
    def log(self, message):
        """Print a progress message to stdout."""
        print(message)
    def runCommand(self, command):
        """
        Runs the specified command.

        Raises subprocess.CalledProcessError if the command exits non-zero.
        """
        # shell=True because callers pass fully formed command strings
        # (bcftools/bgzip/tabix).  Arguments appear to be local file names,
        # not untrusted input — would be injectable otherwise.
        subprocess.check_call(command, shell=True)
    def getVcfBaseUrl(self):
        """Return the base URL of the 1000genomes 20130502 VCF release."""
        # NOTE(review): os.path.join on a URL only works where os.sep is '/';
        # getBaseUrl() is defined elsewhere (presumably a mirror subclass).
        return os.path.join(self.getBaseUrl(), 'ftp/release/20130502/')
    def getBamBaseUrl(self):
        """Return the base URL of the 1000genomes phase3 alignment data."""
        # NOTE(review): os.path.join on a URL only works where os.sep is '/'.
        return os.path.join(self.getBaseUrl(), 'ftp/phase3/data/')
def _updatePositions(self, fileName):
localVariantFile = pysam.VariantFile(fileName)
localIterator = localVariantFile.fetch()
for record in localIterator:
self.chromMinMax.addPos(record.chrom, record.start)
localIterator = None
localVariantFile.close()
self.log('chrom: {}, maxPos: {}, minPos: {}'.format(
record.chrom, self.chromMinMax.getMaxPos(record.chrom),
self.chromMinMax.getMinPos(record.chrom)))
def _writeVcfTempFile(self, localTempFileName, data):
with tempfile.NamedTemporaryFile() as binaryFile:
binaryFile.write(data)
binaryFile.flush()
gzipFile = gzip.open(binaryFile.name, "r")
outputFile = open(localTempFileName, "w")
lineCount = 0
for line in gzipFile:
outputFile.write(line)
if not line.startswith("#"):
lineCount += 1
if lineCount >= self.maxVariants:
break
assert lineCount == self.maxVariants
outputFile.close()
gzipFile.close()
def _downloadVcf(self, chromosome):
sourceFileName = (
"ALL.chr{}.phase3_shapeit2_mvncall_integrated_v5a"
".20130502.genotypes.vcf.gz").format(chromosome)
url = os.path.join(self.getVcfBaseUrl(), sourceFileName)
self.log("Downloading '{}'".format(url))
response = urllib2.urlopen(url)
megabyte = 1024 * 1024
data = response.read(megabyte)
localFileName = os.path.join(
self.dirName, "chr{}.vcf".format(chromosome))
localCompressedFileName = "{}.gz".format(localFileName)
localTempFileName = localFileName + '.unsampled'
self.log("Writing '{}'".format(localTempFileName))
self._writeVcfTempFile(localTempFileName, data)
self.log("Sampling '{}'".format(localTempFileName))
self.runCommand(
'bcftools view --force-samples -s {} {} -o {}'.format(
args.samples, localTempFileName, localFileName))
os.remove(localTempFileName)
self.log("Compressing '{}'".format(localFileName))
self.runCommand('bgzip -f {}'.format(localFileName))
self.log("Indexing '{}'".format(localCompressedFileName))
self.runCommand('tabix {}'.format(localCompressedFileName))
self._updatePositions(localCompressedFileName)
self.vcfFilePaths.append(
(localCompressedFileName, localCompressedFileName + ".tbi"))
def downloadVcfs(self):
for chromosome in self.chromosomes:
self._downloadVcf(chromosome)
def createBamHeader(self, baseHeader):
"""
Creates a new bam header based on the specified header from the
parent BAM file.
"""
header = dict(baseHeader)
newSequences = []
for index, referenceInfo in enumerate(header['SQ']):
if index < self.numChromosomes:
referenceName = referenceInfo['SN']
# The sequence dictionary in the BAM file has to match up
# with the sequence ids in the data, so we must be sure
# that these still match up.
assert referenceName == self.chromosomes[index]
newReferenceInfo = {
'AS': self.referenceSetName,
'SN': referenceName,
'LN': 0, # FIXME
'UR': 'http://example.com',
'M5': 'dbb6e8ece0b5de29da56601613007c2a', # FIXME
'SP': 'Human'
}
newSequences.append(newReferenceInfo)
header['SQ'] = newSequences
return header
def _downloadIndex(self, indexUrl, localIndexFile):
self.log("Downloading index from {} to {}".format(
indexUrl, localIndexFile))
response = urllib2.urlopen(indexUrl)
with open(localIndexFile, "w") as destFile:
destFile.write(response.read())
    def _downloadBam(self, sample):
        """
        Stream reads for one sample from the remote low-coverage BAM,
        keeping only records within the per-chromosome position bounds,
        then write and index a local BAM with a rewritten header.
        """
        samplePath = '{}/alignment/'.format(sample)
        study = self.studyMap[sample]
        sourceFileName = (
            '{}.mapped.ILLUMINA.bwa.{}.'
            'low_coverage.20120522.bam'.format(sample, study))
        destFileName = os.path.join(
            self.dirName, "{}.bam".format(sample))
        baseUrl = self.getBamBaseUrl()
        sampleUrl = os.path.join(baseUrl, samplePath, sourceFileName)
        indexUrl = sampleUrl + ".bai"
        # The index must be local for pysam to fetch ranges remotely.
        localIndexFile = os.path.join(self.tempDir, sourceFileName + ".bai")
        self._downloadIndex(indexUrl, localIndexFile)
        remoteFile = pysam.AlignmentFile(
            sampleUrl, filepath_index=localIndexFile)
        header = self.createBamHeader(remoteFile.header)
        self.log("Writing '{}'".format(destFileName))
        localFile = pysam.AlignmentFile(
            destFileName, 'wb', header=header)
        for chromosome in self.chromosomes:
            self.log("chromosome {}".format(chromosome))
            # Fetch only the position window established by the VCF pass.
            iterator = remoteFile.fetch(
                chromosome.encode('utf-8'),
                start=self.chromMinMax.getMinPos(chromosome),
                end=self.chromMinMax.getMaxPos(chromosome))
            for index, record in enumerate(iterator):
                # We only write records where we have the references
                # for the next mate. TODO we should take the positions
                # of these reads into account later when calculating
                # our reference bounds.
                if record.next_reference_id < self.numChromosomes:
                    if index >= self.maxReads:
                        break
                    localFile.write(record)
            # NOTE(review): `index` is the last loop index over *fetched*
            # records (skipped ones included), not a count of records
            # written, and it is undefined if the fetch yielded nothing --
            # confirm whether this log message is accurate enough.
            self.log("{} records written".format(index))
        remoteFile.close()
        localFile.close()
        self.log("Indexing '{}'".format(destFileName))
        pysam.index(destFileName.encode('utf-8'))
        self.bamFilePaths.append(
            (destFileName, destFileName + ".bai"))
def downloadBams(self):
for sample in self.samples:
self._downloadBam(sample)
def _downloadFasta(self, chromosomes):
fileName = os.path.join(self.dirName, "GRCh37-subset.fa")
with open(fileName, "w") as outFasta:
for chromosome in chromosomes:
accession = self.accessions[chromosome]
minPos = 0
if self.excludeReferenceMin:
minPos = self.chromMinMax.getMinPos(chromosome)
maxPos = self.chromMinMax.getMaxPos(chromosome)
print(minPos, maxPos)
print(">{}".format(chromosome), file=outFasta)
sequence = _fetchSequence(accession, minPos, maxPos)
for line in sequence:
print(line, file=outFasta)
self.log("Compressing {}".format(fileName))
self.runCommand("bgzip -f {}".format(fileName))
compressedFileName = fileName + '.gz'
self.log("Indexing {}".format(compressedFileName))
self.runCommand("samtools faidx {}".format(compressedFileName))
self.fastaFilePath = compressedFileName
def downloadReference(self):
self._downloadFasta(self.chromosomes)
    def createRepo(self):
        """
        Creates the repository for all the data we've just downloaded.
        """
        repo = datarepo.SqlDataRepository(self.repoPath)
        repo.open("w")
        repo.initialise()
        # Reference set is built from the FASTA written by downloadReference().
        referenceSet = references.HtslibReferenceSet("GRCh37-subset")
        referenceSet.populateFromFile(self.fastaFilePath)
        referenceSet.setDescription("Subset of GRCh37 used for demonstration")
        referenceSet.setNcbiTaxonId(9606)  # 9606 == Homo sapiens
        for reference in referenceSet.getReferences():
            reference.setNcbiTaxonId(9606)
            reference.setSourceAccessions(
                self.accessions[reference.getName()] + ".subset")
        repo.insertReferenceSet(referenceSet)
        dataset = datasets.Dataset("1kg-p3-subset")
        dataset.setDescription("Sample data from 1000 Genomes phase 3")
        repo.insertDataset(dataset)
        # A single variant set covers all of the downloaded VCFs.
        variantSet = variants.HtslibVariantSet(dataset, "mvncall")
        variantSet.setReferenceSet(referenceSet)
        dataUrls = [vcfFile for vcfFile, _ in self.vcfFilePaths]
        indexFiles = [indexFile for _, indexFile in self.vcfFilePaths]
        variantSet.populateFromFile(dataUrls, indexFiles)
        variantSet.checkConsistency()
        repo.insertVariantSet(variantSet)
        # One read group set per downloaded sample BAM; relies on
        # self.samples and self.bamFilePaths being in the same order.
        for sample, (bamFile, indexFile) in zip(
                self.samples, self.bamFilePaths):
            readGroupSet = reads.HtslibReadGroupSet(dataset, sample)
            readGroupSet.populateFromFile(bamFile, indexFile)
            readGroupSet.setReferenceSet(referenceSet)
            repo.insertReadGroupSet(readGroupSet)
        repo.commit()
        repo.close()
        self.log("Finished creating the repository; summary:\n")
        repo.open("r")
        repo.printSummary()
def cleanup(self):
self.log('Removing temporary files')
shutil.rmtree(self.tempDir)
class NcbiFileDownloader(AbstractFileDownloader):
    """
    Downloads files from NCBI
    """
    def getBaseUrl(self):
        """Return the NCBI mirror of the 1000 Genomes FTP tree."""
        return 'ftp://ftp-trace.ncbi.nih.gov/1000genomes'
class EbiFileDownloader(AbstractFileDownloader):
    """
    Downloads files from EBI
    """
    def getBaseUrl(self):
        """Return the EBI mirror of the 1000 Genomes FTP tree."""
        return 'ftp://ftp.1000genomes.ebi.ac.uk/vol1'
# Maps the --source command line choice to its downloader implementation.
sources = {
    "ncbi": NcbiFileDownloader,
    "ebi": EbiFileDownloader,
}
def parseArgs():
    """
    Parse and return the command line arguments for this script.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "destination",
        help="the name of the directory that the data is downloaded to")
    parser.add_argument(
        "-f", "--force", default=False, action="store_true",
        help="Overwrite an existing directory with the same name")
    parser.add_argument(
        "--source", default="ncbi", choices=sources.keys(),
        help="the source to download from")
    parser.add_argument(
        "--samples", default='HG00096,HG00533,HG00534',
        # Fixed typo: "seperated" -> "separated".
        help="a comma-separated list of samples to download")
    parser.add_argument(
        "--num-reads", "-r", default=1000, type=int,
        help="the number of reads to download per reference")
    parser.add_argument(
        "--num-variants", "-V", default=1000, type=int,
        help="the maximum number of variants to download per VCF file.")
    parser.add_argument(
        # Use an int default for consistency with the other numeric options
        # (argparse does apply `type` to string defaults, but an explicit
        # int is clearer).
        "--num-chromosomes", default=3, type=int,
        help=(
            "the number of chromosomes whose corresponding reads should "
            "be downloaded"))
    parser.add_argument(
        "--exclude-reference-min", default=False, action="store_true",
        help="Exclude bases in the reference before the minimum position")
    args = parser.parse_args()
    return args
def main(args):
    """
    Run the download pipeline, always cleaning up temporary files.
    """
    downloaderClass = sources[args.source]
    downloader = downloaderClass(args)
    try:
        # Order matters: the VCF pass records the per-chromosome position
        # bounds (chromMinMax) that the reference and BAM downloads use.
        downloader.downloadVcfs()
        downloader.downloadReference()
        downloader.downloadBams()
        downloader.createRepo()
    finally:
        downloader.cleanup()
if __name__ == '__main__':
    # Script entry point; `args` is intentionally module-global (it is
    # also read directly by _downloadVcf).
    args = parseArgs()
    main(args)
|
{
"content_hash": "a1e6c7d6de1d1f022ed6e039078e30b9",
"timestamp": "",
"source": "github",
"line_count": 452,
"max_line_length": 78,
"avg_line_length": 36.63495575221239,
"alnum_prop": 0.609879823660849,
"repo_name": "macieksmuga/server",
"id": "d79170417fe702e66cbf1d90aae66c5f2c790af5",
"size": "16559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/download_example_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6325"
},
{
"name": "Jupyter Notebook",
"bytes": "41899"
},
{
"name": "Python",
"bytes": "951829"
},
{
"name": "Shell",
"bytes": "973"
}
],
"symlink_target": ""
}
|
"""Base class for optimizers."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.training import distribution_strategy_context
from tensorflow.python.training import slot_creator
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
def get_filtered_grad_fn(grad_fn):
  """Wraps `grad_fn`, dropping (gradient, variable) pairs with no gradient.

  `distributed_context.join()` requires that its arguments are parallel
  across threads, and in particular that `grads_and_vars` has the same
  variables in the same order. When computing gradients in eager mode with
  multiple threads, extra variables can show up with a gradient of `None`
  (those variables were accessed in another thread during the gradient
  computation). Filtering out the `None` gradients yields a consistent
  set of variables.
  """
  def filtered_grad_fn(*args, **kwargs):
    grads_and_vars = grad_fn(*args, **kwargs)
    return [pair for pair in grads_and_vars if pair[0] is not None]
  return filtered_grad_fn
def _deduplicate_indexed_slices(values, indices):
  """Sums `values` associated with any non-unique `indices`.

  Args:
    values: A `Tensor` with rank >= 1.
    indices: A one-dimensional integer `Tensor`, indexing into the first
      dimension of `values` (as in an IndexedSlices object).

  Returns:
    A tuple of (`summed_values`, `unique_indices`) where `unique_indices` is a
    de-duplicated version of `indices` and `summed_values` contains the sum of
    `values` slices associated with each unique index.
  """
  unique_indices, new_index_positions = array_ops.unique(indices)
  num_unique = array_ops.shape(unique_indices)[0]
  summed_values = math_ops.unsorted_segment_sum(
      values, new_index_positions, num_unique)
  return summed_values, unique_indices
def _var_key(var):
# TODO(ashankar): Consolidate handling for eager and graph
if hasattr(var, "op"):
return (var.op.graph, var.op.name)
return var._unique_id # pylint: disable=protected-access
class _OptimizableVariable(object):
  """Interface for abstracting over variables in the optimizers."""
  # NOTE(review): this class does not set abc.ABCMeta as its metaclass, so
  # the @abc.abstractmethod decorators are not enforced at instantiation
  # time; subclasses are simply expected to override both methods.
  @abc.abstractmethod
  def target(self):
    """Returns the optimization target for this variable."""
    raise NotImplementedError("Calling an abstract method.")
  @abc.abstractmethod
  def update_op(self, optimizer, g):
    """Returns the update ops for updating the variable."""
    raise NotImplementedError("Calling an abstract method.")
class _RefVariableProcessor(_OptimizableVariable):
  """Processor for Variable."""
  def __init__(self, v):
    self._v = v
  def __str__(self):
    return "<_RefVariableProcessor(%s)>" % self._v
  def target(self):
    # Optimizers update the underlying ref, not the Variable wrapper.
    return self._v._ref()  # pylint: disable=protected-access
  def update_op(self, optimizer, g):
    """Returns the op applying gradient `g` to the wrapped variable."""
    if isinstance(g, ops.Tensor):
      # Dense gradient path.
      update_op = optimizer._apply_dense(g, self._v)  # pylint: disable=protected-access
      if self._v.constraint is not None:
        # Re-project the variable via its constraint after the update runs.
        with ops.control_dependencies([update_op]):
          return self._v.assign(self._v.constraint(self._v))
      else:
        return update_op
    else:
      # Sparse gradient path; constraints are unsupported here.
      assert isinstance(g, ops.IndexedSlices), ("Gradient ", g, " is neither a "
                                                "tensor nor IndexedSlices.")
      if self._v.constraint is not None:
        raise RuntimeError(
            "Cannot use a constraint function on a sparse variable.")
      # pylint: disable=protected-access
      return optimizer._apply_sparse_duplicate_indices(g, self._v)
class _DenseReadResourceVariableProcessor(_OptimizableVariable):
  """Processor for dense ResourceVariables."""
  def __init__(self, v):
    self._v = v
  def target(self):
    return self._v
  def update_op(self, optimizer, g):
    # pylint: disable=protected-access
    # NOTE(review): the update targets `self._v.op.inputs[0]` rather than
    # the variable itself -- presumably the resource handle feeding a read
    # op; confirm against the call sites that construct this processor.
    update_op = optimizer._resource_apply_dense(g, self._v.op.inputs[0])
    if self._v.constraint is not None:
      # Re-project the variable via its constraint after the update runs.
      with ops.control_dependencies([update_op]):
        return self._v.assign(self._v.constraint(self._v))
    else:
      return update_op
class _DenseResourceVariableProcessor(_OptimizableVariable):
  """Processor for dense ResourceVariables."""
  def __init__(self, v):
    self._v = v
  def target(self):
    return self._v
  def update_op(self, optimizer, g):
    # pylint: disable=protected-access
    if isinstance(g, ops.IndexedSlices):
      # Sparse gradient path; constraints are unsupported here.
      if self._v.constraint is not None:
        raise RuntimeError(
            "Cannot use a constraint function on a sparse variable.")
      return optimizer._resource_apply_sparse_duplicate_indices(
          g.values, self._v, g.indices)
    # Dense gradient path.
    update_op = optimizer._resource_apply_dense(g, self._v)
    if self._v.constraint is not None:
      # Re-project the variable via its constraint after the update runs.
      with ops.control_dependencies([update_op]):
        return self._v.assign(self._v.constraint(self._v))
    else:
      return update_op
class _TensorProcessor(_OptimizableVariable):
  """Processor for ordinary Tensors.

  Even though a Tensor can't really be updated, sometimes it is useful to
  compute the gradients with respect to a Tensor using the optimizer. Updating
  the Tensor is, of course, unsupported.
  """
  def __init__(self, v):
    self._v = v
  def target(self):
    return self._v
  def update_op(self, optimizer, g):
    # Tensors are read-only: optimizers may differentiate through them but
    # can never apply an update.
    raise NotImplementedError("Trying to update a Tensor ", self._v)
def _get_processor(v):
  """Returns the `_OptimizableVariable` processor wrapping `v`."""
  if context.executing_eagerly():
    # In eager mode everything is either a plain Tensor or a resource
    # variable; ref variables do not exist.
    if isinstance(v, ops.Tensor):
      return _TensorProcessor(v)
    else:
      return _DenseResourceVariableProcessor(v)
  if isinstance(
      v, resource_variable_ops.ResourceVariable) and not v._in_graph_mode:  # pylint: disable=protected-access
    # True if and only if `v` was initialized eagerly.
    return _DenseResourceVariableProcessor(v)
  if v.op.type == "VarHandleOp":
    # Graph-mode resource variable.
    return _DenseResourceVariableProcessor(v)
  if isinstance(v, variables.Variable):
    # Legacy ref variable.
    return _RefVariableProcessor(v)
  if isinstance(v, ops.Tensor):
    return _TensorProcessor(v)
  raise NotImplementedError("Trying to optimize unsupported type ", v)
@tf_export("train.Optimizer")
class Optimizer(
# Optimizers inherit from CheckpointableBase rather than Checkpointable
# since they do most of their dependency management themselves (slot
# variables are special-cased, and non-slot variables are keyed to graphs).
checkpointable.CheckpointableBase):
"""Base class for optimizers.
This class defines the API to add Ops to train a model. You never use this
class directly, but instead instantiate one of its subclasses such as
`GradientDescentOptimizer`, `AdagradOptimizer`, or `MomentumOptimizer`.
### Usage
```python
# Create an optimizer with the desired parameters.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Add Ops to the graph to minimize a cost by updating a list of variables.
# "cost" is a Tensor, and the list of variables contains tf.Variable
# objects.
opt_op = opt.minimize(cost, var_list=<list of variables>)
```
In the training program you will just have to run the returned Op.
```python
# Execute opt_op to do one step of training:
opt_op.run()
```
### Processing gradients before applying them.
Calling `minimize()` takes care of both computing the gradients and
applying them to the variables. If you want to process the gradients
before applying them you can instead use the optimizer in three steps:
1. Compute the gradients with `compute_gradients()`.
2. Process the gradients as you wish.
3. Apply the processed gradients with `apply_gradients()`.
Example:
```python
# Create an optimizer.
opt = GradientDescentOptimizer(learning_rate=0.1)
# Compute the gradients for a list of variables.
grads_and_vars = opt.compute_gradients(loss, <list of variables>)
# grads_and_vars is a list of tuples (gradient, variable). Do whatever you
# need to the 'gradient' part, for example cap them, etc.
capped_grads_and_vars = [(MyCapper(gv[0]), gv[1]) for gv in grads_and_vars]
# Ask the optimizer to apply the capped gradients.
opt.apply_gradients(capped_grads_and_vars)
```
### Gating Gradients
Both `minimize()` and `compute_gradients()` accept a `gate_gradients`
argument that controls the degree of parallelism during the application of
the gradients.
The possible values are: `GATE_NONE`, `GATE_OP`, and `GATE_GRAPH`.
<b>`GATE_NONE`</b>: Compute and apply gradients in parallel. This provides
the maximum parallelism in execution, at the cost of some non-reproducibility
in the results. For example the two gradients of `matmul` depend on the input
values: With `GATE_NONE` one of the gradients could be applied to one of the
inputs _before_ the other gradient is computed resulting in non-reproducible
results.
<b>`GATE_OP`</b>: For each Op, make sure all gradients are computed before
they are used. This prevents race conditions for Ops that generate gradients
for multiple inputs where the gradients depend on the inputs.
<b>`GATE_GRAPH`</b>: Make sure all gradients for all variables are computed
before any one of them is used. This provides the least parallelism but can
be useful if you want to process all gradients before applying any of them.
### Slots
Some optimizer subclasses, such as `MomentumOptimizer` and `AdagradOptimizer`
allocate and manage additional variables associated with the variables to
train. These are called <i>Slots</i>. Slots have names and you can ask the
optimizer for the names of the slots that it uses. Once you have a slot name
you can ask the optimizer for the variable it created to hold the slot value.
This can be useful if you want to log debug a training algorithm, report stats
about the slots, etc.
"""
# Values for gate_gradients.
GATE_NONE = 0
GATE_OP = 1
GATE_GRAPH = 2
  def __init__(self, use_locking, name):
    """Create a new Optimizer.
    This must be called by the constructors of subclasses.
    Args:
      use_locking: Bool. If True apply use locks to prevent concurrent updates
        to variables.
      name: A non-empty string.  The name to use for accumulators created
        for the optimizer.
    Raises:
      ValueError: If name is malformed.
    """
    if not name:
      raise ValueError("Must specify the optimizer name")
    self._use_locking = use_locking
    self._name = name
    # Dictionary of slots (see the "Slots" section of the class docstring).
    #  {slot_name :
    #      {_var_key(variable_to_train): slot_for_the_variable, ... },
    #   ... }
    self._slots = {}
    # Optimizer-owned state not associated with a single trainable variable;
    # keying scheme is defined where entries are created elsewhere.
    self._non_slot_dict = {}
    # For implementing Checkpointable. Stores information about how to restore
    # slot variables which have not yet been created
    # (checkpointable._CheckpointPosition objects).
    #  {slot_name :
    #      {_var_key(variable_to_train): [checkpoint_position, ... ], ... },
    #   ... }
    self._deferred_slot_restorations = {}
    # TODO(isaprykin): When using a DistributionStrategy, and when an
    # optimizer is created in each tower, it might be dangerous to
    # rely on some Optimer methods.  When such methods are called on a
    # per-tower optimizer, an exception needs to be thrown.  We do
    # allow creation per-tower optimizers however, because the
    # compute_gradients()->apply_gradients() sequence is safe.
  def get_name(self):
    """Returns the name passed to the constructor."""
    return self._name
  def minimize(self, loss, global_step=None, var_list=None,
               gate_gradients=GATE_OP, aggregation_method=None,
               colocate_gradients_with_ops=False, name=None,
               grad_loss=None):
    """Add operations to minimize `loss` by updating `var_list`.
    This method simply combines calls `compute_gradients()` and
    `apply_gradients()`. If you want to process the gradient before applying
    them call `compute_gradients()` and `apply_gradients()` explicitly instead
    of using this function.
    Args:
      loss: A `Tensor` containing the value to minimize.
      global_step: Optional `Variable` to increment by one after the
        variables have been updated.
      var_list: Optional list or tuple of `Variable` objects to update to
        minimize `loss`.  Defaults to the list of variables collected in
        the graph under the key `GraphKeys.TRAINABLE_VARIABLES`.
      gate_gradients: How to gate the computation of gradients.  Can be
        `GATE_NONE`, `GATE_OP`, or  `GATE_GRAPH`.
      aggregation_method: Specifies the method used to combine gradient terms.
        Valid values are defined in the class `AggregationMethod`.
      colocate_gradients_with_ops: If True, try colocating gradients with
        the corresponding op.
      name: Optional name for the returned operation.
      grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
    Returns:
      An Operation that updates the variables in `var_list`.  If `global_step`
      was not `None`, that operation also increments `global_step`.
    Raises:
      ValueError: If some of the variables are not `Variable` objects.
    @compatibility(eager)
    When eager execution is enabled, `loss` should be a Python function that
    takes no arguments and computes the value to be minimized. Minimization (and
    gradient computation) is done with respect to the elements of `var_list` if
    not None, else with respect to any trainable variables created during the
    execution of the `loss` function. `gate_gradients`, `aggregation_method`,
    `colocate_gradients_with_ops` and `grad_loss` are ignored when eager
    execution is enabled.
    @end_compatibility
    """
    grads_and_vars = self.compute_gradients(
        loss, var_list=var_list, gate_gradients=gate_gradients,
        aggregation_method=aggregation_method,
        colocate_gradients_with_ops=colocate_gradients_with_ops,
        grad_loss=grad_loss)
    # Fail fast with a descriptive error when nothing is differentiable,
    # rather than letting apply_gradients() fail further from the cause.
    vars_with_grad = [v for g, v in grads_and_vars if g is not None]
    if not vars_with_grad:
      raise ValueError(
          "No gradients provided for any variable, check your graph for ops"
          " that do not support gradients, between variables %s and loss %s." %
          ([str(v) for _, v in grads_and_vars], loss))
    return self.apply_gradients(grads_and_vars, global_step=global_step,
                                name=name)
  def compute_gradients(self, loss, var_list=None,
                        gate_gradients=GATE_OP,
                        aggregation_method=None,
                        colocate_gradients_with_ops=False,
                        grad_loss=None):
    """Compute gradients of `loss` for the variables in `var_list`.
    This is the first part of `minimize()`.  It returns a list
    of (gradient, variable) pairs where "gradient" is the gradient
    for "variable".  Note that "gradient" can be a `Tensor`, an
    `IndexedSlices`, or `None` if there is no gradient for the
    given variable.
    Args:
      loss: A Tensor containing the value to minimize or a callable taking
        no arguments which returns the value to minimize. When eager execution
        is enabled it must be a callable.
      var_list: Optional list or tuple of `tf.Variable` to update to minimize
        `loss`.  Defaults to the list of variables collected in the graph
        under the key `GraphKeys.TRAINABLE_VARIABLES`.
      gate_gradients: How to gate the computation of gradients.  Can be
        `GATE_NONE`, `GATE_OP`, or `GATE_GRAPH`.
      aggregation_method: Specifies the method used to combine gradient terms.
        Valid values are defined in the class `AggregationMethod`.
      colocate_gradients_with_ops: If True, try colocating gradients with
        the corresponding op.
      grad_loss: Optional. A `Tensor` holding the gradient computed for `loss`.
    Returns:
      A list of (gradient, variable) pairs. Variable is always present, but
      gradient can be `None`.
    Raises:
      TypeError: If `var_list` contains anything else than `Variable` objects.
      ValueError: If some arguments are invalid.
      RuntimeError: If called with eager execution enabled and `loss` is
        not callable.
    @compatibility(eager)
    When eager execution is enabled, `gate_gradients`, `aggregation_method`,
    and `colocate_gradients_with_ops` are ignored.
    @end_compatibility
    """
    # Callable loss: differentiate eagerly with a GradientTape.
    if callable(loss):
      with backprop.GradientTape() as tape:
        if var_list is not None:
          tape.watch(var_list)
        loss_value = loss()
        # Scale loss if using a "mean" loss reduction and multiple towers.
        # Have to be careful to call distribute_lib.get_loss_reduction()
        # *after* loss() is evaluated, so we know what loss reduction it uses.
        # TODO(josh11b): Test that we handle weight decay in a reasonable way.
        if (distribute_lib.get_loss_reduction() ==
            variable_scope.VariableAggregation.MEAN):
          num_towers = distribution_strategy_context.get_distribution_strategy(
          ).num_towers
          if num_towers > 1:
            loss_value *= (1. / num_towers)
      if var_list is None:
        # Default to every variable the tape saw during loss().
        var_list = tape.watched_variables()
      grads = tape.gradient(loss_value, var_list, grad_loss)
      return list(zip(grads, var_list))
    # Non-callable/Tensor loss case
    if context.executing_eagerly():
      raise RuntimeError(
          "`loss` passed to Optimizer.compute_gradients should "
          "be a function when eager execution is enabled.")
    # Scale loss if using a "mean" loss reduction and multiple towers.
    if (distribute_lib.get_loss_reduction() ==
        variable_scope.VariableAggregation.MEAN):
      num_towers = distribution_strategy_context.get_distribution_strategy(
      ).num_towers
      if num_towers > 1:
        loss *= (1. / num_towers)
    if gate_gradients not in [Optimizer.GATE_NONE, Optimizer.GATE_OP,
                              Optimizer.GATE_GRAPH]:
      raise ValueError("gate_gradients must be one of: Optimizer.GATE_NONE, "
                       "Optimizer.GATE_OP, Optimizer.GATE_GRAPH.  Not %s" %
                       gate_gradients)
    self._assert_valid_dtypes([loss])
    if grad_loss is not None:
      self._assert_valid_dtypes([grad_loss])
    if var_list is None:
      var_list = (
          variables.trainable_variables() +
          ops.get_collection(ops.GraphKeys.TRAINABLE_RESOURCE_VARIABLES))
    else:
      var_list = nest.flatten(var_list)
    # pylint: disable=protected-access
    var_list += ops.get_collection(ops.GraphKeys._STREAMING_MODEL_PORTS)
    # pylint: enable=protected-access
    processors = [_get_processor(v) for v in var_list]
    if not var_list:
      raise ValueError("No variables to optimize.")
    # Differentiate against each processor's target (e.g. the ref for a
    # legacy Variable), not the wrapper object itself.
    var_refs = [p.target() for p in processors]
    grads = gradients.gradients(
        loss, var_refs, grad_ys=grad_loss,
        gate_gradients=(gate_gradients == Optimizer.GATE_OP),
        aggregation_method=aggregation_method,
        colocate_gradients_with_ops=colocate_gradients_with_ops)
    if gate_gradients == Optimizer.GATE_GRAPH:
      grads = control_flow_ops.tuple(grads)
    grads_and_vars = list(zip(grads, var_list))
    self._assert_valid_dtypes(
        [v for g, v in grads_and_vars
         if g is not None and v.dtype != dtypes.resource])
    return grads_and_vars
  def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    """Apply gradients to variables.
    This is the second part of `minimize()`. It returns an `Operation` that
    applies gradients.
    Args:
      grads_and_vars: List of (gradient, variable) pairs as returned by
        `compute_gradients()`.
      global_step: Optional `Variable` to increment by one after the
        variables have been updated.
      name: Optional name for the returned operation.  Default to the
        name passed to the `Optimizer` constructor.
    Returns:
      An `Operation` that applies the specified gradients. If `global_step`
      was not None, that operation also increments `global_step`.
    Raises:
      TypeError: If `grads_and_vars` is malformed.
      ValueError: If none of the variables have gradients.
      RuntimeError: If you should use `_distributed_apply()` instead.
    """
    # This is a default implementation of apply_gradients() that can be shared
    # by most optimizers.  It relies on the subclass implementing the following
    # methods: _create_slots(), _prepare(), _apply_dense(), and _apply_sparse().
    # Handle DistributionStrategy case.
    if distribution_strategy_context.get_cross_tower_context():
      raise RuntimeError("Use `_distributed_apply()` instead of "
                         "`apply_gradients()` in a cross-tower context.")
    # TODO(isaprykin): Get rid of `has_distribution_strategy()` check by
    # always calling _distributed_apply(), using the default distribution
    # as needed.
    if distribution_strategy_context.has_distribution_strategy():
      # Drop None gradients so all towers see the same variable set.
      grads_and_vars = get_filtered_grad_fn(lambda: grads_and_vars)()
      return distribution_strategy_context.get_tower_context().merge_call(
          self._distributed_apply, grads_and_vars, global_step, name)
    # No DistributionStrategy case.
    grads_and_vars = tuple(grads_and_vars)  # Make sure repeat iteration works.
    if not grads_and_vars:
      raise ValueError("No variables provided.")
    converted_grads_and_vars = []
    for g, v in grads_and_vars:
      if g is not None:
        try:
          # Convert the grad to Tensor or IndexedSlices if necessary.
          g = ops.convert_to_tensor_or_indexed_slices(g)
        except TypeError:
          raise TypeError(
              "Gradient must be convertible to a Tensor"
              " or IndexedSlices, or None: %s" % g)
        if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
          raise TypeError(
              "Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
      p = _get_processor(v)
      converted_grads_and_vars.append((g, v, p))
    converted_grads_and_vars = tuple(converted_grads_and_vars)
    var_list = [v for g, v, _ in converted_grads_and_vars if g is not None]
    if not var_list:
      raise ValueError("No gradients provided for any variable: %s." %
                       ([str(v) for _, _, v in converted_grads_and_vars],))
    # NOTE(review): slot creation happens under init_scope -- presumably so
    # the slot variables are lifted out of any function-building graph;
    # confirm before relying on that.
    with ops.init_scope():
      self._create_slots(var_list)
    update_ops = []
    with ops.name_scope(name, self._name) as name:
      self._prepare()
      for grad, var, processor in converted_grads_and_vars:
        if grad is None:
          continue
        # We colocate all ops created in _apply_dense or _apply_sparse
        # on the same device as the variable.
        # TODO(apassos): figure out how to get the variable name here.
        if context.executing_eagerly() or isinstance(
            var,
            resource_variable_ops.ResourceVariable) and not var._in_graph_mode:  # pylint: disable=protected-access
          scope_name = ""
        else:
          scope_name = var.op.name
        with ops.name_scope("update_" + scope_name), ops.colocate_with(var):
          update_ops.append(processor.update_op(self, grad))
      if global_step is None:
        apply_updates = self._finish(update_ops, name)
      else:
        # Increment global_step only after every update op has run.
        with ops.control_dependencies([self._finish(update_ops, "update")]):
          with ops.colocate_with(global_step):
            if isinstance(global_step, resource_variable_ops.ResourceVariable):
              # TODO(apassos): the implicit read in assign_add is slow; consider
              # making it less so.
              apply_updates = resource_variable_ops.assign_add_variable_op(
                  global_step.handle,
                  ops.convert_to_tensor(1, dtype=global_step.dtype),
                  name=name)
            else:
              apply_updates = state_ops.assign_add(global_step, 1, name=name)
      if not context.executing_eagerly():
        if isinstance(apply_updates, ops.Tensor):
          apply_updates = apply_updates.op
        train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
        if apply_updates not in train_op:
          train_op.append(apply_updates)
      return apply_updates
  def _distributed_apply(self,
                         distribution,
                         grads_and_vars,
                         global_step=None,
                         name=None):
    """A version of `apply_gradients` for cross-tower context.
    This is a version of `apply_gradients()` for when you are using a
    `DistributionStrategy` and are in a cross-tower context. If in a
    tower context, use `apply_gradients()` as normal.
    Args:
      distribution: A `DistributionStrategy` object.
      grads_and_vars: List of (gradient, variable) pairs as returned by
        `compute_gradients()`, and then aggregated across towers.
      global_step: Optional (mirrored) `Variable` to increment by one
        after the variables have been updated.
      name: Optional name for the returned operation.  Default to the
        name passed to the `Optimizer` constructor.
    Returns:
      An `Operation` that applies the specified gradients across all
      towers. If `global_step` was not None, that operation also
      increments `global_step`.
    """
    # Sum the per-tower gradients before applying them.
    reduced_grads = distribution.batch_reduce(
        variable_scope.VariableAggregation.SUM, grads_and_vars)
    var_list = [v for _, v in grads_and_vars]
    grads_and_vars = zip(reduced_grads, var_list)
    # Note that this is called in a cross-tower context.
    self._create_slots(var_list)
    def update(v, g):
      """Apply gradients to a replica variable."""
      assert v is not None
      try:
        # Convert the grad to Tensor or IndexedSlices if necessary.
        g = ops.convert_to_tensor_or_indexed_slices(g)
      except TypeError:
        raise TypeError("Gradient must be convertible to a Tensor"
                        " or IndexedSlices, or None: %s" % g)
      if not isinstance(g, (ops.Tensor, ops.IndexedSlices)):
        raise TypeError(
            "Gradient must be a Tensor, IndexedSlices, or None: %s" % g)
      p = _get_processor(v)
      scope_name = "" if context.executing_eagerly() else v.op.name
      # device_policy is set because non-mirrored tensors will be read in
      # `update_op`. `_resource_apply_dense`, `lr_t`, `beta1_t` and `beta2_t`
      # is an example.
      with ops.name_scope("update_" + scope_name):
        return p.update_op(self, g)
    with ops.name_scope(name, self._name) as name:
      self._prepare()
      update_ops = [
          op
          for grad, var in grads_and_vars
          for op in distribution.unwrap(distribution.update(var, update, grad))
      ]
      # Not a method: `self` is passed explicitly through
      # distribution.update_non_slot() below.
      def finish(self, update_ops):
        return self._finish(update_ops, "update")
      non_slot_devices = distribution.non_slot_devices(var_list)
      finish_updates = distribution.update_non_slot(
          non_slot_devices, finish, self, update_ops)
      if global_step is None:
        apply_updates = distribution.group(finish_updates, name=name)
      else:
        # Increment global_step only after all finish updates have run.
        with ops.control_dependencies(distribution.unwrap(finish_updates)):
          apply_updates = distribution.group(distribution.update(
              global_step, state_ops.assign_add, 1, name=name))
      if not context.executing_eagerly():
        if isinstance(apply_updates, ops.Tensor):
          apply_updates = apply_updates.op
        train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
        if apply_updates not in train_op:
          train_op.append(apply_updates)
      return apply_updates
  def get_slot(self, var, name):
    """Return a slot named `name` created for `var` by the Optimizer.
    Some `Optimizer` subclasses use additional variables. For example
    `Momentum` and `Adagrad` use variables to accumulate updates. This method
    gives access to these `Variable` objects if for some reason you need them.
    Use `get_slot_names()` to get the list of slot names created by the
    `Optimizer`.
    Args:
      var: A variable passed to `minimize()` or `apply_gradients()`.
      name: A string.
    Returns:
      The `Variable` for the slot if it was created, `None` otherwise.
    """
    # pylint: disable=protected-access
    named_slots = self._slots.get(name, None)
    if not named_slots:
      return None
    if hasattr(var, "_distributed_container"):
      # `var` is a per-replica copy of a distributed (mirrored) variable, so
      # its slot was keyed on the container, not on the replica itself.
      # NOTE: If this isn't patched, then there is no `handle` in
      # `_resource_apply_dense`.
      distributed_container = var._distributed_container()
      assert distributed_container is not None
      if context.executing_eagerly():
        key = distributed_container._unique_id
      else:
        key = (distributed_container.graph, distributed_container._shared_name)
      # pylint: enable=protected-access
      mirrored_slot = named_slots.get(key, None)
      if mirrored_slot is None: return None
      # Return the slot component that lives on the same device as `var`.
      return mirrored_slot.get(device=var.device)
    return named_slots.get(_var_key(var), None)
def get_slot_names(self):
"""Return a list of the names of slots created by the `Optimizer`.
See `get_slot()`.
Returns:
A list of strings.
"""
return sorted(self._slots.keys())
  def variables(self):
    """A list of variables which encode the current state of `Optimizer`.
    Includes slot variables and additional global variables created by the
    optimizer in the current default graph.
    Returns:
      A list of variables, sorted by name for determinism.
    """
    current_graph = ops.get_default_graph()
    def _from_current_graph(variable):
      # True iff `variable` belongs to the current default graph.
      if variable._in_graph_mode:  # pylint: disable=protected-access
        return variable.op.graph is current_graph
      else:
        # No variable.op in eager mode. We don't expect lots of eager graphs,
        # but behavior should be consistent with graph mode.
        return variable._graph_key == current_graph._graph_key  # pylint: disable=protected-access
    optimizer_variables = [v for v in self._non_slot_variables()
                           if _from_current_graph(v)]
    # Add every slot variable (one dict per slot name) from this graph.
    for _, variable_dict in self._slots.items():
      for _, slot_for_variable in variable_dict.items():
        if _from_current_graph(slot_for_variable):
          optimizer_variables.append(slot_for_variable)
    # Sort variables by name so that the return is deterministic.
    return sorted(optimizer_variables, key=lambda v: v.name)
  def _create_non_slot_variable(self, initial_value, name, colocate_with):
    """Add an extra variable, not associated with a slot.
    Args:
      initial_value: Initial value for the new variable (may be replaced by a
        checkpoint-restored value when executing eagerly).
      name: Name for the variable; also the lookup key (with the graph).
      colocate_with: A variable the new variable should be colocated with.
    Returns:
      The (possibly pre-existing) non-slot `Variable`.
    """
    # Recommendation: Use OptimizerV2 if your optimizer uses non-slot variables.
    eager = context.executing_eagerly()
    # Non-slot variables are cached per (name, graph); eager mode has no graph.
    graph = None if eager else colocate_with.graph
    key = (name, graph)
    v = self._non_slot_dict.get(key, None)
    if v is None:
      self._maybe_initialize_checkpointable()
      distribution_strategy = (
          distribution_strategy_context.get_distribution_strategy())
      with distribution_strategy.colocate_vars_with(colocate_with):
        if eager:
          # When eager, a checkpoint value (if any) must be supplied as the
          # initializer, since there is no later restore op.
          restored_initial_value = self._preload_simple_restoration(
              name=name, shape=None)
          if restored_initial_value is not None:
            initial_value = restored_initial_value
        v = variable_scope.variable(initial_value, name=name, trainable=False)
      # Restore this variable by name if necessary, but don't add a
      # Checkpointable dependency. Optimizers return the current graph's
      # non-slot variables from _checkpoint_dependencies explicitly rather
      # than unconditionally adding dependencies (since there may be multiple
      # non-slot variables with the same name in different graphs, trying to
      # save all of them would result in errors).
      self._handle_deferred_dependencies(name=name, checkpointable=v)
      self._non_slot_dict[key] = v
    return v
  @property
  def _checkpoint_dependencies(self):
    """From Checkpointable. Gather graph-specific non-slot variables to save."""
    current_graph_non_slot_variables = []
    current_graph_key = ops.get_default_graph()._graph_key  # pylint: disable=protected-access
    # Sort on the name component only: graph objects are not comparable.
    for (name, _), variable_object in sorted(self._non_slot_dict.items(),
                                             # Avoid comparing graphs
                                             key=lambda item: item[0][0]):
      if variable_object._graph_key == current_graph_key:  # pylint: disable=protected-access
        current_graph_non_slot_variables.append(
            checkpointable.CheckpointableReference(
                name=name, ref=variable_object))
    # Unconditional dependencies first, then this graph's non-slot variables.
    return (super(Optimizer, self)._checkpoint_dependencies
            + current_graph_non_slot_variables)
def _lookup_dependency(self, name):
"""From Checkpointable. Find a non-slot variable in the current graph."""
unconditional = super(Optimizer, self)._lookup_dependency(name)
if unconditional is not None:
return unconditional
graph = None if context.executing_eagerly() else ops.get_default_graph()
return self._get_non_slot_variable(name, graph=graph)
def _get_non_slot_variable(self, name, graph=None):
non_slot = self._non_slot_dict.get((name, graph), None)
if hasattr(non_slot, "_distributed_container"):
# This is a mirrored non-slot. In order to enable code like `_finish`
# to assign to a non-slot, return the current context replica.
return non_slot.get()
else:
return non_slot
def _non_slot_variables(self):
"""Additional variables created by the `Optimizer`.
Returns:
A list or tuple of variables.
"""
return self._non_slot_dict.values()
def _assert_valid_dtypes(self, tensors):
"""Asserts tensors are all valid types (see `_valid_dtypes`).
Args:
tensors: Tensors to check.
Raises:
ValueError: If any tensor is not a valid type.
"""
valid_dtypes = self._valid_dtypes()
for t in tensors:
dtype = t.dtype.base_dtype
if dtype not in valid_dtypes:
raise ValueError(
"Invalid type %r for %s, expected: %s." % (
dtype, t.name, [v for v in valid_dtypes]))
# --------------
# Methods to be implemented by subclasses if they want to use the
# inherited implementation of apply_gradients() or compute_gradients().
# --------------
def _valid_dtypes(self):
"""Valid types for loss, variables and gradients.
Subclasses should override to allow other float types.
Returns:
Valid types for loss, variables and gradients.
"""
return set(
[dtypes.float16, dtypes.bfloat16, dtypes.float32, dtypes.float64])
def _create_slots(self, var_list):
"""Create all slots needed by the variables.
Args:
var_list: A list of `Variable` objects.
"""
# No slots needed by default
pass
def _prepare(self):
"""Create all needed tensors before applying gradients.
This is called with the name_scope using the "name" that
users have chosen for the application of gradients.
"""
pass
def _apply_dense(self, grad, var):
"""Add ops to apply dense gradients to `var`.
Args:
grad: A `Tensor`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _resource_apply_dense(self, grad, handle):
"""Add ops to apply dense gradients to the variable `handle`.
Args:
grad: a `Tensor` representing the gradient.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _resource_apply_sparse_duplicate_indices(self, grad, handle, indices):
"""Add ops to apply sparse gradients to `handle`, with repeated indices.
Optimizers which override this method must deal with repeated indices. See
the docstring of `_apply_sparse_duplicate_indices` for details. By default
the correct behavior, to sum non-unique indices and their associated
gradients, is enforced by first pre-processing `grad` and `indices` and
passing them on to `_resource_apply_sparse`. Optimizers which deal correctly
with duplicate indices may instead override this method to avoid the
overhead of summing.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices may be repeated.
Returns:
An `Operation` which updates the value of the variable.
"""
summed_grad, unique_indices = _deduplicate_indexed_slices(
values=grad, indices=indices)
return self._resource_apply_sparse(summed_grad, handle, unique_indices)
def _resource_apply_sparse(self, grad, handle, indices):
"""Add ops to apply sparse gradients to the variable `handle`.
Similar to `_apply_sparse`, the `indices` argument to this method has been
de-duplicated. Optimizers which deal correctly with non-unique indices may
instead override `_resource_apply_sparse_duplicate_indices` to avoid this
overhead.
Args:
grad: a `Tensor` representing the gradient for the affected indices.
handle: a `Tensor` of dtype `resource` which points to the variable
to be updated.
indices: a `Tensor` of integral type representing the indices for
which the gradient is nonzero. Indices are unique.
Returns:
An `Operation` which updates the value of the variable.
"""
raise NotImplementedError()
def _apply_sparse_duplicate_indices(self, grad, var):
"""Add ops to apply sparse gradients to `var`, with repeated sparse indices.
Optimizers which override this method must deal with IndexedSlices objects
such as the following:
IndexedSlicesValue(values=[1, 1], indices=[0, 0], dense_shape=[1])
The correct interpretation is:
IndexedSlicesValue(values=[2], indices=[0], dense_shape=[1])
Many optimizers deal incorrectly with repeated indices when updating based
on sparse gradients (e.g. summing squares rather than squaring the sum, or
applying momentum terms multiple times). Adding first is always the correct
behavior, so this is enforced here by reconstructing the IndexedSlices to
have only unique indices, then calling _apply_sparse.
Optimizers which deal correctly with repeated indices may instead override
this method to avoid the overhead of summing indices.
Args:
grad: `IndexedSlices`.
var: A `Variable` object.
Returns:
An `Operation`.
"""
summed_values, unique_indices = _deduplicate_indexed_slices(
values=grad.values, indices=grad.indices)
gradient_no_duplicate_indices = ops.IndexedSlices(
indices=unique_indices,
values=summed_values,
dense_shape=grad.dense_shape)
return self._apply_sparse(gradient_no_duplicate_indices, var)
def _apply_sparse(self, grad, var):
"""Add ops to apply sparse gradients to `var`.
The IndexedSlices object passed to `grad` in this function is by default
pre-processed in `_apply_sparse_duplicate_indices` to remove duplicate
indices (see its docstring for details). Optimizers which can tolerate or
have correct special cases for duplicate sparse indices may override
`_apply_sparse_duplicate_indices` instead of this function, avoiding that
overhead.
Args:
grad: `IndexedSlices`, with no repeated indices.
var: A `Variable` object.
Returns:
An `Operation`.
"""
raise NotImplementedError()
def _finish(self, update_ops, name_scope):
"""Do what is needed to finish the update.
This is called with the `name_scope` using the "name" that
users have chosen for the application of gradients.
Args:
update_ops: List of `Operation` objects to update variables. This list
contains the values returned by the `_apply_dense()` and
`_apply_sparse()` calls.
name_scope: String. Name to use for the returned operation.
Returns:
The operation to apply updates.
"""
return control_flow_ops.group(*update_ops, name=name_scope)
# --------------
# Utility methods for subclasses.
# --------------
def _slot_dict(self, slot_name):
"""Returns a dict for caching slots created under the given name.
Args:
slot_name: Name for the slot.
Returns:
A dict that maps primary `Variable` objects to the slot created
for that variable, under the given slot name.
"""
named_slots = self._slots.get(slot_name, None)
if named_slots is None:
named_slots = {}
self._slots[slot_name] = named_slots
return named_slots
def _get_or_make_slot(self, var, val, slot_name, op_name):
"""Find or create a slot for a variable.
Args:
var: A `Variable` object.
val: A `Tensor`. The initial value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_slot(var, val, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
def _get_or_make_slot_with_initializer(self, var, initializer, shape, dtype,
slot_name, op_name):
"""Find or create a slot for a variable, using an Initializer.
Args:
var: A `Variable` object.
initializer: An `Initializer`. The initial value of the slot.
shape: Shape of the initial value of the slot.
dtype: Type of the value of the slot.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_slot_with_initializer(
var, initializer, shape, dtype, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
def _zeros_slot(self, var, slot_name, op_name):
"""Find or create a slot initialized with 0.0.
Args:
var: A `Variable` object.
slot_name: Name for the slot.
op_name: Name to use when scoping the Variable that
needs to be created for the slot.
Returns:
A `Variable` object.
"""
named_slots = self._slot_dict(slot_name)
if _var_key(var) not in named_slots:
new_slot_variable = slot_creator.create_zeros_slot(var, op_name)
self._restore_slot_variable(
slot_name=slot_name, variable=var,
slot_variable=new_slot_variable)
named_slots[_var_key(var)] = new_slot_variable
return named_slots[_var_key(var)]
# --------------
# For implementing the Checkpointable interface.
# --------------
def _restore_slot_variable(self, slot_name, variable, slot_variable):
"""Restore a newly created slot variable's value."""
variable_key = _var_key(variable)
deferred_restorations = self._deferred_slot_restorations.get(
slot_name, {}).pop(variable_key, [])
# Iterate over restores, highest restore UID first to minimize the number
# of assignments.
deferred_restorations.sort(key=lambda position: position.restore_uid,
reverse=True)
for checkpoint_position in deferred_restorations:
checkpoint_position.restore(slot_variable)
  def _create_or_restore_slot_variable(
      self, slot_variable_position, slot_name, variable):
    """Restore a slot variable's value, possibly creating it.
    Called when a variable which has an associated slot variable is created or
    restored. When executing eagerly, we create the slot variable with a
    restoring initializer.
    No new variables are created when graph building. Instead,
    _restore_slot_variable catches these after normal creation and adds restore
    ops to the graph. This method is nonetheless important when graph building
    for the case when a slot variable has already been created but `variable`
    has just been added to a dependency graph (causing us to realize that the
    slot variable needs to be restored).
    Args:
      slot_variable_position: A `checkpointable._CheckpointPosition` object
        indicating the slot variable `Checkpointable` object to be restored.
      slot_name: The name of this `Optimizer`'s slot to restore into.
      variable: The variable object this slot is being created for.
    """
    named_slots = self._slot_dict(slot_name)
    variable_key = _var_key(variable)
    slot_variable = named_slots.get(variable_key, None)
    if (slot_variable is None and context.executing_eagerly() and
        slot_variable_position.is_simple_variable()
        # Defer slot variable creation if there is an active variable creator
        # scope. Generally we'd like to eagerly create/restore slot variables
        # when possible, but this may mean that scopes intended to catch
        # `variable` also catch its eagerly created slot variable
        # unintentionally (specifically make_template would add a dependency on
        # a slot variable if not for this case). Deferring is mostly harmless
        # (aside from double initialization), and makes variable creator scopes
        # behave the same way they do when graph building.
        and not ops.get_default_graph()._variable_creator_stack):  # pylint: disable=protected-access
      # Eager + simple variable: create the slot now with the checkpoint value
      # as its initializer.
      initializer = checkpointable.CheckpointInitialValue(
          checkpoint_position=slot_variable_position)
      slot_variable = self._get_or_make_slot(
          var=variable,
          val=initializer,
          slot_name=slot_name,
          op_name=self._name)
    # Slot variables are not owned by any one object (because we don't want to
    # save the slot variable if the optimizer is saved without the non-slot
    # variable, or if the non-slot variable is saved without the optimizer;
    # it's a dependency hypergraph with edges of the form (optimizer, non-slot
    # variable, variable)). So we don't _track_ slot variables anywhere, and
    # instead special-case this dependency and otherwise pretend it's a normal
    # graph.
    if slot_variable is not None:
      # If we've either made this slot variable, or if we've pulled out an
      # existing slot variable, we should restore it.
      slot_variable_position.restore(slot_variable)
    else:
      # We didn't make the slot variable. Defer restoring until it gets created
      # normally. We keep a list rather than the one with the highest restore
      # UID in case slot variables have their own dependencies, in which case
      # those could differ between restores.
      self._deferred_slot_restorations.setdefault(
          slot_name, {}).setdefault(variable_key, []).append(
              slot_variable_position)
def _call_if_callable(self, param):
"""Call the function if param is callable."""
return param() if callable(param) else param
|
{
"content_hash": "705a357ed02126c5d2187620d43cdad2",
"timestamp": "",
"source": "github",
"line_count": 1208,
"max_line_length": 115,
"avg_line_length": 39.937086092715234,
"alnum_prop": 0.6731199734682033,
"repo_name": "xodus7/tensorflow",
"id": "699162b30c7eae7340d3af3db03a9d1c12889971",
"size": "48934",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/training/optimizer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1286"
},
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "340946"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "48861698"
},
{
"name": "CMake",
"bytes": "195699"
},
{
"name": "Dockerfile",
"bytes": "36400"
},
{
"name": "Go",
"bytes": "1240309"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "834061"
},
{
"name": "Jupyter Notebook",
"bytes": "2604756"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52618"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "40952138"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "459258"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
"""lambda-uploader - Simple way to create and upload python lambda jobs"""
from __future__ import print_function
import sys
import logging
import traceback
import lambda_uploader
from os import getcwd, path, getenv
from lambda_uploader import package, config, uploader, subscribers
from boto3 import __version__ as boto3_version
from botocore import __version__ as botocore_version
LOG = logging.getLogger(__name__)
NAMESPACE = 'rax_jira'
CHECK = '✅'
INTERROBANG = '‽'
RED_X = '❌'
LAMBDA = 'λ'
TRACEBACK_MESSAGE = """%s Unexpected error. Please report this traceback.
Uploader: %s
Botocore: %s
Boto3: %s
"""
# Used for stdout for shell
def _print(txt):
    """Write *txt* to stdout, prefixed with the lambda symbol where supported."""
    if sys.platform in ('win32', 'cygwin'):
        # Windows PowerShell consoles don't render the Unicode lambda.
        print(txt)
    else:
        print("%s %s" % (LAMBDA, txt))
def _execute(args):
    """Build, optionally upload, and clean up a lambda package.

    :param args: parsed ``argparse`` namespace produced by :func:`main`.
    """
    pth = path.abspath(args.function_dir)
    cfg = config.Config(pth, args.config, role=args.role,
                        variables=args.variables)
    if args.s3_bucket:
        cfg.set_s3(args.s3_bucket, args.s3_key)
    if args.no_virtualenv:
        # specified flag to omit entirely
        venv = False
    elif args.virtualenv:
        # specified a custom virtualenv
        venv = args.virtualenv
    else:
        # build and include virtualenv, the default
        venv = None
    if args.no_build:
        # Reuse the existing workspace zip instead of rebuilding.
        pkg = package.create_package(pth)
    else:
        _print('Building Package')
        # Command-line requirements/extra-files override the config file.
        requirements = cfg.requirements
        if args.requirements:
            requirements = path.abspath(args.requirements)
        extra_files = cfg.extra_files
        if args.extra_files:
            extra_files = args.extra_files
        pkg = package.build_package(pth, requirements,
                                    venv, cfg.ignore, extra_files,
                                    pyexec=cfg.runtime)
    if not args.no_clean:
        pkg.clean_workspace()
    if not args.no_upload:
        # Set publish if flagged to do so
        if args.publish:
            cfg.set_publish()
        create_alias = False
        # Set alias if the arg is passed
        if args.alias is not None:
            cfg.set_alias(args.alias, args.alias_description)
            create_alias = True
        _print('Uploading Package')
        upldr = uploader.PackageUploader(cfg, args.profile)
        upldr.upload(pkg)
        # If the alias was set create it
        if create_alias:
            upldr.alias()
        if cfg.subscription:
            _print('Creating subscription')
            subscribers.create_subscriptions(cfg, args.profile)
        # The zipfile is only removed after a (successful) upload.
        pkg.clean_zipfile()
    _print('Fin')
def main(arv=None):
    """lambda-uploader command line interface.

    Parses command-line arguments, dispatches to :func:`_execute`, and
    converts unexpected exceptions into a traceback report plus exit code 1.

    :param arv: unused; presumably a typo for ``argv`` — arguments are
        always read from ``sys.argv``. TODO confirm before renaming, since
        callers could pass it by keyword.
    """
    # Check for Python 2.7 or later
    if sys.version_info[0] < 3 and not sys.version_info[1] == 7:
        raise RuntimeError('lambda-uploader requires Python 2.7 or later')
    import argparse
    parser = argparse.ArgumentParser(
        description='Simple way to create and upload python lambda jobs')
    parser.add_argument('--version', '-v', action='version',
                        version=lambda_uploader.__version__)
    parser.add_argument('--no-upload', dest='no_upload',
                        action='store_const', help='dont upload the zipfile',
                        const=True)
    parser.add_argument('--no-clean', dest='no_clean',
                        action='store_const',
                        help='dont cleanup the temporary workspace',
                        const=True)
    parser.add_argument('--publish', '-p', dest='publish',
                        action='store_const',
                        help='publish an upload to an immutable version',
                        const=True)
    parser.add_argument('--virtualenv', '-e',
                        help='use specified virtualenv instead of making one',
                        default=None)
    parser.add_argument('--extra-files', '-x',
                        action='append',
                        help='include file or directory path in package',
                        default=[])
    parser.add_argument('--no-virtualenv', dest='no_virtualenv',
                        action='store_const',
                        help='do not create or include a virtualenv at all',
                        const=True)
    parser.add_argument('--role', dest='role',
                        default=getenv('LAMBDA_UPLOADER_ROLE'),
                        help=('IAM role to assign the lambda function, '
                              'can be set with $LAMBDA_UPLOADER_ROLE'))
    parser.add_argument('--variables', dest='variables',
                        help='add environment variables')
    parser.add_argument('--profile', dest='profile',
                        help='specify AWS cli profile')
    parser.add_argument('--requirements', '-r', dest='requirements',
                        help='specify a requirements.txt file')
    alias_help = 'alias for published version (WILL SET THE PUBLISH FLAG)'
    parser.add_argument('--alias', '-a', dest='alias',
                        default=None, help=alias_help)
    parser.add_argument('--alias-description', '-m', dest='alias_description',
                        default=None, help='alias description')
    parser.add_argument('--s3-bucket', '-s', dest='s3_bucket',
                        help='S3 bucket to store the lambda function in',
                        default=None)
    parser.add_argument('--s3-key', '-k', dest='s3_key',
                        help='Key name of the lambda function s3 object',
                        default=None)
    parser.add_argument('--config', '-c', help='Overrides lambda.json',
                        default='lambda.json')
    parser.add_argument('function_dir', default=getcwd(), nargs='?',
                        help='lambda function directory')
    parser.add_argument('--no-build', dest='no_build',
                        action='store_const', help='dont build the sourcecode',
                        const=True)
    verbose = parser.add_mutually_exclusive_group()
    verbose.add_argument('-V', dest='loglevel', action='store_const',
                         const=logging.INFO,
                         help="Set log-level to INFO.")
    verbose.add_argument('-VV', dest='loglevel', action='store_const',
                         const=logging.DEBUG,
                         help="Set log-level to DEBUG.")
    parser.set_defaults(loglevel=logging.WARNING)
    args = parser.parse_args()
    logging.basicConfig(level=args.loglevel)
    try:
        _execute(args)
    except Exception:
        # Report version info with the traceback so bug reports are useful.
        print(TRACEBACK_MESSAGE
              % (INTERROBANG, lambda_uploader.__version__,
                 boto3_version, botocore_version),
              file=sys.stderr)
        traceback.print_exc()
        sys.stderr.flush()
        sys.exit(1)
|
{
"content_hash": "3cd2e1b88571d4aa008c1f22a47fcc4f",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 79,
"avg_line_length": 36.77127659574468,
"alnum_prop": 0.5660350065094749,
"repo_name": "rackerlabs/lambda-uploader",
"id": "14de0987e6b53b46eaa23448a6bc25143fae84ad",
"size": "7531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lambda_uploader/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "59963"
}
],
"symlink_target": ""
}
|
"""HTTP request and response messages
.. seealso:: :rfc:`2616#section-4`
"""
import re
from httoop.exceptions import InvalidLine
from httoop.meta import HTTPSemantic
from httoop.six import with_metaclass
from httoop.util import Unicode, _
__all__ = ('Method', )
class Method(with_metaclass(HTTPSemantic)):
	u"""An HTTP request method (e.g. ``GET``, ``POST``).

	.. seealso:: :rfc:`2616#section-5.1.1`
	"""

	# Was ('__method') — a plain string, which happens to work as a single
	# slot name but reads as an accidental missing comma.
	__slots__ = ('__method',)

	#: Methods defined as safe (request has no server-side effects).
	safe_methods = (u'GET', u'HEAD', u'SEARCH')
	#: Methods defined as idempotent (repeating the request has no extra effect).
	idempotent_methods = (u'GET', u'HEAD', u'PUT', u'DELETE', u'OPTIONS', u'TRACE', u'SEARCH')
	# BUG FIX: the previous class [A-Z0-9$-_.] contained the *range* "$-_"
	# (0x24-0x5F), unintentionally accepting bytes such as ":;<=>?@[\]^".
	# The dash is placed last so only the literals $, _, . and - match.
	METHOD_RE = re.compile(br"^[A-Z0-9$_.-]{1,20}\Z", re.IGNORECASE)

	@property
	def safe(self):
		return self in self.safe_methods

	@property
	def idempotent(self):
		return self in self.idempotent_methods

	def __init__(self, method=None):
		# Default to GET, the most common method.
		self.set(method or u'GET')

	def __hash__(self):
		return hash(bytes(self))

	def set(self, method):
		"""Set the method from a unicode or byte string."""
		if isinstance(method, Unicode):
			method = method.encode('ASCII')
		self.parse(method)

	def parse(self, method):
		"""Parse and validate a method given as bytes.

		:raises InvalidLine: if the token contains disallowed characters
			or is longer than 20 bytes.
		"""
		if not self.METHOD_RE.match(method):
			raise InvalidLine(_(u"Invalid method: %r"), method.decode('ISO8859-1'))
		self.__method = method

	def compose(self):
		"""Return the method as bytes for serialization onto the wire."""
		return self.__method
|
{
"content_hash": "7c855fe076e941490fd08c57411a73c1",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 91,
"avg_line_length": 23.46938775510204,
"alnum_prop": 0.6808695652173913,
"repo_name": "spaceone/httoop",
"id": "5c876aeea5e23cf585da09ada1a8d2c11521be3f",
"size": "1174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "httoop/messages/method.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "384542"
},
{
"name": "Makefile",
"bytes": "203"
},
{
"name": "Python",
"bytes": "1369577"
},
{
"name": "Shell",
"bytes": "199"
}
],
"symlink_target": ""
}
|
import os
# import socket
# my_ips = socket.gethostbyname_ex(socket.gethostname())
from datetime import timedelta
import jsonschema
import ipaddress
import re
basedir = os.path.abspath(os.path.dirname(__file__))
@jsonschema.FormatChecker.cls_checks('cidr')
def _validate_cidr_format(cidr):
    """Validate CIDR IP range (e.g. ``192.0.2.0/24``).

    :param str cidr: candidate CIDR string
    :return: True if *cidr* is a well-formed network with an explicit prefix.
    :rtype: bool
    """
    try:
        ipaddress.ip_network(cidr, strict=False)
    except (ValueError, ipaddress.AddressValueError,
            ipaddress.NetmaskValueError):
        return False
    if '/' not in cidr:
        # A bare address parses as a host network; require an explicit
        # prefix length.
        return False
    # Raw string fixes the invalid '\s' escape in the original literal.
    if re.search(r'\s', cidr):
        return False
    return True
@jsonschema.FormatChecker.cls_checks('gpg_pubkey')
def _validate_gpg_pubkey_format(pubkey):
    """Validate GPG public key format
    :param str pubkey: ASCII armored public key
    :return: True only if both the BEGIN and END armor lines are present.
    :rtype: bool
    : ref:: https://stackoverflow.com/questions/24238743/flask-decorator-to-
    verify-json-and-json-schema
    """
    prefix = '-----BEGIN PGP PUBLIC KEY BLOCK-----'
    suffix = '-----END PGP PUBLIC KEY BLOCK-----'
    # BUG FIX: the original combined the checks with `and`, so a key missing
    # only one of the two armor lines was accepted. Both are required.
    if not pubkey.startswith(prefix) or not pubkey.endswith(suffix):
        return False
    return True
class Config:
    """Main configuration class
    Options here are overwritten by child configuration.
    Configuration options can also be overwritten by setting the
    ``DO_LOCAL_CONFIG`` envinronment variable to a configuration file.
    """
    #: Secret key
    SECRET_KEY = os.environ.get('SECRET_KEY')
    #: CSRF secret key
    WTF_CSRF_SECRET_KEY = ''
    #: Name used for the application logger
    LOGGER_NAME = 'doportal'
    #: SQL settings
    SQLALCHEMY_DATABASE_URI = ''
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_ECHO = False
    #: Root PATH of deployment
    ROOT = os.path.dirname(os.path.abspath(__file__))
    APP_ROOT = os.path.join(ROOT, 'app')
    #: Web root
    APP_STATIC = os.path.join(APP_ROOT, 'static')
    APP_DATA = os.path.join(APP_STATIC, 'data')
    #: Uploaded files will be stored here
    APP_UPLOADS = os.path.join(APP_DATA, 'uploads')
    #: Uploaded malware samples will be store here.
    #: In production this is on a different disk mounted with noexec options
    APP_UPLOADS_SAMPLES = os.path.join(APP_DATA, 'samples')
    APP_UPLOADS_SAMPLES_TMP = os.path.join(APP_UPLOADS_SAMPLES, 'tmp')
    LOG_DIR = os.path.join(ROOT, 'logs')
    MISC_DIR = os.path.join(ROOT, 'misc')
    MIGRATIONS_DIR = os.path.join(MISC_DIR, 'migrations')
    JSONSCHEMA_DIR = os.path.join(MISC_DIR, 'json_schemas')
    #: Format checker carrying the custom 'cidr' and 'gpg_pubkey' formats
    JSONSCHEMA_FORMAT_CHECKER = jsonschema.FormatChecker()
    #: Analysis report files location
    #: Big report files are stored here
    REPORTS_PATH = os.path.join(APP_STATIC, 'reports')
    #: Antivirus scan reports
    REPORTS_AV_PATH = os.path.join(REPORTS_PATH, 'av')
    #: Static analysis reports
    REPORTS_STATIC_PATH = os.path.join(REPORTS_PATH, 'static')
    #: Dynamic analysis reports
    REPORTS_DYNAMIC_PATH = os.path.join(REPORTS_PATH, 'dynamic')
    #: E-mail addresses of application administrators
    ADMINS = []
    #: The number of items per page used for paginated data
    ITEMS_PER_PAGE = 20
    #: Email server
    MAIL_SERVER = '127.0.0.1'
    #: Email port
    MAIL_PORT = 25 # 465
    #: Use TLS
    MAIL_USE_TLS = False
    #: Use SSL
    MAIL_USE_SSL = False
    #: E-mail useranme
    MAIL_USERNAME = ''
    #: E-mail password
    MAIL_PASSWORD = None
    #: Default From address for outgoing mail
    MAIL_DEFAULT_SENDER = ''
    # Session
    PERMANENT_SESSION_LIFETIME = timedelta(minutes=30)
    REMEMBER_COOKIE_DURATION = timedelta(days=2)
    REMEMBER_COOKIE_NAME = 'rm'
    #: Client sessions will be store here
    SESSION_COOKIE_SECURE = True
    REMEMBER_COOKIE_HTTPONLY = True
    #: RabbitMQ broker for Celery
    #: Can also be passed from ENV as CELERY_BROKER_URL
    BROKER_URL = ''
    #: The backend used to store task results
    CELERY_BACKEND = ''
    CELERY_RESULT_DB_TABLENAMES = {
        'task': 'tasks_taskmeta',
        'group': 'tasks_groupmeta'
    }
    #: Accepted content
    CELERY_ACCEPT_CONTENT = ['pickle', 'json']
    #: Modules that are expected to use Celery
    CELERY_IMPORTS = ['app.tasks']
    #: http://docs.celeryproject.org/en/latest/userguide/periodic-tasks.html
    #: Scheduled tasks require beat running:
    #: venv/bin/celery beat -A tasks.celery -l debug
    CELERYBEAT_SCHEDULE = {}
    CELERY_TIMEZONE = 'Europe/Brussels'
    #: Mailman API version to use (3.0 or 3.1)
    MAILMAN_REST_API_VERSION = '3.1' # 3.0, 3.1
    #: Mailman API base URL
    MAILMAN_REST_API_URL = ''
    #: Mailman API username
    MAILMAN_REST_API_USER = ''
    #: Mailman API password
    MAILMAN_REST_API_PASS = ''
    #: Mailman mail domain for created lists
    MAILMAN_DOMAIN = ''
    #: Mailman administrator address
    MAILMAN_ADMIN = ''
    #: Syslog host
    SYSLOG_HOST = None
    #: Syslog port
    SYSLOG_PORT = None
    #: Use Lightweight Directory Access Protocol (LDAP) for authentication
    LDAP_AUTH_ENABLED = False
    #: LDAP host
    LDAP_HOST = ''
    #: LDAP base distinguished name
    LDAP_BASE_DN = ''
    #: LDAP user distinguished name
    LDAP_USER_DN = ''
    #: Whether to resolve group membership during LDAP login
    LDAP_SEARCH_FOR_GROUPS = False
    #: LDAP bind user distinguished name
    LDAP_BIND_USER_DN = ''
    #: Service account password used for bind searching
    LDAP_BIND_USER_PASSWORD = ''
    #: LDAP attribute matched against the login name
    LDAP_USER_LOGIN_ATTR = ''
    #: GnuPG home directoy
    GPG_HOME = ""
    #: Full PATH of the gpg binary
    GPG_BINARY = "/usr/local/bin/gpg"
    #: GnuPG keyservers to use.
    #: First keyserver is considered local, second one public.
    GPG_KEYSERVERS = []
    #: Extra option to pass to the gpg binary
    GPG_OPTIONS = ['--batch', '--no-tty', '--yes', '--keyserver-options',
                   'no-honor-keyserver-url,timeout=3']
    #: Verbose output of GPG commands
    GPG_VERBOSE = True
    #: Enable BOSH connections
    BOSH_ENABLED = False
    #: Full URL of the BOSH service URL. It will be passed to clients.
    BOSH_SERVICE = ''
    #: BOSH service URL for the customer portal
    CP_BOSH_SERVICE = ''
    #: Jabber ID. Account needs to be present on the AbuseHelper server.
    JID = '' # append customer resource
    #: Jabber password
    JPASS = ''
    #: List of rooms to join
    ROOMS = []
    #: Rooms to join for the customer portal
    CP_ROOMS = []
    #: Full PATH to multi AV configuration file
    AVSCAN_CONFIG = ''
    #: VxStream Sandbox API base URL
    REST_CLIENT_VX_BASE_URL = None
    #: VxStream Sandbox API key
    REST_CLIENT_VX_API_KEY = None
    #: VxStream Sandbox API secret
    REST_CLIENT_VX_API_SECRET = None
    #: VxStream default environment
    REST_CLIENT_VX_DEFAULT_ENV = 1
    #: Nessus appscan credentials
    REST_CLIENT_NESSUS_BASE_URL = None
    REST_CLIENT_NESSUS_API_KEY = None
    REST_CLIENT_NESSUS_API_SECRET = None
    #: Nessus templates UUIDs available for customers
    REST_CLIENT_NESSUS_TEMPLATES = None
    #: FireEye AX credentials
    REST_CLIENT_FIREEYE_BASE_URL = None
    REST_CLIENT_FIREEYE_USERNAME = None
    REST_CLIENT_FIREEYE_API_SECRET = None
    #: Customer portal web root URL
    CP_WEB_ROOT = ''
    #: Password user for archiving infected files
    INFECTED_PASSWD = 'infected'
    #: HTTP(S) proxies passed to outgoing requests
    PROXIES = {}
class DevelConfig(Config):
    """Development configuration.

    Debugging aids are on and CSRF protection is disabled so the API can
    be exercised without tokens; never use this in production.
    """
    DEBUG = True
    # re-raise bad-request errors instead of returning a generic 400
    TRAP_BAD_REQUEST_ERRORS = True
    ASSETS_DEBUG = True
    # set to True to log every SQL statement issued by SQLAlchemy
    SQLALCHEMY_ECHO = False
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'devkey'
    WTF_CSRF_ENABLED = False
    WTF_CSRF_SECRET_KEY = os.environ.get('WTF_CSRF_SECRET_KEY') or 'devkey'
    SQLALCHEMY_DATABASE_URI = ''
    SQLALCHEMY_TRACK_MODIFICATIONS = False
class TestingConfig(Config):
    """Configuration used by the automated test suite.

    CSRF is disabled and Celery tasks run eagerly (in-process), so tests
    need neither tokens nor a running broker/worker.
    """
    TESTING = True
    SERVER_NAME = 'localhost'
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'do-testing'
    WTF_CSRF_ENABLED = False
    SQLALCHEMY_DATABASE_URI = ''
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    BROKER_URL = ''
    CELERY_BACKEND = ''
    # execute tasks synchronously instead of dispatching to a worker
    CELERY_ALWAYS_EAGER = True
    LDAP_AUTH_ENABLED = True
    BOSH_ENABLED = True
class ProductionConfig(Config):
    """Production configuration: compact JSON output, CSRF enabled."""
    # allow non-ASCII characters in JSON responses
    JSON_AS_ASCII = False
    # keep JSON compact (no pretty-printing) in production
    JSONIFY_PRETTYPRINT_REGULAR = False
    CSRF_ENABLED = True
    FILE_LOGGING = False
    SMTP_LOGGING = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False
#: Map of configuration names to configuration classes; 'default' is the
#: fallback used when no explicit name is given.
config = {
    'devel': DevelConfig,
    'testing': TestingConfig,
    'production': ProductionConfig,
    'default': DevelConfig
}
|
{
"content_hash": "dca08bbea2a79a9105d384c898d11de1",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 76,
"avg_line_length": 30.111524163568774,
"alnum_prop": 0.6517283950617284,
"repo_name": "certeu/do-portal",
"id": "9978665d90f4925e848343cc73e02fb1040f7d35",
"size": "8100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "31516"
},
{
"name": "HTML",
"bytes": "241648"
},
{
"name": "JavaScript",
"bytes": "84093"
},
{
"name": "Makefile",
"bytes": "3016"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "480459"
}
],
"symlink_target": ""
}
|
"""DRBD block device related functionality"""
import errno
import logging
import time
from ganeti import constants
from ganeti import utils
from ganeti import errors
from ganeti import netutils
from ganeti import objects
from ganeti.storage import base
from ganeti.storage.drbd_info import DRBD8Info
from ganeti.storage import drbd_info
from ganeti.storage import drbd_cmdgen
# Size of reads in _CanReadDevice
_DEVICE_READ_SIZE = 128 * 1024
class DRBD8(object):
  """Various methods to deal with the DRBD system as a whole.

  This class provides a set of methods to deal with the DRBD installation on
  the node or with uninitialized devices as opposed to a DRBD device.

  """
  # sysfs file exposing the usermode helper configured for the drbd module
  _USERMODE_HELPER_FILE = "/sys/module/drbd/parameters/usermode_helper"

  # DRBD 8.x minors are allocated dynamically; this is the upper bound
  # accepted when looking for a free one
  _MAX_MINORS = 255

  @staticmethod
  def GetUsermodeHelper(filename=_USERMODE_HELPER_FILE):
    """Returns DRBD usermode_helper currently set.

    @type filename: string
    @param filename: the filename to read the usermode helper from
    @rtype: string
    @return: the currently configured DRBD usermode helper

    """
    try:
      helper = utils.ReadFile(filename).splitlines()[0]
    except EnvironmentError as err:
      # "except ... as err" instead of the legacy comma form: valid on
      # Python 2.6+ and required on Python 3
      if err.errno == errno.ENOENT:
        base.ThrowError("The file %s cannot be opened, check if the module"
                        " is loaded (%s)", filename, str(err))
      else:
        base.ThrowError("Can't read DRBD helper file %s: %s",
                        filename, str(err))
    if not helper:
      base.ThrowError("Can't read any data from %s", filename)
    return helper

  @staticmethod
  def GetProcInfo():
    """Reads and parses information from /proc/drbd.

    @rtype: DRBD8Info
    @return: a L{DRBD8Info} instance containing the current /proc/drbd info

    """
    return DRBD8Info.CreateFromFile()

  @staticmethod
  def GetUsedDevs():
    """Compute the list of used DRBD minors.

    @rtype: list of ints

    """
    info = DRBD8.GetProcInfo()
    return [m for m in info.GetMinors()
            if not info.GetMinorStatus(m).is_unconfigured]

  @staticmethod
  def FindUnusedMinor():
    """Find an unused DRBD device.

    This is specific to 8.x as the minors are allocated dynamically,
    so non-existing numbers up to a max minor count are actually free.

    @rtype: int

    """
    highest = None
    info = DRBD8.GetProcInfo()
    for minor in info.GetMinors():
      status = info.GetMinorStatus(minor)
      if not status.is_in_use:
        # this minor exists but is unused: reuse it
        return minor
      # NOTE: max(None, minor) relies on Python 2 None-vs-int ordering
      highest = max(highest, minor)

    if highest is None:  # there are no minors in use at all
      return 0
    if highest >= DRBD8._MAX_MINORS:
      logging.error("Error: no free drbd minors!")
      raise errors.BlockDeviceError("Can't find a free DRBD minor")
    # all existing minors are in use: allocate the next number
    return highest + 1

  @staticmethod
  def GetCmdGenerator(info):
    """Creates a suitable L{BaseDRBDCmdGenerator} based on the given info.

    @type info: DRBD8Info
    @rtype: BaseDRBDCmdGenerator

    """
    # DRBD 8.3 and 8.4 use different drbdsetup syntax
    version = info.GetVersion()
    if version["k_minor"] <= 3:
      return drbd_cmdgen.DRBD83CmdGenerator(version)
    else:
      return drbd_cmdgen.DRBD84CmdGenerator(version)

  @staticmethod
  def ShutdownAll(minor):
    """Deactivate the device.

    This will, of course, fail if the device is in use.

    @type minor: int
    @param minor: the minor to shut down

    """
    info = DRBD8.GetProcInfo()
    cmd_gen = DRBD8.GetCmdGenerator(info)

    cmd = cmd_gen.GenDownCmd(minor)
    result = utils.RunCmd(cmd)
    if result.failed:
      base.ThrowError("drbd%d: can't shutdown drbd device: %s",
                      minor, result.output)
class DRBD8Dev(base.BlockDev):
  """DRBD v8.x block device.

  This implements the local host part of the DRBD device, i.e. it
  doesn't do anything to the supposed peer. If you need a fully
  connected DRBD pair, you need to use this class on both hosts.

  The unique_id for the drbd device is a (pnode_uuid, snode_uuid,
  port, pnode_minor, lnode_minor, secret) tuple, and it must have
  two children: the data device and the meta_device. The meta
  device is checked for valid size and is zeroed on create.

  """
  # device major number used for DRBD devices
  _DRBD_MAJOR = 147

  # timeout constants
  # seconds to wait for the device to settle during network reconfiguration
  _NET_RECONFIG_TIMEOUT = 60
def __init__(self, unique_id, children, size, params, dyn_params, **kwargs):
  """Initialize the device, validating configuration and kernel version.

  Raises ValueError on malformed unique_id/children/dyn_params and
  aborts (via base.ThrowError) if the installed DRBD kernel driver is
  not an 8.x version.
  """
  # a partially-specified child list is treated as "no children"
  if children and children.count(None) > 0:
    children = []
  if len(children) not in (0, 2):
    raise ValueError("Invalid configuration data %s" % str(children))
  if not isinstance(unique_id, (tuple, list)) or len(unique_id) != 6:
    raise ValueError("Invalid configuration data %s" % str(unique_id))
  if constants.DDP_LOCAL_IP not in dyn_params or \
     constants.DDP_REMOTE_IP not in dyn_params or \
     constants.DDP_LOCAL_MINOR not in dyn_params or \
     constants.DDP_REMOTE_MINOR not in dyn_params:
    raise ValueError("Invalid dynamic parameters %s" % str(dyn_params))

  self._lhost = dyn_params[constants.DDP_LOCAL_IP]
  self._lport = unique_id[2]
  self._rhost = dyn_params[constants.DDP_REMOTE_IP]
  # local and remote side use the same port, taken from unique_id[2]
  self._rport = unique_id[2]
  self._aminor = dyn_params[constants.DDP_LOCAL_MINOR]
  # The secret is wrapped in the Private data type, and it has to be extracted
  # before use
  self._secret = unique_id[5].Get()

  if children:
    # drop the children entirely if the meta device is unreadable
    if not _CanReadDevice(children[1].dev_path):
      logging.info("drbd%s: Ignoring unreadable meta device", self._aminor)
      children = []
  super(DRBD8Dev, self).__init__(unique_id, children, size, params,
                                 dyn_params, **kwargs)
  self.major = self._DRBD_MAJOR

  info = DRBD8.GetProcInfo()
  version = info.GetVersion()
  if version["k_major"] != 8:
    base.ThrowError("Mismatch in DRBD kernel version and requested ganeti"
                    " usage: kernel is %s.%s, ganeti wants 8.x",
                    version["k_major"], version["k_minor"])

  # pick the `drbdsetup show` parser matching the installed minor version
  # (8.3 and 8.4 have different output formats)
  if version["k_minor"] <= 3:
    self._show_info_cls = drbd_info.DRBD83ShowInfo
  else:
    self._show_info_cls = drbd_info.DRBD84ShowInfo

  self._cmd_gen = DRBD8.GetCmdGenerator(info)

  if (self._lhost is not None and self._lhost == self._rhost and
      self._lport == self._rport):
    raise ValueError("Invalid configuration data, same local/remote %s, %s" %
                     (unique_id, dyn_params))
  self.Attach()
@staticmethod
def _DevPath(minor):
  """Return the path to a drbd device for a given minor.

  @type minor: int
  @rtype: string

  """
  return "/dev/drbd{0:d}".format(minor)
def _SetFromMinor(self, minor):
  """Set our parameters based on the given minor.

  This sets our minor variable and our dev_path.

  @type minor: int

  """
  # a minor of None means "not attached"
  self.attached = minor is not None
  if self.attached:
    self.minor = minor
    self.dev_path = self._DevPath(minor)
  else:
    self.minor = None
    self.dev_path = None
@staticmethod
def _CheckMetaSize(meta_device):
  """Check if the given meta device looks like a valid one.

  This currently only checks the size, which must be around
  128MiB.

  @type meta_device: string
  @param meta_device: the path to the device to check

  """
  result = utils.RunCmd(["blockdev", "--getsize", meta_device])
  if result.failed:
    base.ThrowError("Failed to get device size: %s - %s",
                    result.fail_reason, result.output)
  try:
    sector_count = int(result.stdout)
  except (TypeError, ValueError):
    base.ThrowError("Invalid output from blockdev: '%s'", result.stdout)

  # blockdev --getsize reports 512-byte sectors
  num_bytes = sector_count * 512
  if num_bytes < 128 * 1024 * 1024:  # less than 128MiB
    base.ThrowError("Meta device too small (%.2fMib)",
                    (num_bytes / 1024 / 1024))
  # the maximum *valid* size of the meta device when living on top
  # of LVM is hard to compute: it depends on the number of stripes
  # and the PE size; e.g. a 2-stripe, 64MB PE will result in a 128MB
  # (normal size), but an eight-stripe 128MB PE will result in a 1GB
  # size meta device; as such, we restrict it to 1GB (a little bit
  # too generous, but making assumptions about PE size is hard)
  if num_bytes > 1024 * 1024 * 1024:
    base.ThrowError("Meta device too big (%.2fMiB)",
                    (num_bytes / 1024 / 1024))
def _GetShowData(self, minor):
  """Return the `drbdsetup show` data.

  @type minor: int
  @param minor: the minor to collect show output for
  @rtype: string

  """
  show_result = utils.RunCmd(self._cmd_gen.GenShowCmd(minor))
  if not show_result.failed:
    return show_result.stdout
  # on failure, log and signal "no data" with None
  logging.error("Can't display the drbd config: %s - %s",
                show_result.fail_reason, show_result.output)
  return None
def _GetShowInfo(self, minor):
  """Return parsed information from `drbdsetup show`.

  @type minor: int
  @param minor: the minor to return information for
  @rtype: dict as described in L{drbd_info.BaseShowInfo.GetDevInfo}

  """
  raw_output = self._GetShowData(minor)
  return self._show_info_cls.GetDevInfo(raw_output)
def _MatchesLocal(self, info):
  """Test if our local config matches with an existing device.

  The parameter should be as returned from `_GetShowInfo()`. This
  method tests if our local backing device is the same as the one in
  the info parameter, in effect testing if we look like the given
  device.

  @type info: dict as described in L{drbd_info.BaseShowInfo.GetDevInfo}
  @rtype: boolean

  """
  if self._children:
    backend, meta = self._children
  else:
    backend = meta = None

  if backend is not None:
    # we have a data device; drbdsetup must report the same path
    retval = ("local_dev" in info and info["local_dev"] == backend.dev_path)
  else:
    # diskless on our side: drbdsetup must not report a local device
    retval = ("local_dev" not in info)

  if meta is not None:
    retval = retval and ("meta_dev" in info and
                         info["meta_dev"] == meta.dev_path)
    # when a meta index is reported, it must be 0 (the only index used)
    if "meta_index" in info:
      retval = retval and info["meta_index"] == 0
  else:
    retval = retval and ("meta_dev" not in info and
                         "meta_index" not in info)
  return retval
def _MatchesNet(self, info):
  """Test if our network config matches with an existing device.

  The parameter should be as returned from `_GetShowInfo()`. This
  method tests if our network configuration is the same as the one
  in the info parameter, in effect testing if we look like the given
  device.

  @type info: dict as described in L{drbd_info.BaseShowInfo.GetDevInfo}
  @rtype: boolean

  """
  if (((self._lhost is None and not ("local_addr" in info)) and
       (self._rhost is None and not ("remote_addr" in info)))):
    # both we and the device are networkless: trivially matching
    return True

  if self._lhost is None:
    return False

  if not ("local_addr" in info and
          "remote_addr" in info):
    return False

  # both endpoints must match as (host, port) pairs
  retval = (info["local_addr"] == (self._lhost, self._lport))
  retval = (retval and
            info["remote_addr"] == (self._rhost, self._rport))
  return retval
def _AssembleLocal(self, minor, backend, meta, size):
  """Configure the local part of a DRBD device.

  @type minor: int
  @param minor: the minor to assemble locally
  @type backend: string
  @param backend: path to the data device to use
  @type meta: string
  @param meta: path to the meta device to use
  @type size: int
  @param size: size in MiB

  """
  init_cmds = self._cmd_gen.GenLocalInitCmds(minor, backend, meta,
                                             size, self.params)

  # run each generated command, aborting on the first failure
  for init_cmd in init_cmds:
    cmd_result = utils.RunCmd(init_cmd)
    if cmd_result.failed:
      base.ThrowError("drbd%d: can't attach local disk: %s",
                      minor, cmd_result.output)
def _AssembleNet(self, minor, net_info, dual_pri=False, hmac=None,
                 secret=None):
  """Configure the network part of the device.

  @type minor: int
  @param minor: the minor to assemble the network for
  @type net_info: (string, int, string, int)
  @param net_info: tuple containing the local address, local port, remote
      address and remote port
  @type dual_pri: boolean
  @param dual_pri: whether two primaries should be allowed or not
  @type hmac: string
  @param hmac: the HMAC algorithm to use
  @type secret: string
  @param secret: the shared secret to use

  """
  lhost, lport, rhost, rport = net_info
  if None in net_info:
    # we don't want network connection and actually want to make
    # sure its shutdown
    self._ShutdownNet(minor)
    return

  # dual-primary mode (used during migration) needs a different protocol
  if dual_pri:
    protocol = constants.DRBD_MIGRATION_NET_PROTOCOL
  else:
    protocol = self.params[constants.LDP_PROTOCOL]

  # Workaround for a race condition. When DRBD is doing its dance to
  # establish a connection with its peer, it also sends the
  # synchronization speed over the wire. In some cases setting the
  # sync speed only after setting up both sides can race with DRBD
  # connecting, hence we set it here before telling DRBD anything
  # about its peer.
  sync_errors = self._SetMinorSyncParams(minor, self.params)
  if sync_errors:
    base.ThrowError("drbd%d: can't set the synchronization parameters: %s" %
                    (minor, utils.CommaJoin(sync_errors)))

  family = self._GetNetFamily(minor, lhost, rhost)

  cmd = self._cmd_gen.GenNetInitCmd(minor, family, lhost, lport,
                                    rhost, rport, protocol,
                                    dual_pri, hmac, secret, self.params)

  result = utils.RunCmd(cmd)
  if result.failed:
    base.ThrowError("drbd%d: can't setup network: %s - %s",
                    minor, result.fail_reason, result.output)

  def _CheckNetworkConfig():
    # poll `drbdsetup show` until both endpoints appear with the
    # addresses we just configured
    info = self._GetShowInfo(minor)
    if not "local_addr" in info or not "remote_addr" in info:
      raise utils.RetryAgain()

    if (info["local_addr"] != (lhost, lport) or
        info["remote_addr"] != (rhost, rport)):
      raise utils.RetryAgain()

  try:
    utils.Retry(_CheckNetworkConfig, 1.0, 10.0)
  except utils.RetryTimeout:
    base.ThrowError("drbd%d: timeout while configuring network", minor)

  # Once the assembly is over, try to set the synchronization parameters
  try:
    # The minor may not have been set yet, requiring us to set it at least
    # temporarily
    old_minor = self.minor
    self._SetFromMinor(minor)
    sync_errors = self.SetSyncParams(self.params)
    if sync_errors:
      base.ThrowError("drbd%d: can't set the synchronization parameters: %s" %
                      (self.minor, utils.CommaJoin(sync_errors)))
  finally:
    # Undo the change, regardless of whether it will have to be done again
    # soon
    self._SetFromMinor(old_minor)
@staticmethod
def _GetNetFamily(minor, lhost, rhost):
  """Derive the address family from the endpoints.

  Aborts (via base.ThrowError) when the two addresses do not belong to
  the same family or the local address is invalid.
  """
  if netutils.IP6Address.IsValid(lhost):
    if netutils.IP6Address.IsValid(rhost):
      return "ipv6"
    base.ThrowError("drbd%d: can't connect ip %s to ip %s" %
                    (minor, lhost, rhost))
  elif netutils.IP4Address.IsValid(lhost):
    if netutils.IP4Address.IsValid(rhost):
      return "ipv4"
    base.ThrowError("drbd%d: can't connect ip %s to ip %s" %
                    (minor, lhost, rhost))
  else:
    base.ThrowError("drbd%d: Invalid ip %s" % (minor, lhost))
def AddChildren(self, devices):
  """Add a disk to the DRBD device.

  @type devices: list of L{BlockDev}
  @param devices: a list of exactly two L{BlockDev} objects; the first
      denotes the data device, the second the meta device for this DRBD
      device

  """
  if self.minor is None:
    # fixed typo in the error message: "dbrd8" -> "drbd8"
    base.ThrowError("drbd%d: can't attach to drbd8 during AddChildren",
                    self._aminor)
  if len(devices) != 2:
    base.ThrowError("drbd%d: need two devices for AddChildren", self.minor)
  info = self._GetShowInfo(self.minor)
  if "local_dev" in info:
    base.ThrowError("drbd%d: already attached to a local disk", self.minor)
  backend, meta = devices
  if backend.dev_path is None or meta.dev_path is None:
    base.ThrowError("drbd%d: children not ready during AddChildren",
                    self.minor)
  backend.Open()
  meta.Open()
  # validate and (re)initialize the meta device before attaching
  self._CheckMetaSize(meta.dev_path)
  self._InitMeta(DRBD8.FindUnusedMinor(), meta.dev_path)

  self._AssembleLocal(self.minor, backend.dev_path, meta.dev_path, self.size)
  self._children = devices
def RemoveChildren(self, devices):
  """Detach the drbd device from local storage.

  @type devices: list of L{BlockDev}
  @param devices: a list of exactly two L{BlockDev} objects; the first
      denotes the data device, the second the meta device for this DRBD
      device

  """
  if self.minor is None:
    base.ThrowError("drbd%d: can't attach to drbd8 during RemoveChildren",
                    self._aminor)
  # early return if we don't actually have backing storage
  info = self._GetShowInfo(self.minor)
  if "local_dev" not in info:
    return

  if len(self._children) != 2:
    base.ThrowError("drbd%d: we don't have two children: %s", self.minor,
                    self._children)
  if self._children.count(None) == 2: # we don't actually have children :)
    logging.warning("drbd%d: requested detach while detached", self.minor)
    return

  if len(devices) != 2:
    base.ThrowError("drbd%d: we need two children in RemoveChildren",
                    self.minor)
  # the caller must ask to detach exactly the devices we currently hold;
  # compare by device path
  for child, dev in zip(self._children, devices):
    if dev != child.dev_path:
      base.ThrowError("drbd%d: mismatch in local storage (%s != %s) in"
                      " RemoveChildren", self.minor, dev, child.dev_path)

  self._ShutdownLocal(self.minor)
  self._children = []
def _SetMinorSyncParams(self, minor, params):
  """Set the parameters of the DRBD syncer.

  This is the low-level implementation.

  @type minor: int
  @param minor: the drbd minor whose settings we change
  @type params: dict
  @param params: LD level disk parameters related to the synchronization
  @rtype: list
  @return: a list of error messages

  """
  sync_cmd = self._cmd_gen.GenSyncParamsCmd(minor, params)
  sync_result = utils.RunCmd(sync_cmd)
  if not sync_result.failed:
    return []
  # report the failure as a message list rather than raising
  err_msg = ("Can't change syncer rate: %s - %s" %
             (sync_result.fail_reason, sync_result.output))
  logging.error(err_msg)
  return [err_msg]
def SetSyncParams(self, params):
  """Set the synchronization parameters of the DRBD syncer.

  See L{BlockDev.SetSyncParams} for parameter description.

  """
  if self.minor is None:
    message = "Not attached during SetSyncParams"
    logging.info(message)
    return [message]

  # collect errors from the children first, then from our own minor
  error_list = super(DRBD8Dev, self).SetSyncParams(params)
  error_list += self._SetMinorSyncParams(self.minor, params)
  return error_list
def PauseResumeSync(self, pause):
  """Pauses or resumes the sync of a DRBD device.

  See L{BlockDev.PauseResumeSync} for parameter description.

  """
  if self.minor is None:
    logging.info("Not attached during PauseSync")
    return False

  children_ok = super(DRBD8Dev, self).PauseResumeSync(pause)

  if pause:
    sync_cmd = self._cmd_gen.GenPauseSyncCmd(self.minor)
  else:
    sync_cmd = self._cmd_gen.GenResumeSyncCmd(self.minor)

  sync_result = utils.RunCmd(sync_cmd)
  if sync_result.failed:
    logging.error("Can't %s: %s - %s", sync_cmd,
                  sync_result.fail_reason, sync_result.output)
  # succeed only if both our command and all children succeeded
  return not sync_result.failed and children_ok
def GetProcStatus(self):
  """Return the current status data from /proc/drbd for this device.

  @rtype: DRBD8Status

  """
  if self.minor is None:
    base.ThrowError("drbd%d: GetStats() called while not attached",
                    self._aminor)
  proc_info = DRBD8.GetProcInfo()
  if not proc_info.HasMinorStatus(self.minor):
    base.ThrowError("drbd%d: can't find myself in /proc", self.minor)
  return proc_info.GetMinorStatus(self.minor)
def GetSyncStatus(self):
  """Returns the sync status of the device.

  If sync_percent is None, it means all is ok
  If estimated_time is None, it means we can't estimate
  the time needed, otherwise it's the time left in seconds.

  We set the is_degraded parameter to True on two conditions:
  network not connected or local disk missing.

  We compute the ldisk parameter based on whether we have a local
  disk or not.

  @rtype: objects.BlockDevStatus

  """
  if self.minor is None and not self.Attach():
    base.ThrowError("drbd%d: can't Attach() in GetSyncStatus", self._aminor)

  stats = self.GetProcStatus()
  is_degraded = not stats.is_connected or not stats.is_disk_uptodate

  # map the /proc/drbd disk state to the generic local-disk status
  if stats.is_disk_uptodate:
    ldisk_status = constants.LDS_OKAY
  elif stats.is_diskless:
    ldisk_status = constants.LDS_FAULTY
  elif stats.is_in_resync:
    ldisk_status = constants.LDS_SYNC
  else:
    ldisk_status = constants.LDS_UNKNOWN

  return objects.BlockDevStatus(dev_path=self.dev_path,
                                major=self.major,
                                minor=self.minor,
                                sync_percent=stats.sync_percent,
                                estimated_time=stats.est_time,
                                is_degraded=is_degraded,
                                ldisk_status=ldisk_status)
def Open(self, force=False, exclusive=True):
  """Make the local state primary.

  If the 'force' parameter is given, DRBD is instructed to switch the device
  into primary mode. Since this is a potentially dangerous operation, the
  force flag should be only given after creation, when it actually is
  mandatory.

  """
  # "exclusive" is accepted for interface compatibility; it is not used here
  if self.minor is None and not self.Attach():
    logging.error("DRBD cannot attach to a device during open")
    return False

  primary_cmd = self._cmd_gen.GenPrimaryCmd(self.minor, force)
  primary_result = utils.RunCmd(primary_cmd)
  if primary_result.failed:
    base.ThrowError("drbd%d: can't make drbd device primary: %s", self.minor,
                    primary_result.output)
def Close(self):
  """Make the local state secondary.

  This will, of course, fail if the device is in use.

  """
  if self.minor is None and not self.Attach():
    base.ThrowError("drbd%d: can't Attach() in Close()", self._aminor)

  secondary_cmd = self._cmd_gen.GenSecondaryCmd(self.minor)
  secondary_result = utils.RunCmd(secondary_cmd)
  if secondary_result.failed:
    base.ThrowError("drbd%d: can't switch drbd device to secondary: %s",
                    self.minor, secondary_result.output)
def DisconnectNet(self):
  """Removes network configuration.

  This method shutdowns the network side of the device.

  The method will wait up to a hardcoded timeout for the device to
  go into standalone after the 'disconnect' command before
  re-configuring it, as sometimes it takes a while for the
  disconnect to actually propagate and thus we might issue a 'net'
  command while the device is still connected. If the device will
  still be attached to the network and we time out, we raise an
  exception.

  """
  if self.minor is None:
    base.ThrowError("drbd%d: disk not attached in re-attach net",
                    self._aminor)

  if None in (self._lhost, self._lport, self._rhost, self._rport):
    base.ThrowError("drbd%d: DRBD disk missing network info in"
                    " DisconnectNet()", self.minor)

  # mutable holder so the nested retry function below can record whether
  # any disconnect attempt ever succeeded
  class _DisconnectStatus(object):
    def __init__(self, ever_disconnected):
      self.ever_disconnected = ever_disconnected

  dstatus = _DisconnectStatus(base.IgnoreError(self._ShutdownNet, self.minor))

  def _WaitForDisconnect():
    if self.GetProcStatus().is_standalone:
      return

    # retry the disconnect, it seems possible that due to a well-time
    # disconnect on the peer, my disconnect command might be ignored and
    # forgotten
    dstatus.ever_disconnected = \
      base.IgnoreError(self._ShutdownNet, self.minor) or \
      dstatus.ever_disconnected

    raise utils.RetryAgain()

  # Keep start time
  start_time = time.time()

  try:
    # Start delay at 100 milliseconds and grow up to 2 seconds
    utils.Retry(_WaitForDisconnect, (0.1, 1.5, 2.0),
                self._NET_RECONFIG_TIMEOUT)
  except utils.RetryTimeout:
    if dstatus.ever_disconnected:
      msg = ("drbd%d: device did not react to the"
             " 'disconnect' command in a timely manner")
    else:
      msg = "drbd%d: can't shutdown network, even after multiple retries"

    base.ThrowError(msg, self.minor)

  # log slow disconnects (more than a quarter of the allowed timeout)
  reconfig_time = time.time() - start_time
  if reconfig_time > (self._NET_RECONFIG_TIMEOUT * 0.25):
    logging.info("drbd%d: DisconnectNet: detach took %.3f seconds",
                 self.minor, reconfig_time)
def AttachNet(self, multimaster):
  """Reconnects the network.

  This method connects the network side of the device with a
  specified multi-master flag. The device needs to be 'Standalone'
  but have valid network configuration data.

  @type multimaster: boolean
  @param multimaster: init the network in dual-primary mode

  """
  if self.minor is None:
    base.ThrowError("drbd%d: device not attached in AttachNet", self._aminor)

  if None in (self._lhost, self._lport, self._rhost, self._rport):
    base.ThrowError("drbd%d: missing network info in AttachNet()", self.minor)

  status = self.GetProcStatus()

  # refuse to reconnect unless the device is currently standalone
  if not status.is_standalone:
    base.ThrowError("drbd%d: device is not standalone in AttachNet",
                    self.minor)

  self._AssembleNet(self.minor,
                    (self._lhost, self._lport, self._rhost, self._rport),
                    dual_pri=multimaster, hmac=constants.DRBD_HMAC_ALG,
                    secret=self._secret)
def Attach(self, **kwargs):
  """Check if our minor is configured.

  This doesn't do any device configurations - it only checks if the
  minor is in a state different from Unconfigured.

  Note that this function will not change the state of the system in
  any way (except in case of side-effects caused by reading from
  /proc).

  """
  # the minor counts as attached iff it shows up among the used devices
  if self._aminor in DRBD8.GetUsedDevs():
    found_minor = self._aminor
  else:
    found_minor = None

  self._SetFromMinor(found_minor)
  return found_minor is not None
def Assemble(self):
  """Assemble the drbd.

  Method:
    - if we have a configured device, we try to ensure that it matches
      our config
    - if not, we create it from zero
    - anyway, set the device parameters

  """
  super(DRBD8Dev, self).Assemble()

  self.Attach()
  if self.minor is not None:
    # some configuration already exists: recheck the local and network
    # status and try to fix the device
    self._SlowAssemble()
  else:
    # local device completely unconfigured: build it from scratch
    self._FastAssemble()
def _SlowAssemble(self):
  """Assembles the DRBD device from a (partially) configured device.

  In case of partially attached (local device matches but no network
  setup), we perform the network attach. If successful, we re-test
  the attach if can return success.

  """
  # TODO: Rewrite to not use a for loop just because there is 'break'
  # pylint: disable=W0631
  net_data = (self._lhost, self._lport, self._rhost, self._rport)
  for minor in (self._aminor,):
    info = self._GetShowInfo(minor)
    match_l = self._MatchesLocal(info)
    match_r = self._MatchesNet(info)

    if match_l and match_r:
      # everything matches
      break

    if match_l and not match_r and "local_addr" not in info:
      # disk matches, but not attached to network, attach and recheck
      self._AssembleNet(minor, net_data, hmac=constants.DRBD_HMAC_ALG,
                        secret=self._secret)
      if self._MatchesNet(self._GetShowInfo(minor)):
        break
      else:
        base.ThrowError("drbd%d: network attach successful, but 'drbdsetup"
                        " show' disagrees", minor)

    if match_r and "local_dev" not in info:
      # no local disk, but network attached and it matches
      self._AssembleLocal(minor, self._children[0].dev_path,
                          self._children[1].dev_path, self.size)
      if self._MatchesLocal(self._GetShowInfo(minor)):
        break
      else:
        base.ThrowError("drbd%d: disk attach successful, but 'drbdsetup"
                        " show' disagrees", minor)

    # this case must be considered only if we actually have local
    # storage, i.e. not in diskless mode, because all diskless
    # devices are equal from the point of view of local
    # configuration
    if (match_l and "local_dev" in info and
        not match_r and "local_addr" in info):
      # strange case - the device network part points to somewhere
      # else, even though its local storage is ours; as we own the
      # drbd space, we try to disconnect from the remote peer and
      # reconnect to our correct one
      try:
        self._ShutdownNet(minor)
      except errors.BlockDeviceError as err:
        # "as err" instead of the legacy ", err" form: valid on
        # Python 2.6+ and required on Python 3
        base.ThrowError("drbd%d: device has correct local storage, wrong"
                        " remote peer and is unable to disconnect in order"
                        " to attach to the correct peer: %s", minor, str(err))
      # note: _AssembleNet also handles the case when we don't want
      # local storage (i.e. one or more of the _[lr](host|port) is
      # None)
      self._AssembleNet(minor, net_data, hmac=constants.DRBD_HMAC_ALG,
                        secret=self._secret)
      if self._MatchesNet(self._GetShowInfo(minor)):
        break
      else:
        base.ThrowError("drbd%d: network attach successful, but 'drbdsetup"
                        " show' disagrees", minor)
  else:
    minor = None

  self._SetFromMinor(minor)
  if minor is None:
    base.ThrowError("drbd%d: cannot activate, unknown or unhandled reason",
                    self._aminor)
def _FastAssemble(self):
  """Assemble the drbd device from zero.

  This is run when in Assemble we detect our minor is unused.

  """
  minor = self._aminor

  # attach the backing storage, if we have both data and meta devices
  if self._children and self._children[0] and self._children[1]:
    self._AssembleLocal(minor, self._children[0].dev_path,
                        self._children[1].dev_path, self.size)

  # configure the network side, if we have full endpoint information
  net_config = (self._lhost, self._lport, self._rhost, self._rport)
  if all(net_config):
    self._AssembleNet(minor, net_config,
                      hmac=constants.DRBD_HMAC_ALG, secret=self._secret)

  self._SetFromMinor(minor)
def _ShutdownLocal(self, minor):
  """Detach from the local device.

  I/Os will continue to be served from the remote device. If we
  don't have a remote device, this operation will fail.

  @type minor: int
  @param minor: the device to detach from the local device

  """
  detach_cmd = self._cmd_gen.GenDetachCmd(minor)
  detach_result = utils.RunCmd(detach_cmd)
  if detach_result.failed:
    base.ThrowError("drbd%d: can't detach local disk: %s",
                    minor, detach_result.output)
def _ShutdownNet(self, minor):
  """Disconnect from the remote peer.

  This fails if we don't have a local device.

  @type minor: int
  @param minor: the device to disconnect from the remote peer

  """
  family = self._GetNetFamily(minor, self._lhost, self._rhost)
  cmd = self._cmd_gen.GenDisconnectCmd(minor, family,
                                       self._lhost, self._lport,
                                       self._rhost, self._rport)
  result = utils.RunCmd(cmd)
  if result.failed:
    base.ThrowError("drbd%d: can't shutdown network: %s",
                    minor, result.output)
def Shutdown(self):
  """Shutdown the DRBD device.

  """
  if self.minor is None and not self.Attach():
    logging.info("drbd%d: not attached during Shutdown()", self._aminor)
    return
  try:
    DRBD8.ShutdownAll(self.minor)
  finally:
    # forget our state even if the shutdown command failed
    self.minor = None
    self.dev_path = None
def Remove(self):
  """Stub remove for DRBD devices.

  DRBD devices are only assembled, not created, so removal amounts to
  shutting the device down.
  """
  self.Shutdown()
def Rename(self, new_id):
  """Rename a device.

  This is not supported for drbd devices.

  @raise errors.ProgrammerError: always

  """
  raise errors.ProgrammerError("Can't rename a drbd device")
def Grow(self, amount, dryrun, backingstore, excl_stor):
  """Resize the DRBD device and its backing storage.

  See L{BlockDev.Grow} for parameter description.

  """
  if self.minor is None:
    base.ThrowError("drbd%d: Grow called while not attached", self._aminor)
  if len(self._children) != 2 or None in self._children:
    base.ThrowError("drbd%d: cannot grow diskless device", self.minor)
  # grow the data (backing) device first
  self._children[0].Grow(amount, dryrun, backingstore, excl_stor)
  if dryrun or backingstore:
    # DRBD does not support dry-run mode and is not backing storage,
    # so we'll return here
    return
  cmd = self._cmd_gen.GenResizeCmd(self.minor, self.size + amount)
  result = utils.RunCmd(cmd)
  if result.failed:
    base.ThrowError("drbd%d: resize failed: %s", self.minor, result.output)
@classmethod
def _InitMeta(cls, minor, dev_path):
  """Initialize a meta device.

  This will not work if the given minor is in use.

  @type minor: int
  @param minor: the DRBD minor whose (future) meta device should be
      initialized
  @type dev_path: string
  @param dev_path: path to the meta device to initialize

  """
  # Zero the metadata first, in order to make sure drbdmeta doesn't
  # try to auto-detect existing filesystems or similar (see
  # http://code.google.com/p/ganeti/issues/detail?id=182); we only
  # care about the first 128MB of data in the device, even though it
  # can be bigger
  result = utils.RunCmd([constants.DD_CMD,
                         "if=/dev/zero", "of=%s" % dev_path,
                         "bs=%s" % constants.DD_BLOCK_SIZE, "count=128",
                         "oflag=direct"])
  if result.failed:
    base.ThrowError("Can't wipe the meta device: %s", result.output)

  info = DRBD8.GetProcInfo()
  cmd_gen = DRBD8.GetCmdGenerator(info)
  cmd = cmd_gen.GenInitMetaCmd(minor, dev_path)

  result = utils.RunCmd(cmd)
  if result.failed:
    base.ThrowError("Can't initialize meta device: %s", result.output)
@classmethod
def Create(cls, unique_id, children, size, spindles, params, excl_stor,
           dyn_params, **kwargs):
  """Create a new DRBD8 device.

  Since DRBD devices are not created per se, just assembled, this
  function only initializes the metadata.

  """
  if len(children) != 2:
    raise errors.ProgrammerError("Invalid setup for the drbd device")
  if excl_stor:
    raise errors.ProgrammerError("DRBD device requested with"
                                 " exclusive_storage")
  if constants.DDP_LOCAL_MINOR not in dyn_params:
    raise errors.ProgrammerError("Invalid dynamic params for drbd device %s"
                                 % dyn_params)
  # check that the minor is unused
  aminor = dyn_params[constants.DDP_LOCAL_MINOR]

  info = DRBD8.GetProcInfo()
  if info.HasMinorStatus(aminor):
    status = info.GetMinorStatus(aminor)
    in_use = status.is_in_use
  else:
    in_use = False
  if in_use:
    base.ThrowError("drbd%d: minor is already in use at Create() time",
                    aminor)

  # initialize metadata on the meta device (children[1])
  meta = children[1]
  meta.Assemble()
  if not meta.Attach():
    base.ThrowError("drbd%d: can't attach to meta device '%s'",
                    aminor, meta)
  cls._CheckMetaSize(meta.dev_path)
  cls._InitMeta(aminor, meta.dev_path)

  # NOTE(review): spindles and **kwargs are accepted but not forwarded
  # to the constructor here
  return cls(unique_id, children, size, params, dyn_params)
def _CanReadDevice(path):
  """Check if we can read from the given device.

  This tries to read the first 128k of the device.

  @type path: string

  """
  try:
    utils.ReadFile(path, size=_DEVICE_READ_SIZE)
  except EnvironmentError:
    # treat any OS-level read error as "unreadable"
    logging.warning("Can't read from device %s", path, exc_info=True)
    return False
  return True
|
{
"content_hash": "d21111d7155a04172c5c1a81bcb16fa5",
"timestamp": "",
"source": "github",
"line_count": 1064,
"max_line_length": 80,
"avg_line_length": 33.955827067669176,
"alnum_prop": 0.636330925295469,
"repo_name": "yiannist/ganeti",
"id": "4f6cd77eaed618fd92823fcb90dab25924831588",
"size": "37501",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/storage/drbd.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Haskell",
"bytes": "2509723"
},
{
"name": "JavaScript",
"bytes": "8808"
},
{
"name": "M4",
"bytes": "31972"
},
{
"name": "Makefile",
"bytes": "96586"
},
{
"name": "Python",
"bytes": "6231906"
},
{
"name": "Shell",
"bytes": "151065"
}
],
"symlink_target": ""
}
|
import unittest, time, sys, time, random, json
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_browse as h2b, h2o_import as h2i, h2o_common
# When True, test a random sample of 8 datasets instead of the full ordered list.
DO_RANDOM_SAMPLE = True
# When True, also build one Random Forest tree after each parse.
DO_RF = False
print "Assumes you ran ../build_for_clone.py in this directory"
print "Using h2o-nodes.json. Also the sandbox dir"
class releaseTest(h2o_common.ReleaseCommon, unittest.TestCase):

    def test_c6_maprfs(self):
        """Import and parse a selection of datasets from MapR-FS, timing each parse,
        and optionally (DO_RF) build one Random Forest tree per dataset."""
        h2o.beta_features = True
        print "\nLoad a list of files from maprfs, parse and do 1 RF tree"
        # larger set in my local dir
        # fails because classes aren't integers
        # "allstate_claim_prediction_train_set.zip",
        csvFilenameAll = [
            # "3G_poker_shuffle",
            "TEST-poker1000.csv",
            "and-testing.data",
            "arcene2_train.both",
            "arcene_train.both",
            "bestbuy_test.csv",
            "bestbuy_train.csv",
            # "billion_rows.csv.gz",
            "covtype.13x.data",
            "covtype.13x.shuffle.data",
            # "covtype.169x.data",
            "covtype.4x.shuffle.data",
            "covtype.data",
            "covtype4x.shuffle.data",
            "hhp.unbalanced.012.1x11.data.gz",
            "hhp.unbalanced.012.data.gz",
            "hhp.unbalanced.data.gz",
            # duplicate column header "A"
            # "hhp2.os.noisy.0_1.data",
            "hhp2.os.noisy.9_4.data",
            "hhp_9_14_12.data",
            "leads.csv",
            "prostate_long_1G.csv",
        ]
        # find_cloud.py won't set these correctly. Let's just set them here
        # h2o.nodes[0].use_maprfs = True
        # h2o.nodes[0].use_hdfs = False
        # h2o.nodes[0].hdfs_version = 'mapr3.0.1',
        # h2o.nodes[0].hdfs_name_node = '192.168.1.171:7222'
        h2o.setup_benchmark_log()
        # benchmarkLogging = ['cpu','disk', 'network', 'iostats', 'jstack']
        # benchmarkLogging = ['cpu','disk', 'network', 'iostats']
        # benchmarkLogging = ['cpu', 'disk', 'network']
        benchmarkLogging = []
        # pick 8 randomly!
        if DO_RANDOM_SAMPLE:
            csvFilenameList = random.sample(csvFilenameAll,8)
        # Alternatively: do the list in order! Note the order is easy to hard
        else:
            csvFilenameList = csvFilenameAll
        # save the first, for all comparisions, to avoid slow drift with each iteration
        importFolderPath = "datasets"
        trial = 0
        for csvFilename in csvFilenameList:
            # creates csvFilename.hex from file in hdfs dir
            csvPathname = importFolderPath + "/" + csvFilename
            timeoutSecs = 1000
            # do an import first, because we want to get the size of the file
            (importResult, importPattern) = h2i.import_only(path=csvPathname, schema="maprfs", timeoutSecs=timeoutSecs)
            succeeded = importResult['succeeded']
            if len(succeeded) < 1:
                raise Exception("Should have imported at least 1 key for %s" % csvPathname)
            # just do a search
            foundIt = None
            for f in succeeded:
                if csvPathname in f['key']:
                    foundIt = f
                    break
            if not foundIt:
                raise Exception("Should have found %s in the imported keys for %s" % (importPattern, csvPathname))
            # NOTE(review): totalBytes is never assigned from the import result
            # (presumably foundIt['value_size_bytes'] was intended, per the sample
            # response below), so fileMBS is always 0 — confirm intended behavior.
            totalBytes = 0
            # "succeeded": [
            #     {
            #         "file": "maprfs://192.168.1.171:7222/datasets/prostate_long_1G.csv",
            #         "key": "maprfs://192.168.1.171:7222/datasets/prostate_long_1G.csv",
            #         "value_size_bytes": 1115287100
            #     },
            print "Loading", csvFilename, 'from maprfs'
            start = time.time()
            parseResult = h2i.import_parse(path=csvPathname, schema="maprfs", timeoutSecs=timeoutSecs, pollTimeoutSecs=360,
                doSummary=True, benchmarkLogging=benchmarkLogging, noPoll=h2o.beta_features)
            # NOTE(review): h2j is not imported in this file; when beta_features is
            # set this call raises NameError — likely "import h2o_jobs as h2j" is
            # missing from the imports. Confirm against sibling tests.
            if h2o.beta_features:
                h2j.pollWaitJobs(timeoutSecs=timeoutSecs, pollTimeoutSecs=timeoutSecs)
            print "parse result:", parseResult['destination_key']
            elapsed = time.time() - start
            fileMBS = (totalBytes/1e6)/elapsed
            l = '{!s} jvms, {!s}GB heap, {:s} {:s} for {:.2f} secs'.format(
                len(h2o.nodes), h2o.nodes[0].java_heap_GB, 'Parse', csvPathname, elapsed)
            print "\n"+l
            h2o.cloudPerfH2O.message(l)
            if DO_RF:
                print "\n" + csvFilename
                start = time.time()
                kwargs = {
                    'ntrees': 1
                    }
                paramsString = json.dumps(kwargs)
                RFview = h2o_cmd.runRF(parseResult=parseResult, timeoutSecs=2000,
                    benchmarkLogging=benchmarkLogging, noPoll=h2o.beta_features, **kwargs)
                # see NOTE(review) above about the missing h2j import
                if h2o.beta_features:
                    h2j.pollWaitJobs(timeoutSecs=timeoutSecs, pollTimeoutSecs=timeoutSecs)
                elapsed = time.time() - start
                print "rf end on ", csvPathname, 'took', elapsed, 'seconds.', "%d pct. of timeout" % ((elapsed/timeoutSecs) * 100)
                l = '{!s} jvms, {!s}GB heap, {:s} {:s} {:s} for {:.2f} secs {:s}' .format(
                    len(h2o.nodes), h2o.nodes[0].java_heap_GB, "RF", "trial "+str(trial), csvFilename, elapsed, paramsString)
                print l
                h2o.cloudPerfH2O.message(l)
            print "Deleting all keys, to make sure our parse times don't include spills"
            h2i.delete_keys_at_all_nodes()
            trial += 1
# Run via h2o's test harness (handles cloud setup/teardown around unittest).
if __name__ == '__main__':
    h2o.unit_main()
|
{
"content_hash": "384cfa24b9b28ef4df41059c92252fec",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 130,
"avg_line_length": 42.303703703703704,
"alnum_prop": 0.5522675538434599,
"repo_name": "woobe/h2o",
"id": "ea9066cf5d8287b2df14b6cc619f03e019bb07ce",
"size": "5711",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "py/testdir_release/c6/test_c6_maprfs_fvec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from hacksport.problem_templates import CompiledBinary

# hacksport discovers challenges through this module-level Problem class:
# compile vuln.c with the CompiledBinary template's default settings.
Problem = CompiledBinary(sources=["vuln.c"])
|
{
"content_hash": "2c2963da3cbae60e7cd10ddc0196e7af",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 54,
"avg_line_length": 33.666666666666664,
"alnum_prop": 0.8118811881188119,
"repo_name": "royragsdale/picoCTF",
"id": "75c3f89de98a8b18216d2c38d54fbe2d1250cd59",
"size": "101",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "problems/examples/binary-exploitation/buffer-overflow-1/challenge.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3499"
},
{
"name": "CSS",
"bytes": "10098"
},
{
"name": "HCL",
"bytes": "29074"
},
{
"name": "HTML",
"bytes": "43833"
},
{
"name": "JavaScript",
"bytes": "335500"
},
{
"name": "Makefile",
"bytes": "160"
},
{
"name": "PHP",
"bytes": "1436"
},
{
"name": "Python",
"bytes": "572119"
},
{
"name": "Ruby",
"bytes": "5035"
},
{
"name": "Shell",
"bytes": "8300"
}
],
"symlink_target": ""
}
|
from ._airflow import Airflow # noqa
|
{
"content_hash": "c9071e3a54c4a28273c0e6d8ab423743",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 37,
"avg_line_length": 38,
"alnum_prop": 0.7368421052631579,
"repo_name": "yebrahim/pydatalab",
"id": "60ad573ee3af97f48c0a947a8eee319e8cd6ac5a",
"size": "626",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "google/datalab/contrib/pipeline/airflow/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7596"
},
{
"name": "Python",
"bytes": "2406009"
},
{
"name": "Shell",
"bytes": "4256"
},
{
"name": "TypeScript",
"bytes": "105309"
}
],
"symlink_target": ""
}
|
from optparse import make_option
import threading
from blinkpy.tool.commands.command import Command
class AbstractLocalServerCommand(Command):
    """Base for commands that start a local HTTP server and open it in a browser.

    Subclasses set `server` (the server class to instantiate) and may override
    `launch_path`, and must implement `_prepare_config`.
    """

    server = None
    launch_path = '/'

    def __init__(self):
        port_option = make_option(
            '--httpd-port',
            action='store',
            type='int',
            default=8127,
            help='Port to use for the HTTP server')
        show_option = make_option(
            '--no-show-results',
            action='store_false',
            default=True,
            dest='show_results',
            help="Don't launch a browser with the rebaseline server")
        super(AbstractLocalServerCommand, self).__init__(
            options=[port_option, show_option])

    def _prepare_config(self, options, args, tool):
        """Build the config object handed to the server (subclass hook)."""
        raise NotImplementedError('Subclasses should implement this method.')

    def execute(self, options, args, tool):
        server_config = self._prepare_config(options, args, tool)
        server_url = 'http://localhost:%d%s' % (options.httpd_port,
                                                self.launch_path)

        print('Starting server at %s' % server_url)
        print(
            "Use the 'Exit' link in the UI, %squitquitquit or Ctrl-C to stop" %
            server_url)

        if options.show_results:
            # FIXME: This seems racy.
            threading.Timer(
                0.1, lambda: tool.user.open_url(server_url)).start()

        httpd = self.server(httpd_port=options.httpd_port, config=server_config)  # pylint: disable=not-callable
        httpd.serve_forever()
|
{
"content_hash": "47b36eed3289658c5fe307a0914879ec",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 105,
"avg_line_length": 34.46808510638298,
"alnum_prop": 0.55,
"repo_name": "nwjs/chromium.src",
"id": "8a9e3f65fa11f230ff5cbfce17bffb0ca5a9c77d",
"size": "2965",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw70",
"path": "third_party/blink/tools/blinkpy/tool/commands/abstract_local_server_command.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
# Celery worker configuration for the test scenario: Redis as both broker
# and result backend, with JSON-serialized results that never expire.
BROKER_URL = "redis://localhost:6379/0"
CELERY_RESULT_BACKEND = "redis"
# Task modules the worker imports at startup.
CELERY_IMPORTS = ("tasks", )
CELERY_RESULT_SERIALIZER = "json"
# None disables result expiry; results stay in Redis until deleted.
CELERY_TASK_RESULT_EXPIRES = None
|
{
"content_hash": "64202d595508cc6b49204105ca22accc",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 39,
"avg_line_length": 21.5,
"alnum_prop": 0.7151162790697675,
"repo_name": "xbzbing/celery-php",
"id": "b76fb6a58611925ad59d8c362f8b4634359b5ac2",
"size": "172",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "testscenario/celeryredisconfig.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "PHP",
"bytes": "51739"
},
{
"name": "Python",
"bytes": "1066"
},
{
"name": "Shell",
"bytes": "734"
},
{
"name": "Smarty",
"bytes": "834"
}
],
"symlink_target": ""
}
|
import os, sys
import unittest
from uuid import uuid4
import numpy as np
from desispec.resolution import Resolution
from desispec.frame import Frame
from desispec.fiberflat import FiberFlat
from desispec import io
from desispec.pipeline.core import runcmd
class TestBinScripts(unittest.TestCase):
    """Round-trip tests for the desi_compute_* command-line scripts,
    driving them as subprocesses against small fake FITS inputs."""

    @classmethod
    def setUpClass(cls):
        cls.nspec = 5
        cls.nwave = 20
        # Unique per-run suffix so leftover files from other runs don't collide.
        id = uuid4().hex
        cls.framefile = 'frame-'+id+'.fits'
        cls.fiberflatfile = 'fiberflat-'+id+'.fits'
        cls.fibermapfile = 'fibermap-'+id+'.fits'
        cls.skyfile = 'sky-'+id+'.fits'
        # Four dirname() calls: from this test module up to the checkout root.
        cls.topDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
        cls.binDir = os.path.join(cls.topDir,'bin')
        # Prepend the package's py/ dir so the subprocess scripts import this
        # checkout's desispec; remember the original PYTHONPATH to restore later.
        try:
            cls.origPath = os.environ['PYTHONPATH']
            os.environ['PYTHONPATH'] = os.path.join(cls.topDir,'py') + ':' + cls.origPath
        except KeyError:
            cls.origPath = None
            os.environ['PYTHONPATH'] = os.path.join(cls.topDir,'py')

    @classmethod
    def tearDownClass(cls):
        """Cleanup in case tests crashed and left files behind"""
        for filename in [cls.framefile, cls.fiberflatfile, cls.fibermapfile, cls.skyfile]:
            if os.path.exists(filename):
                os.remove(filename)
        # Restore (or remove) PYTHONPATH as set up in setUpClass.
        if cls.origPath is None:
            del os.environ['PYTHONPATH']
        else:
            os.environ['PYTHONPATH'] = cls.origPath

    def _write_frame(self):
        """Write a fake frame"""
        wave = 5000+np.arange(self.nwave)
        flux = np.ones((self.nspec, self.nwave))
        ivar = np.ones((self.nspec, self.nwave))
        mask = np.zeros((self.nspec, self.nwave), dtype=int)
        # Trivial resolution data: a single diagonal per spectrum.
        Rdata = np.ones((self.nspec, 1, self.nwave))
        frame = Frame(wave, flux, ivar, mask, Rdata)
        io.write_frame(self.framefile, frame)

    def _write_fiberflat(self):
        """Write a fake fiberflat"""
        wave = 5000+np.arange(self.nwave)
        fiberflat = np.ones((self.nspec, self.nwave))
        ivar = np.ones((self.nspec, self.nwave))
        mask = np.zeros((self.nspec, self.nwave), dtype=int)
        meanspec = np.ones(self.nwave)
        ff = FiberFlat(wave, fiberflat, ivar, mask, meanspec)
        io.write_fiberflat(self.fiberflatfile, ff)

    def _write_fibermap(self):
        """Write a fake fibermap"""
        fibermap = io.empty_fibermap(self.nspec)
        # Mark every third fiber as a sky fiber so sky subtraction has input.
        for i in range(0, self.nspec, 3):
            fibermap['OBJTYPE'][i] = 'SKY'
        io.write_fibermap(self.fibermapfile, fibermap)

    def _write_skymodel(self):
        # Placeholder; not yet implemented.
        pass

    def _write_stdstars(self):
        # Placeholder; not yet implemented.
        pass

    def test_compute_fiberflat(self):
        """
        Tests desi_compute_fiberflat.py --infile frame.fits --outfile fiberflat.fits
        """
        self._write_frame()
        #- run the command and confirm error code = 0
        cmd = '{} {}/desi_compute_fiberflat.py --infile {} --outfile {}'.format(
            sys.executable, self.binDir, self.framefile, self.fiberflatfile)
        # self.assertTrue(os.path.exists(os.path.join(self.binDir,'desi_compute_fiberflat.py')))
        err = runcmd(cmd, [self.framefile,], [self.fiberflatfile,], clobber=True)
        self.assertEqual(err, 0)
        #- Confirm that the output file can be read as a fiberflat
        ff = io.read_fiberflat(self.fiberflatfile)

    def test_compute_sky(self):
        """
        Tests desi_compute_sky.py --infile frame.fits --fibermap fibermap.fits --fiberflat fiberflat.fits --outfile skymodel.fits
        """
        self._write_frame()
        self._write_fiberflat()
        self._write_fibermap()
        cmd = "{} {}/desi_compute_sky.py --infile {} --fibermap {} --fiberflat {} --outfile {}".format(
            sys.executable, self.binDir, self.framefile, self.fibermapfile, self.fiberflatfile, self.skyfile)
        err = runcmd(cmd,
                inputs = [self.framefile, self.fiberflatfile, self.fibermapfile],
                outputs = [self.skyfile,], clobber=True )
        self.assertEqual(err, 0)
#- This runs all test* functions in any TestCase class in this file
#- (standard unittest discovery within this module).
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "1274a10d9d47b853a48ba5c0c12c03f8",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 129,
"avg_line_length": 37.57657657657658,
"alnum_prop": 0.6118436825701271,
"repo_name": "profxj/desispec",
"id": "a2db5a3e60f4fbdedcbe3097d6e0abecd8186856",
"size": "4171",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py/desispec/test/test_binscripts.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "236131"
},
{
"name": "Shell",
"bytes": "1591"
}
],
"symlink_target": ""
}
|
from urllib.request import urlopen
from bs4 import BeautifulSoup
from sqlalchemy import create_engine
from sqlalchemy import desc
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from roll import Roll
import os
from datetime import datetime
from price import get_price
# Ensure the SQLite file exists (create empty if missing) and touch its mtime.
db_path = './roll.db'
open(db_path, 'a').close()
os.utime(db_path)
engine = create_engine('sqlite:///' + db_path, echo=True)
Roll.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
# TWSE publishes one announcement spreadsheet per calendar year.
year = str(datetime.now().year)
target_url = 'http://www.twse.com.tw/ch/announcement/download/%sSUMMARIES.xls' % (year)
# Get data from url and store into database
with urlopen(target_url) as target:
    # The announcement page is encoded in Big5; drop undecodable bytes.
    content=target.read().decode('big5', 'ignore')
    soup = BeautifulSoup(content, 'html.parser')
    table=soup.find('table')
    # The first row is the table header; skip it.
    for row in table.find_all('tr')[1:]:
        # NOTE(review): 'record' is assigned but never used in this loop.
        record = dict()
        columns = row.find_all('td')
        # Skip bonds: their codes have 6 digits instead of the 4 used by stocks.
        if len(columns[3].string) > 4:
            continue
        # Stop once we reach data already stored: if the newest stored id is
        # >= this row's id there is nothing new.
        # NOTE(review): assumes announcement ids increase down the page — confirm.
        last_record = session.query(Roll).order_by(desc("id")).first()
        if last_record is not None and last_record.id >= int(columns[0].string):
            print('nothing new')
            break
        # Prepare record data (column indices follow the TWSE table layout).
        aroll = Roll(int(columns[0].string), columns[2].string, columns[3].string,
                     columns[1].string, columns[5].string, columns[6].string,
                     int(columns[13].string), float(columns[9].string), columns[4].string)
        session.add(aroll)
    session.commit()
# Refresh the latest reference price for entries whose roll date is upcoming.
records = session.query(Roll).filter(Roll.roll_date > datetime.today()).all()
for record in records:
    record.latest_price = get_price(record.identity)
session.commit()
session.close()
|
{
"content_hash": "0eecfa4d14b6d11c98aad1b27f143516",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 87,
"avg_line_length": 32.868852459016395,
"alnum_prop": 0.6743142144638404,
"repo_name": "petertom51/tw-stock-roll",
"id": "ce890bb5a8a2b7c86d619d115970757780d041e9",
"size": "2029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4815"
}
],
"symlink_target": ""
}
|
"""
RomTrainer module
This module contains the Step for Training ROMs
Created on May 6, 2021
@author: alfoa
supercedes Steps.py from alfoa (2/16/2013)
"""
#External Modules------------------------------------------------------------------------------------
#External Modules End--------------------------------------------------------------------------------
#Internal Modules------------------------------------------------------------------------------------
from .Step import Step
from ..Models import ROM
#Internal Modules End--------------------------------------------------------------------------------
class RomTrainer(Step):
  """
    This step type is used only to train a ROM
  """
  def __init__(self):
    """
      Constructor
      @ In, None
      @ Out, None
    """
    super().__init__()
    self.printTag = 'STEP ROM TRAINER'

  def _localInputAndCheckParam(self,paramInput):
    """
      Place here specialized reading, input consistency check and
      initialization of what will not change during the whole life of the object
      @ In, paramInput, ParameterInput, node that represents the portion of the input that belongs to this Step class
      @ Out, None
    """
    # A training step needs exactly one Input (the training set) ...
    if [item[0] for item in self.parList].count('Input')!=1:
      self.raiseAnError(IOError,'Only one Input and only one is allowed for a training step. Step name: '+str(self.name))
    # ... and at least one Output, all of which must be ROMs.
    if [item[0] for item in self.parList].count('Output')<1:
      self.raiseAnError(IOError,'At least one Output is need in a training step. Step name: '+str(self.name))
    for item in self.parList:
      if item[0]=='Output' and item[2] not in ['ROM']:
        self.raiseAnError(IOError,'Only ROM output class are allowed in a training step. Step name: '+str(self.name))

  def _localGetInitParams(self):
    """
      Place here a specialization of the exporting of what in the step is added to the initial parameters
      the printing format of paramDict is key: paramDict[key]
      @ In, None
      @ Out, paramDict, dict, dictionary containing the parameter names as keys
        and each parameter's initial value as the dictionary values
    """
    # No additional initialization parameters for this step type.
    return {}

  def _localInitializeStep(self,inDictionary):
    """
      This is the API for the local initialization of the children classes of step
      The inDictionary contains the instances for each possible role supported in the step (dictionary keywords) the instances of the objects in list if more than one is allowed
      The role of _localInitializeStep is to call the initialize method instance if needed
      Remember after each initialization to put:
      self.raiseADebug('for the role "+key+" the item of class '+inDictionary['key'].type+' and name '+inDictionary['key'].name+' has been initialized')
      @ In, inDictionary, dict, the initialization dictionary
      @ Out, None
    """
    for output in inDictionary['Output']:
      if isinstance(output, ROM):
        output.initialize(inDictionary['jobHandler'].runInfoDict,inDictionary['Input'])

  def _localTakeAstepRun(self,inDictionary):
    """
      This is the API for the local run of a step for the children classes
      @ In, inDictionary, dict, contains the list of instances (see Simulation)
      @ Out, None
    """
    #Train the ROM... It is not needed to add the trainingSet since it's already been added in the initialization method
    # Loop variable renamed from 'ROM' to 'rom': the old name shadowed the ROM
    # class imported from ..Models (used by isinstance in _localInitializeStep).
    for rom in inDictionary['Output']:
      rom.train(inDictionary['Input'][0])
|
{
"content_hash": "90b322ba4f7c31bb31c8d57111ecfb59",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 177,
"avg_line_length": 45.53947368421053,
"alnum_prop": 0.625541750939035,
"repo_name": "joshua-cogliati-inl/raven",
"id": "fcb12c30ab406ee799397e7cb39c98bc242bd691",
"size": "4050",
"binary": false,
"copies": "2",
"ref": "refs/heads/devel",
"path": "ravenframework/Steps/RomTrainer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1556080"
},
{
"name": "Batchfile",
"bytes": "1095"
},
{
"name": "C",
"bytes": "148504"
},
{
"name": "C++",
"bytes": "48279546"
},
{
"name": "CMake",
"bytes": "9998"
},
{
"name": "Jupyter Notebook",
"bytes": "84202"
},
{
"name": "MATLAB",
"bytes": "202335"
},
{
"name": "Makefile",
"bytes": "2399"
},
{
"name": "Perl",
"bytes": "1297"
},
{
"name": "Python",
"bytes": "6952659"
},
{
"name": "R",
"bytes": "67"
},
{
"name": "SWIG",
"bytes": "8574"
},
{
"name": "Shell",
"bytes": "124279"
},
{
"name": "TeX",
"bytes": "479725"
}
],
"symlink_target": ""
}
|
execfile("version.py")
import datetime
today = datetime.date.today().strftime("%Y%m%d")
cmd = "git archive --format=tar.gz --prefix=iVisDesigner-%s/ -o dist/iVisDesigner-%s-%s-r%s.tar.gz HEAD" % \
(IV_version, IV_version, today, IV_rev)
print cmd
commands.getoutput(cmd)
|
{
"content_hash": "70a6d1c993b57d304861e3a128f7def1",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 108,
"avg_line_length": 27.8,
"alnum_prop": 0.6942446043165468,
"repo_name": "donghaoren/iVisDesigner",
"id": "19879e2bba0716290d3da7b9ccb4f23693186ef0",
"size": "297",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "archive.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "47560"
},
{
"name": "HTML",
"bytes": "258278"
},
{
"name": "JavaScript",
"bytes": "1193537"
},
{
"name": "Python",
"bytes": "104636"
}
],
"symlink_target": ""
}
|
import logging
from django.contrib.auth.models import User
from desktop.conf import LDAP
from models import UserProfile
from views import import_ldap_users
import ldap_access
LOG = logging.getLogger(__name__)
class LdapSynchronizationMiddleware(object):
  """
  Synchronize against LDAP authority.
  """
  USER_CACHE_NAME = 'ldap_use_group_sync_cache'

  def process_request(self, request):
    # Only authenticated users are candidates for LDAP synchronization.
    user = request.user
    if not user or not user.is_authenticated():
      return

    # Skip users that were not created from an external (LDAP) authority.
    is_ldap_user = User.objects.filter(
        username=user.username,
        userprofile__creation_method=str(UserProfile.CreationMethod.EXTERNAL)).exists()
    if not is_ldap_user:
      LOG.warn("User %s is not an Ldap user" % user.username)
      return

    # Cache should be cleared when user logs out.
    if self.USER_CACHE_NAME in request.session:
      return

    if LDAP.LDAP_SERVERS.get():
      ldap_connection = ldap_access.get_connection_from_server(
          next(LDAP.LDAP_SERVERS.__iter__()))
    else:
      ldap_connection = ldap_access.get_connection_from_server()

    request.session[self.USER_CACHE_NAME] = import_ldap_users(
        ldap_connection, user.username, sync_groups=True, import_by_dn=False)
    request.session.modified = True
|
{
"content_hash": "0a617b9c49a4be60d4ab5aee42179837",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 135,
"avg_line_length": 30.025641025641026,
"alnum_prop": 0.7181895815542272,
"repo_name": "2013Commons/hue",
"id": "e749d6b0d12a39516e626b15676e2be075495ef6",
"size": "1963",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "apps/useradmin/src/useradmin/middleware.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import pytest
from celery.result import EagerResult
from {{ cookiecutter.project_slug }}.users.tasks import get_users_count
from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
@pytest.mark.django_db
def test_user_count(settings):
    """A basic test to execute the get_users_count Celery task."""
    # Eager mode runs the task synchronously in-process.
    settings.CELERY_TASK_ALWAYS_EAGER = True
    UserFactory.create_batch(3)

    outcome = get_users_count.delay()

    assert isinstance(outcome, EagerResult)
    assert outcome.result == 3
|
{
"content_hash": "f78a2a5fa68e6c4693a5c4a90b2405cc",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 77,
"avg_line_length": 33.0625,
"alnum_prop": 0.7466918714555766,
"repo_name": "Parbhat/cookiecutter-django-foundation",
"id": "addb091db5c334cf45d191f4a0ee78ff9ab3f7d6",
"size": "529",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5146"
},
{
"name": "CSS",
"bytes": "24354"
},
{
"name": "Dockerfile",
"bytes": "4243"
},
{
"name": "HTML",
"bytes": "24755"
},
{
"name": "JavaScript",
"bytes": "3953"
},
{
"name": "Makefile",
"bytes": "5664"
},
{
"name": "Python",
"bytes": "84786"
},
{
"name": "Shell",
"bytes": "13833"
}
],
"symlink_target": ""
}
|
from unittest import mock
from heat.common import exception
from heat.common import template_format
from heat.engine.resources.openstack.neutron import security_group_rule
from heat.tests import common
from heat.tests.openstack.neutron import inline_templates
from heat.tests import utils
class SecurityGroupRuleTest(common.HeatTestCase):
    """Unit tests for the OS::Neutron::SecurityGroupRule Heat resource."""

    def test_resource_mapping(self):
        # The plugin must register the resource class under its Heat type name.
        mapping = security_group_rule.resource_mapping()
        self.assertEqual(mapping['OS::Neutron::SecurityGroupRule'],
                         security_group_rule.SecurityGroupRule)

    @mock.patch('heat.engine.clients.os.neutron.'
                'NeutronClientPlugin.has_extension', return_value=True)
    def _create_stack(self, ext_func,
                      tmpl=inline_templates.SECURITY_GROUP_RULE_TEMPLATE):
        # Helper: parse tmpl into a stack and stub the neutron client so no
        # real API calls happen. ext_func is injected by @mock.patch above.
        self.t = template_format.parse(tmpl)
        self.stack = utils.parse_stack(self.t)
        self.sg_rule = self.stack['security_group_rule']
        self.neutron_client = mock.MagicMock()
        self.sg_rule.client = mock.MagicMock(return_value=self.neutron_client)
        # Any name/id lookup resolves to the fixed id '123'.
        self.sg_rule.client_plugin().find_resourceid_by_name_or_id = (
            mock.MagicMock(return_value='123'))

    def test_create(self):
        self._create_stack()
        self.neutron_client.create_security_group_rule.return_value = {
            'security_group_rule': {'id': '1234'}}
        # Body Heat should pass to neutron, derived from the inline template.
        expected = {
            'security_group_rule': {
                'security_group_id': u'123',
                'description': u'test description',
                'remote_group_id': u'123',
                'protocol': u'tcp',
                'port_range_min': '100',
                'direction': 'ingress',
                'ethertype': 'IPv4'
            }
        }
        self.sg_rule.handle_create()
        self.neutron_client.create_security_group_rule.assert_called_with(
            expected)

    def test_validate_conflict_props(self):
        # remote_ip_prefix and remote_group_id are mutually exclusive.
        self.patchobject(security_group_rule.SecurityGroupRule,
                         'is_service_available',
                         return_value=(True, None))
        tmpl = inline_templates.SECURITY_GROUP_RULE_TEMPLATE
        tmpl += ' remote_ip_prefix: "10.0.0.0/8"'
        self._create_stack(tmpl=tmpl)
        self.assertRaises(exception.ResourcePropertyConflict,
                          self.sg_rule.validate)

    def test_validate_max_port_less_than_min_port(self):
        # port_range_max (50) below port_range_min (100) must fail validation.
        self.patchobject(security_group_rule.SecurityGroupRule,
                         'is_service_available',
                         return_value=(True, None))
        tmpl = inline_templates.SECURITY_GROUP_RULE_TEMPLATE
        tmpl += ' port_range_max: 50'
        self._create_stack(tmpl=tmpl)
        self.assertRaises(exception.StackValidationFailed,
                          self.sg_rule.validate)

    def test_show_resource(self):
        # _show_resource should unwrap the 'security_group_rule' envelope.
        self._create_stack()
        self.sg_rule.resource_id_set('1234')
        self.neutron_client.show_security_group_rule.return_value = {
            'security_group_rule': {'id': '1234'}
        }
        self.assertEqual({'id': '1234'}, self.sg_rule._show_resource())
        self.neutron_client.show_security_group_rule.assert_called_with('1234')

    def test_delete(self):
        self._create_stack()
        self.sg_rule.resource_id_set('1234')
        self.sg_rule.handle_delete()
        self.neutron_client.delete_security_group_rule.assert_called_with(
            '1234')
|
{
"content_hash": "87826113a4671a8f4b4df5d858c9f1a8",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 37.32258064516129,
"alnum_prop": 0.6070296744454048,
"repo_name": "openstack/heat",
"id": "49b4276344fe3d1deb20265e21d6774993635527",
"size": "4046",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/tests/openstack/neutron/test_neutron_security_group_rule.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "9145593"
},
{
"name": "Shell",
"bytes": "65832"
}
],
"symlink_target": ""
}
|
"""
Mission
-------
This is a work-around to bolt saltstack modules and states into any python code.
Saltstack is a permissively-licensed, pythonic configuration management system.
Fabric is a task-runner for all sorts of miscellaneous configurations. Both are
essential tools.
You can read the documentation for the commands at:
- http://docs.saltstack.com/en/latest/ref/states/all/
- http://docs.saltstack.com/en/latest/ref/modules/all/
Growing big
-----------
If you think that your fabric file is getting too bit, or your needs get
too complicated, you can migrate your project to a salt's declarative YAML
syntax, cloud provisioning and more.
You can begin to move over using a crawl-walk-run approach. First, start
creating some initial `salt states`_ in YAML.
You can start by installing to your local machine and using ``salt-ssh`` to
setup servers. This is `Roster`_\ -based configuration.
If you are beginning to manage large clusters of servers, it makes sense to
start controlling your system via a `Salt Master`_.
.. _Roster: http://docs.saltstack.com/en/latest/topics/ssh/roster.html
.. _salt states: http://docs.saltstack.com/en/latest/topics/tutorials/starting_states.html
.. _Salt Master: http://docs.saltstack.com/en/latest/topics/tutorials/walkthrough.html#setting-up-the-salt-master
Copyright 2015, Tony Narlock (tony@git-pull.com). Licensed MIT.
Fabric is MIT, https://github.com/fabric/fabric/blob/master/LICENSE
SaltStack is Apache 2: https://github.com/saltstack/salt/blob/develop/LICENSE
"""
# Fail fast with installation instructions when salt is not importable.
try:
    import salt
except ImportError:
    raise Exception("""
You must install the latest version of salt:
curl -L https://bootstrap.saltstack.com -o install_salt.sh
sudo sh install_salt.sh git develop
""")
def bootstrap_python():
    """
    Temporary __builtins__ workaround until saltstack is refactored to handle
    importing modules pythonically.

    Injects the dunder globals (__salt__, __grains__, __opts__) that salt
    execution modules expect to find, seeded with the default master options
    and the loaded minion modules.
    """
    from salt.config import DEFAULT_MASTER_OPTS
    import salt.log.setup
    salt.log.setup.setup_temp_logger(log_level='debug')
    # NOTE(review): mutating __builtins__ like this relies on the module being
    # run as a script (where __builtins__ is the builtins module) — confirm.
    __builtins__.__salt__ = {
        'cmd.run': salt.modules.cmdmod._run_quiet,
        'cmd.run_all': salt.modules.cmdmod.run_all
    }
    __builtins__.__grains__ = {}
    __builtins__.__opts__ = DEFAULT_MASTER_OPTS
    # Load the full set of minion execution modules on top of the seed dict.
    __builtins__.__salt__.update(
        salt.loader.minion_mods(
            __opts__
        )
    )
def git_clone_salt(cwd='/home/tony/study/salt/_salt',
                   repository='https://github.com/saltstack/salt'):
    """Clone a git repository using salt's ``git`` execution module.

    Generalized: the previously hard-coded destination and repository URL are
    now parameters whose defaults preserve the original behavior.

    :param cwd: destination directory for the clone.
    :param repository: git URL to clone from.
    """
    from salt.modules import git
    git.clone(
        cwd=cwd,
        repository=repository,
    )
def latest_salt(target='/home/tony/study/salt/_salt2',
                name='https://github.com/saltstack/salt'):
    """Ensure a checkout is at the latest revision via salt's ``git.latest`` state.

    Generalized: the previously hard-coded target path and repository URL are
    now parameters whose defaults preserve the original behavior.

    :param target: local checkout directory to update.
    :param name: git URL of the repository.
    :return: the state result dictionary from ``salt.states.git.latest``.
    """
    import salt.states.git
    return salt.states.git.latest(
        target=target,
        name=name,
    )
if __name__ == '__main__':
    # Inject salt's dunder globals, then run the git.latest state as a demo.
    bootstrap_python()
    print(latest_salt())
|
{
"content_hash": "a96c5f9dd5a56ceb0d8bf4b7a9c1da51",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 113,
"avg_line_length": 28.816326530612244,
"alnum_prop": 0.6922804532577904,
"repo_name": "tony/salt-states-configs",
"id": "79d6d0512add054f82caa20dfc37c7c1f5bf7b29",
"size": "2846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt_python.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "8488"
},
{
"name": "Nginx",
"bytes": "1675"
},
{
"name": "Python",
"bytes": "2846"
},
{
"name": "Scheme",
"bytes": "210"
}
],
"symlink_target": ""
}
|
import codecs
import os.path
import re
import subprocess
from functools import partial
from collections import defaultdict
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from lib import struct, newAction, newIcon, addActions, fmtShortcut
from shape import Shape, DEFAULT_LINE_COLOR, DEFAULT_FILL_COLOR
from canvas import Canvas
from zoomWidget import ZoomWidget
from labelDialog import LabelDialog
from colorDialog import ColorDialog
from labelFile import LabelFile, LabelFileError
from toolBar import ToolBar
from pascal_voc_io import PascalVocReader
import cv2
import numpy
from track import Meanshife
__appname__ = 'labelImg'
### Utility functions and classes.
class Settings(object):
    """Convenience dict-like wrapper around QSettings."""

    def __init__(self, types=None):
        # types maps setting key -> expected type; keys not listed fall back
        # to QVariant (i.e. stored/read without conversion).
        self.data = QSettings()
        self.types = defaultdict(lambda: QVariant, types if types else {})

    def __setitem__(self, key, value):
        # Coerce the value to its declared type before persisting it.
        t = self.types[key]
        self.data.setValue(key,
                t(value) if not isinstance(value, t) else value)

    def __getitem__(self, key):
        return self._cast(key, self.data.value(key))

    def get(self, key, default=None):
        return self._cast(key, self.data.value(key, default))

    def _cast(self, key, value):
        # XXX: Very nasty way of converting types to QVariant methods :P
        # Maps a declared type name like 'QString' to the QVariant accessor
        # 'toString' and calls it on the stored value.
        t = self.types[key]
        if t != QVariant:
            method = getattr(QVariant, re.sub('^Q', 'to', t.__name__, count=1))
            return method(value)
        return value
def numpy2Qimage(Mat):
    """Convert a BGR OpenCV image (numpy array) to a QImage, in place.

    Returns None when *Mat* is None -- callers must check before use.
    """
    if Mat is None:
        return None
    height, width, bytesPerComponent = Mat.shape
    stride = width * bytesPerComponent
    # QImage expects RGB byte order; convert the BGR buffer in place.
    cv2.cvtColor(Mat, cv2.cv.CV_BGR2RGB, Mat)
    return QImage(Mat.data, width, height, stride, QImage.Format_RGB888)
class WindowMixin(object):
    """Adds menu()/toolbar() construction helpers to a QMainWindow subclass."""
    def menu(self, title, actions=None):
        """Create a menubar menu, optionally populated with *actions*."""
        m = self.menuBar().addMenu(title)
        if actions:
            addActions(m, actions)
        return m

    def toolbar(self, title, actions=None):
        """Create a left-docked toolbar named '<title>ToolBar'."""
        tb = ToolBar(title)
        tb.setObjectName(u'%sToolBar' % title)
        tb.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
        if actions:
            addActions(tb, actions)
        self.addToolBar(Qt.LeftToolBarArea, tb)
        return tb
class MainWindow(QMainWindow, WindowMixin):
FIT_WINDOW, FIT_WIDTH, MANUAL_ZOOM = range(3)
    def __init__(self, filename=None):
        """Build the labelImg main window: widgets, signal wiring, actions,
        menus, toolbars, and state restored from QSettings.

        :param filename: optional image/label/video file queued for loading
            once the event loop starts (via queueEvent/loadFile).
        """
        super(MainWindow, self).__init__()
        self.setWindowTitle(__appname__)
        # Save as Pascal voc xml
        self.defaultSaveDir = None
        self.usingPascalVocFormat = True
        if self.usingPascalVocFormat:
            LabelFile.suffix = '.xml'
        # For loading all image under a directory
        self.mImgList = []
        self.dirname = None
        self.labelHist = []
        self.lastOpenDir = None
        # Whether we need to save or not.
        self.dirty = False
        # Enble auto saving if pressing next
        self.autoSaving = True
        self._noSelectionSlot = False
        self._beginner = True
        # NOTE(review): these two stay commented out, but tutorial() reads
        # self.screencastViewer/self.screencast -- confirm intended.
        # self.screencastViewer = "firefox"
        # self.screencast = "https://youtu.be/p0nR2YsCY_U"
        self.loadPredefinedClasses()
        # Main widgets and related state.
        self.labelDialog = LabelDialog(parent=self, listItem=self.labelHist)
        self.labelList = QListWidget()
        self.itemsToShapes = {}
        self.shapesToItems = {}
        self.labelList.itemActivated.connect(self.labelSelectionChanged)
        self.labelList.itemSelectionChanged.connect(self.labelSelectionChanged)
        self.labelList.itemDoubleClicked.connect(self.editLabel)
        # Connect to itemChanged to detect checkbox changes.
        self.labelList.itemChanged.connect(self.labelItemChanged)
        listLayout = QVBoxLayout()
        listLayout.setContentsMargins(0, 0, 0, 0)
        listLayout.addWidget(self.labelList)
        self.editButton = QToolButton()
        self.editButton.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        self.labelListContainer = QWidget()
        self.labelListContainer.setLayout(listLayout)
        # NOTE(review): labelList was already added to listLayout above; Qt
        # moves (not copies) it on the second addWidget -- confirm intended.
        listLayout.addWidget(self.editButton)#, 0, Qt.AlignCenter)
        listLayout.addWidget(self.labelList)
        self.dock = QDockWidget(u'Box Labels', self)
        self.dock.setObjectName(u'Labels')
        self.dock.setWidget(self.labelListContainer)
        # Tzutalin 20160906 : Add file list and dock to move faster
        self.fileListWidget = QListWidget()
        self.fileListWidget.itemDoubleClicked.connect(self.fileitemDoubleClicked)
        filelistLayout = QVBoxLayout()
        filelistLayout.setContentsMargins(0, 0, 0, 0)
        filelistLayout.addWidget(self.fileListWidget)
        self.fileListContainer = QWidget()
        self.fileListContainer.setLayout(filelistLayout)
        self.filedock = QDockWidget(u'File List', self)
        self.filedock.setObjectName(u'Files')
        self.filedock.setWidget(self.fileListContainer)
        self.zoomWidget = ZoomWidget()
        self.colorDialog = ColorDialog(parent=self)
        # Central canvas inside a scroll area; canvas signals drive the UI.
        self.canvas = Canvas()
        self.canvas.zoomRequest.connect(self.zoomRequest)
        scroll = QScrollArea()
        scroll.setWidget(self.canvas)
        scroll.setWidgetResizable(True)
        self.scrollBars = {
            Qt.Vertical: scroll.verticalScrollBar(),
            Qt.Horizontal: scroll.horizontalScrollBar()
        }
        self.canvas.scrollRequest.connect(self.scrollRequest)
        self.canvas.newShape.connect(self.newShape)
        self.canvas.shapeMoved.connect(self.setDirty)
        self.canvas.selectionChanged.connect(self.shapeSelectionChanged)
        self.canvas.drawingPolygon.connect(self.toggleDrawingSensitive)
        self.setCentralWidget(scroll)
        self.addDockWidget(Qt.RightDockWidgetArea, self.dock)
        # Tzutalin 20160906 : Add file list and dock to move faster
        self.addDockWidget(Qt.RightDockWidgetArea, self.filedock)
        self.dockFeatures = QDockWidget.DockWidgetClosable\
                          | QDockWidget.DockWidgetFloatable
        self.dock.setFeatures(self.dock.features() ^ self.dockFeatures)
        # Actions
        action = partial(newAction, self)
        quit = action('&Quit', self.close,
                      'Ctrl+Q', 'quit', u'Quit application')
        open = action('&Open', self.openFile,
                      'Ctrl+O', 'open', u'Open image or label file')
        opendir = action('&Open Dir', self.openDir,
                         'Ctrl+u', 'open', u'Open Dir')
        changeSavedir = action('&Change default saved Annotation dir', self.changeSavedir,
                               'Ctrl+r', 'open', u'Change default saved Annotation dir')
        openAnnotation = action('&Open Annotation', self.openAnnotation,
                                'Ctrl+q', 'openAnnotation', u'Open Annotation')
        openNextImg = action('&Next Image', self.openNextImg,
                             'n', 'next', u'Open Next')
        openPrevImg = action('&Prev Image', self.openPrevImg,
                             'p', 'prev', u'Open Prev')
        save = action('&Save', self.saveFile,
                      'Ctrl+S', 'save', u'Save labels to file', enabled=False)
        saveAs = action('&Save As', self.saveFileAs,
                        'Ctrl+Shift+S', 'save-as', u'Save labels to a different file',
                        enabled=False)
        close = action('&Close', self.closeFile,
                       'Ctrl+W', 'close', u'Close current file')
        color1 = action('Box &Line Color', self.chooseColor1,
                        'Ctrl+L', 'color_line', u'Choose Box line color')
        color2 = action('Box &Fill Color', self.chooseColor2,
                        'Ctrl+Shift+L', 'color', u'Choose Box fill color')
        createMode = action('Create\nRectBox', self.setCreateMode,
                            'Ctrl+N', 'new', u'Start drawing Boxs', enabled=False)
        editMode = action('&Edit\nRectBox', self.setEditMode,
                          'Ctrl+J', 'edit', u'Move and edit Boxs', enabled=False)
        create = action('Create\nRectBox', self.createShape,
                        'Ctrl+N', 'new', u'Draw a new Box', enabled=False)
        delete = action('Delete\nRectBox', self.deleteSelectedShape,
                        'Delete', 'delete', u'Delete', enabled=False)
        copy = action('&Duplicate\nRectBox', self.copySelectedShape,
                      'Ctrl+D', 'copy', u'Create a duplicate of the selected Box',
                      enabled=False)
        advancedMode = action('&Advanced Mode', self.toggleAdvancedMode,
                              'Ctrl+Shift+A', 'expert', u'Switch to advanced mode',
                              checkable=True)
        hideAll = action('&Hide\nRectBox', partial(self.togglePolygons, False),
                         'Ctrl+H', 'hide', u'Hide all Boxs',
                         enabled=False)
        showAll = action('&Show\nRectBox', partial(self.togglePolygons, True),
                         'Ctrl+A', 'hide', u'Show all Boxs',
                         enabled=False)
        help = action('&Tutorial', self.tutorial, 'Ctrl+T', 'help',
                      u'Show demos')
        zoom = QWidgetAction(self)
        zoom.setDefaultWidget(self.zoomWidget)
        self.zoomWidget.setWhatsThis(
            u"Zoom in or out of the image. Also accessible with"\
            " %s and %s from the canvas." % (fmtShortcut("Ctrl+[-+]"),
                                             fmtShortcut("Ctrl+Wheel")))
        self.zoomWidget.setEnabled(False)
        zoomIn = action('Zoom &In', partial(self.addZoom, 10),
                        'Ctrl++', 'zoom-in', u'Increase zoom level', enabled=False)
        zoomOut = action('&Zoom Out', partial(self.addZoom, -10),
                         'Ctrl+-', 'zoom-out', u'Decrease zoom level', enabled=False)
        zoomOrg = action('&Original size', partial(self.setZoom, 100),
                         'Ctrl+=', 'zoom', u'Zoom to original size', enabled=False)
        fitWindow = action('&Fit Window', self.setFitWindow,
                           'Ctrl+F', 'fit-window', u'Zoom follows window size',
                           checkable=True, enabled=False)
        fitWidth = action('Fit &Width', self.setFitWidth,
                          'Ctrl+Shift+F', 'fit-width', u'Zoom follows window width',
                          checkable=True, enabled=False)
        # Group zoom controls into a list for easier toggling.
        zoomActions = (self.zoomWidget, zoomIn, zoomOut, zoomOrg, fitWindow, fitWidth)
        self.zoomMode = self.MANUAL_ZOOM
        self.scalers = {
            self.FIT_WINDOW: self.scaleFitWindow,
            self.FIT_WIDTH: self.scaleFitWidth,
            # Set to one to scale to 100% when loading files.
            self.MANUAL_ZOOM: lambda: 1,
        }
        edit = action('&Edit Label', self.editLabel,
                      'Ctrl+E', 'edit', u'Modify the label of the selected Box',
                      enabled=False)
        self.editButton.setDefaultAction(edit)
        shapeLineColor = action('Shape &Line Color', self.chshapeLineColor,
                                icon='color_line', tip=u'Change the line color for this specific shape',
                                enabled=False)
        shapeFillColor = action('Shape &Fill Color', self.chshapeFillColor,
                                icon='color', tip=u'Change the fill color for this specific shape',
                                enabled=False)
        labels = self.dock.toggleViewAction()
        labels.setText('Show/Hide Label Panel')
        labels.setShortcut('Ctrl+Shift+L')
        # Lavel list context menu.
        labelMenu = QMenu()
        addActions(labelMenu, (edit, delete))
        self.labelList.setContextMenuPolicy(Qt.CustomContextMenu)
        self.labelList.customContextMenuRequested.connect(self.popLabelListMenu)
        # Store actions for further handling.
        self.actions = struct(save=save, saveAs=saveAs, open=open, close=close,
                              lineColor=color1, fillColor=color2,
                              create=create, delete=delete, edit=edit, copy=copy,
                              createMode=createMode, editMode=editMode, advancedMode=advancedMode,
                              shapeLineColor=shapeLineColor, shapeFillColor=shapeFillColor,
                              zoom=zoom, zoomIn=zoomIn, zoomOut=zoomOut, zoomOrg=zoomOrg,
                              fitWindow=fitWindow, fitWidth=fitWidth,
                              zoomActions=zoomActions,
                              fileMenuActions=(open, opendir, save, saveAs, close, quit),
                              beginner=(), advanced=(),
                              editMenu=(edit, copy, delete, None, color1, color2),
                              beginnerContext=(create, edit, copy, delete),
                              advancedContext=(createMode, editMode, edit, copy,
                                               delete, shapeLineColor, shapeFillColor),
                              onLoadActive=(close, create, createMode, editMode),
                              onShapesPresent=(saveAs, hideAll, showAll))
        self.menus = struct(
            file=self.menu('&File'),
            edit=self.menu('&Edit'),
            view=self.menu('&View'),
            help=self.menu('&Help'),
            recentFiles=QMenu('Open &Recent'),
            labelList=labelMenu)
        addActions(self.menus.file,
                   (open, opendir,changeSavedir, openAnnotation, self.menus.recentFiles, save, saveAs, close, None, quit))
        addActions(self.menus.help, (help,))
        addActions(self.menus.view, (
            labels, advancedMode, None,
            hideAll, showAll, None,
            zoomIn, zoomOut, zoomOrg, None,
            fitWindow, fitWidth))
        self.menus.file.aboutToShow.connect(self.updateFileMenu)
        # Custom context menu for the canvas widget:
        addActions(self.canvas.menus[0], self.actions.beginnerContext)
        addActions(self.canvas.menus[1], (
            action('&Copy here', self.copyShape),
            action('&Move here', self.moveShape)))
        self.tools = self.toolbar('Tool')
        self.actions.beginner = (
            open, opendir, openNextImg, openPrevImg, save, None, create, copy, delete, None,
            zoomIn, zoom, zoomOut, fitWindow, fitWidth)
        self.actions.advanced = (
            open, save, None,
            createMode, editMode, None,
            hideAll, showAll)
        self.statusBar().showMessage('%s started.' % __appname__)
        self.statusBar().show()
        # Application state.
        self.image = QImage()
        self.filename = filename
        self.recentFiles = []
        self.maxRecent = 7
        self.lineColor = None
        self.fillColor = None
        self.zoom_level = 100
        self.fit_window = False
        # XXX: Could be completely declarative.
        # Restore application settings.
        types = {
            'filename': QString,
            'recentFiles': QStringList,
            'window/size': QSize,
            'window/position': QPoint,
            'window/geometry': QByteArray,
            # Docks and toolbars:
            'window/state': QByteArray,
            'savedir': QString,
            'lastOpenDir': QString,
        }
        self.settings = settings = Settings(types)
        self.recentFiles = list(settings['recentFiles'])
        size = settings.get('window/size', QSize(600, 500))
        position = settings.get('window/position', QPoint(0, 0))
        self.resize(size)
        self.move(position)
        saveDir = settings.get('savedir', None)
        self.lastOpenDir = settings.get('lastOpenDir', None)
        if os.path.exists(unicode(saveDir)):
            self.defaultSaveDir = unicode(saveDir)
            self.statusBar().showMessage('%s started. Annotation will be saved to %s' %(__appname__, self.defaultSaveDir))
            self.statusBar().show()
        # or simply:
        #self.restoreGeometry(settings['window/geometry']
        self.restoreState(settings['window/state'])
        self.lineColor = QColor(settings.get('line/color', Shape.line_color))
        self.fillColor = QColor(settings.get('fill/color', Shape.fill_color))
        Shape.line_color = self.lineColor
        Shape.fill_color = self.fillColor
        if settings.get('advanced', QVariant()).toBool():
            self.actions.advancedMode.setChecked(True)
            self.toggleAdvancedMode()
        # Populate the File menu dynamically.
        self.updateFileMenu()
        # Since loading the file may take some time, make sure it runs in the background.
        self.queueEvent(partial(self.loadFile, self.filename))
        # Callbacks:
        self.zoomWidget.valueChanged.connect(self.paintCanvas)
        self.populateModeActions()
        # Video support: an open cv2.VideoCapture and the current frame index.
        self.cap = None
        self._num = 0
## Support Functions ##
def noShapes(self):
return not self.itemsToShapes
def toggleAdvancedMode(self, value=True):
self._beginner = not value
self.canvas.setEditing(True)
self.populateModeActions()
self.editButton.setVisible(not value)
if value:
self.actions.createMode.setEnabled(True)
self.actions.editMode.setEnabled(False)
self.dock.setFeatures(self.dock.features() | self.dockFeatures)
else:
self.dock.setFeatures(self.dock.features() ^ self.dockFeatures)
def populateModeActions(self):
if self.beginner():
tool, menu = self.actions.beginner, self.actions.beginnerContext
else:
tool, menu = self.actions.advanced, self.actions.advancedContext
self.tools.clear()
addActions(self.tools, tool)
self.canvas.menus[0].clear()
addActions(self.canvas.menus[0], menu)
self.menus.edit.clear()
actions = (self.actions.create,) if self.beginner()\
else (self.actions.createMode, self.actions.editMode)
addActions(self.menus.edit, actions + self.actions.editMenu)
def setBeginner(self):
self.tools.clear()
addActions(self.tools, self.actions.beginner)
def setAdvanced(self):
self.tools.clear()
addActions(self.tools, self.actions.advanced)
def setDirty(self):
self.dirty = True
self.actions.save.setEnabled(True)
def setClean(self):
self.dirty = False
self.actions.save.setEnabled(False)
self.actions.create.setEnabled(True)
def toggleActions(self, value=True):
"""Enable/Disable widgets which depend on an opened image."""
for z in self.actions.zoomActions:
z.setEnabled(value)
for action in self.actions.onLoadActive:
action.setEnabled(value)
def queueEvent(self, function):
QTimer.singleShot(0, function)
def status(self, message, delay=5000):
self.statusBar().showMessage(message, delay)
def resetState(self):
self.itemsToShapes.clear()
self.shapesToItems.clear()
self.labelList.clear()
self.filename = None
self.imageData = None
self.labelFile = None
self.canvas.resetState()
def currentItem(self):
items = self.labelList.selectedItems()
if items:
return items[0]
return None
def addRecentFile(self, filename):
if filename in self.recentFiles:
self.recentFiles.remove(filename)
elif len(self.recentFiles) >= self.maxRecent:
self.recentFiles.pop()
self.recentFiles.insert(0, filename)
def beginner(self):
return self._beginner
def advanced(self):
return not self.beginner()
## Callbacks ##
def tutorial(self):
subprocess.Popen([self.screencastViewer, self.screencast])
def createShape(self):
assert self.beginner()
self.canvas.setEditing(False)
self.actions.create.setEnabled(False)
def toggleDrawingSensitive(self, drawing=True):
"""In the middle of drawing, toggling between modes should be disabled."""
self.actions.editMode.setEnabled(not drawing)
if not drawing and self.beginner():
# Cancel creation.
print 'Cancel creation.'
self.canvas.setEditing(True)
self.canvas.restoreCursor()
self.actions.create.setEnabled(True)
def toggleDrawMode(self, edit=True):
self.canvas.setEditing(edit)
self.actions.createMode.setEnabled(edit)
self.actions.editMode.setEnabled(not edit)
def setCreateMode(self):
assert self.advanced()
self.toggleDrawMode(False)
def setEditMode(self):
assert self.advanced()
self.toggleDrawMode(True)
def updateFileMenu(self):
current = self.filename
def exists(filename):
return os.path.exists(unicode(filename))
menu = self.menus.recentFiles
menu.clear()
files = [f for f in self.recentFiles if f != current and exists(f)]
for i, f in enumerate(files):
icon = newIcon('labels')
action = QAction(
icon, '&%d %s' % (i+1, QFileInfo(f).fileName()), self)
action.triggered.connect(partial(self.loadRecent, f))
menu.addAction(action)
def popLabelListMenu(self, point):
self.menus.labelList.exec_(self.labelList.mapToGlobal(point))
def editLabel(self, item=None):
if not self.canvas.editing():
return
item = item if item else self.currentItem()
text = self.labelDialog.popUp(item.text())
if text is not None:
item.setText(text)
self.setDirty()
# Tzutalin 20160906 : Add file list and dock to move faster
def fileitemDoubleClicked(self, item=None):
currIndex = self.mImgList.index(str(item.text()))
if currIndex < len(self.mImgList):
filename = self.mImgList[currIndex]
if filename:
self.loadFile(filename)
# React to canvas signals.
def shapeSelectionChanged(self, selected=False):
if self._noSelectionSlot:
self._noSelectionSlot = False
else:
shape = self.canvas.selectedShape
if shape:
self.labelList.setItemSelected(self.shapesToItems[shape], True)
else:
self.labelList.clearSelection()
self.actions.delete.setEnabled(selected)
self.actions.copy.setEnabled(selected)
self.actions.edit.setEnabled(selected)
self.actions.shapeLineColor.setEnabled(selected)
self.actions.shapeFillColor.setEnabled(selected)
def addLabel(self, shape):
item = QListWidgetItem(shape.label)
item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
item.setCheckState(Qt.Checked)
self.itemsToShapes[item] = shape
self.shapesToItems[shape] = item
self.labelList.addItem(item)
for action in self.actions.onShapesPresent:
action.setEnabled(True)
def remLabel(self, shape):
item = self.shapesToItems[shape]
self.labelList.takeItem(self.labelList.row(item))
del self.shapesToItems[shape]
del self.itemsToShapes[item]
def loadLabels(self, shapes):
s = []
for label, points, line_color, fill_color in shapes:
shape = Shape(label=label)
for x, y in points:
shape.addPoint(QPointF(x, y))
shape.close()
s.append(shape)
self.addLabel(shape)
if line_color:
shape.line_color = QColor(*line_color)
if fill_color:
shape.fill_color = QColor(*fill_color)
self.canvas.loadShapes(s)
    def saveLabels(self, filename):
        """Serialize the canvas shapes to *filename* via LabelFile.

        Writes Pascal VOC XML when usingPascalVocFormat is set, otherwise the
        native LabelFile format. Returns True on success, False after showing
        an error dialog on LabelFileError.
        """
        lf = LabelFile()
        def format_shape(s):
            # Per-shape colors are stored only when they differ from the defaults.
            return dict(label=unicode(s.label),
                        line_color=s.line_color.getRgb()\
                    if s.line_color != self.lineColor else None,
                        fill_color=s.fill_color.getRgb()\
                    if s.fill_color != self.fillColor else None,
                        points=[(p.x(), p.y()) for p in s.points])
        shapes = [format_shape(shape) for shape in self.canvas.shapes]
        # Can add differrent annotation formats here
        try:
            if self.usingPascalVocFormat is True:
                print 'savePascalVocFormat save to:' + filename
                lf.savePascalVocFormat(filename, shapes, unicode(self.filename), self.imageData,
                                       self.lineColor.getRgb(), self.fillColor.getRgb())
            else:
                lf.save(filename, shapes, unicode(self.filename), self.imageData,
                        self.lineColor.getRgb(), self.fillColor.getRgb())
            self.labelFile = lf
            self.filename = filename
            return True
        except LabelFileError, e:
            self.errorMessage(u'Error saving label data',
                              u'<b>%s</b>' % e)
            return False
def copySelectedShape(self):
self.addLabel(self.canvas.copySelectedShape())
#fix copy and delete
self.shapeSelectionChanged(True)
def labelSelectionChanged(self):
item = self.currentItem()
if item and self.canvas.editing():
self._noSelectionSlot = True
self.canvas.selectShape(self.itemsToShapes[item])
def labelItemChanged(self, item):
shape = self.itemsToShapes[item]
label = unicode(item.text())
if label != shape.label:
shape.label = unicode(item.text())
self.setDirty()
else: # User probably changed item visibility
self.canvas.setShapeVisible(shape, item.checkState() == Qt.Checked)
## Callback functions:
def newShape(self):
"""Pop-up and give focus to the label editor.
position MUST be in global coordinates.
"""
if len(self.labelHist) > 0:
self.labelDialog = LabelDialog(parent=self, listItem=self.labelHist)
text = self.labelDialog.popUp()
if text is not None:
self.addLabel(self.canvas.setLastLabel(text))
if self.beginner(): # Switch to edit mode.
self.canvas.setEditing(True)
self.actions.create.setEnabled(True)
else:
self.actions.editMode.setEnabled(True)
self.setDirty()
if text not in self.labelHist:
self.labelHist.append(text)
else:
#self.canvas.undoLastLine()
self.canvas.resetAllLines()
def scrollRequest(self, delta, orientation):
units = - delta / (8 * 15)
bar = self.scrollBars[orientation]
bar.setValue(bar.value() + bar.singleStep() * units)
def setZoom(self, value):
self.actions.fitWidth.setChecked(False)
self.actions.fitWindow.setChecked(False)
self.zoomMode = self.MANUAL_ZOOM
self.zoomWidget.setValue(value)
def addZoom(self, increment=10):
self.setZoom(self.zoomWidget.value() + increment)
def zoomRequest(self, delta):
units = delta / (8 * 15)
scale = 10
self.addZoom(scale * units)
def setFitWindow(self, value=True):
if value:
self.actions.fitWidth.setChecked(False)
self.zoomMode = self.FIT_WINDOW if value else self.MANUAL_ZOOM
self.adjustScale()
def setFitWidth(self, value=True):
if value:
self.actions.fitWindow.setChecked(False)
self.zoomMode = self.FIT_WIDTH if value else self.MANUAL_ZOOM
self.adjustScale()
def togglePolygons(self, value):
for item, shape in self.itemsToShapes.iteritems():
item.setCheckState(Qt.Checked if value else Qt.Unchecked)
    def loadFile(self, filename=None):
        """Load the specified file, or the last opened file if None.

        Handles three sources: a native label file, a video
        (mp4/avi/mov, read frame-by-frame via cv2.VideoCapture), or a plain
        image read with cv2.imread. Also tries to locate and load a matching
        Pascal VOC XML from defaultSaveDir. Returns True on success.
        :rtype: object
        """
        self.resetState()
        self.canvas.setEnabled(False)
        if filename is None:
            filename = self.settings['filename']
        filename = unicode(filename)
        # Tzutalin 20160906 : Add file list and dock to move faster
        # Highlight the file item
        if filename and self.fileListWidget.count() > 0:
            index = self.mImgList.index(filename)
            fileWidgetItem = self.fileListWidget.item(index)
            self.fileListWidget.setItemSelected(fileWidgetItem, True)
        if QFile.exists(filename):
            if LabelFile.isLabelFile(filename):
                try:
                    self.labelFile = LabelFile(filename)
                except LabelFileError, e:
                    self.errorMessage(u'Error opening file',
                                      (u"<p><b>%s</b></p>"
                                       u"<p>Make sure <i>%s</i> is a valid label file.")\
                                      % (e, filename))
                    self.status("Error reading %s" % filename)
                    return False
                self.imageData = self.labelFile.imageData
                self.lineColor = QColor(*self.labelFile.lineColor)
                self.fillColor = QColor(*self.labelFile.fillColor)
            elif filename.endswith('mp4') or filename.endswith('avi') or filename.endswith('mov'):
                # Video: first call opens the capture; subsequent calls with
                # the same filename advance one frame and bump self._num.
                self.labelFile = None
                if self.cap == None:
                    self._num = 0
                    self.cap = cv2.VideoCapture(filename)
                    _, self.imageData = self.cap.read()
                else:
                    ret, self.imageData = self.cap.read()
                    if ret:
                        self._num += 1
            else:
                # Load image:
                # read data first and store for saving into label file.
                #self.imageData = read(filename, None)
                self.cap = None
                self.labelFile = None
                self.imageData = cv2.imread(filename)
            image = numpy2Qimage(self.imageData)
            if image.isNull():
                self.errorMessage(u'Error opening file',
                                  u"<p>Make sure <i>%s</i> is a valid image file." % filename)
                self.status("Error reading %s" % filename)
                return False
            self.status("Loaded %s" % os.path.basename(unicode(filename)))
            self.image = image
            self.filename = filename
            self.canvas.loadPixmap(QPixmap.fromImage(image))
            if self.labelFile:
                self.loadLabels(self.labelFile.shapes)
            self.setClean()
            self.canvas.setEnabled(True)
            self.adjustScale(initial=True)
            self.paintCanvas()
            self.addRecentFile(self.filename)
            self.toggleActions(True)
            ## Label xml file and show bound box according to its filename
            if self.cap is not None and self.defaultSaveDir is not None:
                # Video frame: look for '<basename><frame>.xml'; when absent,
                # fall back to the previous frame's XML and track from it.
                filename = self.filename
                index_str = str(self._num)
                basename = os.path.basename(os.path.splitext(filename)[0]) + index_str + '.xml'
                xmlPath = os.path.join(self.defaultSaveDir, basename)
                if QFile.exists(xmlPath):
                    self.loadPascalXMLByFilename(xmlPath)
                else:
                    basename = os.path.basename(os.path.splitext(filename)[0])\
                        + (str(self._num - 1) if self._num > 0 else str(self._num))\
                        + '.xml'
                    xmlPath = os.path.join(self.defaultSaveDir, basename)
                    self.loadPascalXMLByFilename(xmlPath, True)
            if self.usingPascalVocFormat is True and \
                    self.defaultSaveDir is not None:
                basename = os.path.basename(os.path.splitext(self.filename)[0]) + '.xml'
                xmlPath = os.path.join(self.defaultSaveDir, basename)
                self.loadPascalXMLByFilename(xmlPath,)
            return True
        return False
def resizeEvent(self, event):
if self.canvas and not self.image.isNull()\
and self.zoomMode != self.MANUAL_ZOOM:
self.adjustScale()
super(MainWindow, self).resizeEvent(event)
def paintCanvas(self):
assert not self.image.isNull(), "cannot paint null image"
self.canvas.scale = 0.01 * self.zoomWidget.value()
self.canvas.adjustSize()
self.canvas.update()
def adjustScale(self, initial=False):
value = self.scalers[self.FIT_WINDOW if initial else self.zoomMode]()
self.zoomWidget.setValue(int(100 * value))
def scaleFitWindow(self):
"""Figure out the size of the pixmap in order to fit the main widget."""
e = 2.0 # So that no scrollbars are generated.
w1 = self.centralWidget().width() - e
h1 = self.centralWidget().height() - e
a1 = w1/ h1
# Calculate a new scale value based on the pixmap's aspect ratio.
w2 = self.canvas.pixmap.width() - 0.0
h2 = self.canvas.pixmap.height() - 0.0
a2 = w2 / h2
return w1 / w2 if a2 >= a1 else h1 / h2
def scaleFitWidth(self):
# The epsilon does not seem to work too well here.
w = self.centralWidget().width() - 2.0
return w / self.canvas.pixmap.width()
    def closeEvent(self, event):
        """Persist window geometry, colors, recent files and dirs on close."""
        if not self.mayContinue():
            event.ignore()
        # NOTE(review): execution continues even when the event was ignored,
        # so the settings below are saved either way -- confirm intended.
        s = self.settings
        # If it loads images from dir, don't load it at the begining
        if self.dirname is None:
            s['filename'] = self.filename if self.filename else QString()
        else:
            s['filename'] = ''
        s['window/size'] = self.size()
        s['window/position'] = self.pos()
        s['window/state'] = self.saveState()
        s['line/color'] = self.lineColor
        s['fill/color'] = self.fillColor
        s['recentFiles'] = self.recentFiles
        s['advanced'] = not self._beginner
        if self.defaultSaveDir is not None and len(self.defaultSaveDir) > 1:
            s['savedir'] = str(self.defaultSaveDir)
        else:
            s['savedir'] = ""
        if self.lastOpenDir is not None and len(self.lastOpenDir) > 1:
            s['lastOpenDir'] = str(self.lastOpenDir)
        else:
            s['lastOpenDir'] = ""
        #ask the use for where to save the labels
        #s['window/geometry'] = self.saveGeometry()
## User Dialogs ##
def loadRecent(self, filename):
if self.mayContinue():
self.loadFile(filename)
def scanAllImages(self, folderPath):
extensions = ['.jpeg','.jpg', '.png', '.bmp']
images = []
for root, dirs, files in os.walk(folderPath):
for file in files:
if file.lower().endswith(tuple(extensions)):
relatviePath = os.path.join(root, file)
images.append(os.path.abspath(relatviePath))
images.sort(key=lambda x: x.lower())
return images
def changeSavedir(self, _value=False):
if self.defaultSaveDir is not None:
path = unicode(self.defaultSaveDir)
else:
path = '.'
dirpath = unicode(QFileDialog.getExistingDirectory(self,
'%s - Save to the directory' % __appname__, path, QFileDialog.ShowDirsOnly
| QFileDialog.DontResolveSymlinks))
if dirpath is not None and len(dirpath) > 1:
self.defaultSaveDir = dirpath
self.statusBar().showMessage('%s . Annotation will be saved to %s' %('Change saved folder', self.defaultSaveDir))
self.statusBar().show()
def openAnnotation(self, _value=False):
if self.filename is None:
return
path = os.path.dirname(unicode(self.filename))\
if self.filename else '.'
if self.usingPascalVocFormat:
formats = ['*.%s' % unicode(fmt).lower()\
for fmt in QImageReader.supportedImageFormats()]
filters = "Open Annotation XML file (%s)" % \
' '.join(formats + ['*.xml'])
filename = unicode(QFileDialog.getOpenFileName(self,
'%s - Choose a xml file' % __appname__, path, filters))
self.loadPascalXMLByFilename(filename)
def openDir(self, _value=False):
if not self.mayContinue():
return
path = os.path.dirname(unicode(self.filename))\
if self.filename else '.'
if self.lastOpenDir is not None and len(self.lastOpenDir) > 1:
path = self.lastOpenDir
dirpath = unicode(QFileDialog.getExistingDirectory(self,
'%s - Open Directory' % __appname__, path, QFileDialog.ShowDirsOnly
| QFileDialog.DontResolveSymlinks))
if dirpath is not None and len(dirpath) > 1:
self.lastOpenDir = dirpath
self.dirname = dirpath
self.mImgList = self.scanAllImages(dirpath)
self.openNextImg()
for imgPath in self.mImgList:
item = QListWidgetItem(imgPath)
self.fileListWidget.addItem(item)
def openPrevImg(self, _value=False):
if not self.mayContinue():
return
if len(self.mImgList) <= 0:
return
if self.filename is None:
return
currIndex = self.mImgList.index(self.filename)
if currIndex -1 >= 0:
filename = self.mImgList[currIndex-1]
if filename:
self.loadFile(filename)
    def openNextImg(self, _value=False):
        """Advance to the next image (or next video frame when a capture is open).

        Auto-saves the current annotations first when autoSaving is enabled
        and a default save directory is configured.
        """
        # Proceding next image without dialog if having any label
        if self.cap is not None:
            # Video mode: every frame counts as dirty so it gets auto-saved.
            self.dirty = True
        if self.autoSaving is True and self.defaultSaveDir is not None:
            if self.dirty is True and self.hasLabels():
                self.saveFile()
        if self.cap != None:
            # Video mode: reloading the same filename pulls the next frame.
            return self.loadFile(self.filename)
        if not self.mayContinue():
            return
        if len(self.mImgList) <= 0:
            return
        filename = None
        if self.filename is None:
            filename = self.mImgList[0]
        else:
            currIndex = self.mImgList.index(self.filename)
            if currIndex + 1 < len(self.mImgList):
                filename = self.mImgList[currIndex+1]
        if filename:
            self.loadFile(filename)
def openFile(self, _value=False):
if not self.mayContinue():
return
path = os.path.dirname(unicode(self.filename))\
if self.filename else '.'
formats = ['*.%s' % unicode(fmt).lower()\
for fmt in QImageReader.supportedImageFormats()]
filters = "Image & Label files (%s)" % \
' '.join(formats + ['*%s' % LabelFile.suffix] + ['*.avi', '*.mp4'])
filename = unicode(QFileDialog.getOpenFileName(self,
'%s - Choose Image or Label file' % __appname__, path, filters))
if self.cap != None:
self.cap.release()
self.cap = None
if filename:
self.loadFile(filename)
    def saveFile(self, _value=False):
        """Save current labels, deriving the XML path from the image name.

        With a default save dir set the XML goes there (video frames get the
        frame number appended); otherwise fall back to a save dialog.
        """
        assert not self.image.isNull(), "cannot save empty image"
        if self.hasLabels():
            if self.defaultSaveDir is not None and len(str(self.defaultSaveDir)):
                print 'handle the image:' + self.filename
                imgFileName = os.path.basename(self.filename)
                # Video frames: append the frame index so each gets its own XML.
                savedFileName = os.path.splitext(imgFileName)[0] + (str(self._num) if self.cap is not None else '')
                savedFileName = savedFileName + LabelFile.suffix
                savedPath = os.path.join(str(self.defaultSaveDir), savedFileName)
                self._saveFile(savedPath)
            else:
                self._saveFile(self.filename if self.labelFile\
                               else self.saveFileDialog())
def saveFileAs(self, _value=False):
assert not self.image.isNull(), "cannot save empty image"
if self.hasLabels():
self._saveFile(self.saveFileDialog())
def saveFileDialog(self):
caption = '%s - Choose File' % __appname__
filters = 'File (*%s)' % LabelFile.suffix
openDialogPath = self.currentPath()
dlg = QFileDialog(self, caption, openDialogPath, filters)
dlg.setDefaultSuffix(LabelFile.suffix[1:])
dlg.setAcceptMode(QFileDialog.AcceptSave)
dlg.setConfirmOverwrite(True)
filenameWithoutExtension = os.path.splitext(self.filename)[0]
dlg.selectFile(filenameWithoutExtension)
dlg.setOption(QFileDialog.DontUseNativeDialog, False)
if dlg.exec_():
return dlg.selectedFiles()[0]
return ''
def _saveFile(self, filename):
if filename and self.saveLabels(filename):
self.addRecentFile(filename)
self.setClean()
self.statusBar().showMessage('Saved to %s' % filename)
self.statusBar().show()
def closeFile(self, _value=False):
if not self.mayContinue():
return
self.resetState()
self.setClean()
self.toggleActions(False)
self.canvas.setEnabled(False)
self.actions.saveAs.setEnabled(False)
# Message Dialogs. #
def hasLabels(self):
if not self.itemsToShapes:
self.errorMessage(u'No objects labeled',
u'You must label at least one object to save the file.')
return False
return True
def mayContinue(self):
return not (self.dirty and not self.discardChangesDialog())
def discardChangesDialog(self):
yes, no = QMessageBox.Yes, QMessageBox.No
msg = u'You have unsaved changes, proceed anyway?'
return yes == QMessageBox.warning(self, u'Attention', msg, yes|no)
def errorMessage(self, title, message):
return QMessageBox.critical(self, title,
'<p><b>%s</b></p>%s' % (title, message))
def currentPath(self):
return os.path.dirname(unicode(self.filename)) if self.filename else '.'
def chooseColor1(self):
color = self.colorDialog.getColor(self.lineColor, u'Choose line color',
default=DEFAULT_LINE_COLOR)
if color:
self.lineColor = color
# Change the color for all shape lines:
Shape.line_color = self.lineColor
self.canvas.update()
self.setDirty()
def chooseColor2(self):
        """Pick a new global fill colour, apply it to all shapes, repaint."""
        color = self.colorDialog.getColor(self.fillColor, u'Choose fill color',
                                          default=DEFAULT_FILL_COLOR)
        if not color:
            return
        self.fillColor = color
        Shape.fill_color = self.fillColor
        self.canvas.update()
        self.setDirty()
def deleteSelectedShape(self):
        """Delete the selected box after confirmation, updating label list and
        disabling shape-dependent actions when no shapes remain."""
        msg = u'You are about to permanently delete this Box, proceed anyway?'
        answer = QMessageBox.warning(self, u'Attention', msg,
                                     QMessageBox.Yes | QMessageBox.No)
        if answer != QMessageBox.Yes:
            return
        self.remLabel(self.canvas.deleteSelected())
        self.setDirty()
        if self.noShapes():
            for action in self.actions.onShapesPresent:
                action.setEnabled(False)
def chshapeLineColor(self):
        """Recolour only the selected shape's outline."""
        color = self.colorDialog.getColor(self.lineColor, u'Choose line color',
                                          default=DEFAULT_LINE_COLOR)
        if not color:
            return
        self.canvas.selectedShape.line_color = color
        self.canvas.update()
        self.setDirty()
def chshapeFillColor(self):
        """Recolour only the selected shape's fill."""
        color = self.colorDialog.getColor(self.fillColor, u'Choose fill color',
                                          default=DEFAULT_FILL_COLOR)
        if not color:
            return
        self.canvas.selectedShape.fill_color = color
        self.canvas.update()
        self.setDirty()
def copyShape(self):
        """Finish a drag-copy on the canvas and register the duplicate's label."""
        self.canvas.endMove(copy=True)
        self.addLabel(self.canvas.selectedShape)
        self.setDirty()
def moveShape(self):
        """Finish a move on the canvas and mark the document dirty."""
        self.canvas.endMove(copy=False)
        self.setDirty()
def loadPredefinedClasses(self):
        """Populate self.labelHist from data/predefined_classes.txt.

        Each line of the file (stripped of surrounding whitespace) becomes
        one label entry.  If the file does not exist this is a no-op.
        """
        predefined_classes_path = os.path.join('data', 'predefined_classes.txt')
        if not os.path.exists(predefined_classes_path):
            return
        with codecs.open(predefined_classes_path, 'r', 'utf8') as f:
            for line in f:
                line = line.strip()
                if self.labelHist is None:
                    # Bug fix: was 'self.lablHist' (typo), which created a
                    # stray attribute and silently dropped the first class.
                    self.labelHist = [line]
                else:
                    self.labelHist.append(line)
def loadPascalXMLByFilename(self, xmlpath, istrack=False):
        """Load shapes from a Pascal VOC XML annotation file.

        With ``istrack`` True, each stored box is re-estimated by running the
        tracker against the current image data and the tracked rectangle is
        loaded instead of the stored one.  Silently returns when no image is
        open or ``xmlpath`` does not exist.
        """
        if self.filename is None:
            return
        if os.path.isfile(xmlpath) is False:
            return
        tVocParseReader = PascalVocReader(xmlpath)
        shapes = tVocParseReader.getShapes()
        if istrack is True:
            track_res = []
            for shape in shapes:
                # shape[1] holds four corner points; points 0 and 2 are
                # opposite corners, defining the bounding rectangle.
                point1, _, point2, _ = shape[1]
                width = point2[0] - point1[0]
                height = point2[1] - point1[1]
                rect = (point1[0], point1[1], width, height)
                # NOTE(review): 'Meanshife' looks like a misspelling of
                # 'Meanshift' -- confirm against the tracker module it
                # is imported from before renaming.
                tracker = Meanshife(rect, self.imageData)
                out_rect = tracker.track(self.imageData)
                x, y, w, h = out_rect
                # Rebuild the 4-corner polygon from the tracked rect.
                temp_shape = (shape[0], [(x, y), (x+w, y), (x+w, y+h), (x, y+h)], shape[2], shape[3])
                track_res.append(temp_shape)
            self.loadLabels(track_res)
        else:
            self.loadLabels(shapes)
|
{
"content_hash": "15ba2f20ece2ca82989c73bce9a45a93",
"timestamp": "",
"source": "github",
"line_count": 1173,
"max_line_length": 122,
"avg_line_length": 38.83972719522592,
"alnum_prop": 0.5928576132048552,
"repo_name": "chenpeikai/labelimg",
"id": "fa245a15cec02da9a2104c9fb23d03295567278f",
"size": "45559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libs/ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "50"
},
{
"name": "Python",
"bytes": "87833"
},
{
"name": "Shell",
"bytes": "3057"
}
],
"symlink_target": ""
}
|
'''
New Integration Test for hybrid.
@author: Legion
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import time
# Module-level fixtures shared by test()/env_recover()/error_cleanup().
test_obj_dict = test_state.TestStateDict()
test_stub = test_lib.lib_get_test_stub()
# Hybrid-cloud helper object under test (wraps datacenter/OSS operations).
hybrid = test_stub.HybridObject()
def test():
    """Create an OSS bucket in a new datacenter, then update its description."""
    hybrid.add_datacenter_iz(add_datacenter_only=True)
    hybrid.create_bucket()
    # presumably waits for the new bucket to propagate before updating --
    # TODO confirm whether a fixed 10s sleep is actually required.
    time.sleep(10)
    hybrid.update_oss_bucket(description='test-OSS-Bucket')
    test_util.test_pass('Update OSS Bucket Test Success')
def env_recover():
    """Tear down the bucket created by test(), if one was created."""
    if hybrid.oss_bucket:
        hybrid.del_bucket()
#Will be called only if exception happens in test().
def error_cleanup():
    """Release any resources tracked in test_obj_dict after a test failure."""
    global test_obj_dict
    test_lib.lib_error_cleanup(test_obj_dict)
|
{
"content_hash": "5bf89d73901c208b57b5c92759ecfc49",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 59,
"avg_line_length": 22.514285714285716,
"alnum_prop": 0.7258883248730964,
"repo_name": "zstackio/zstack-woodpecker",
"id": "834ff6b17eb870fe2b5b7ba6f9a41c44bbf81bcb",
"size": "788",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/hybrid/test_update_oss_bucket.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, with_statement
import os
import traceback
from tornado.escape import utf8, native_str, to_unicode
from tornado.template import Template, DictLoader, ParseError, Loader
from tornado.testing import LogTrapTestCase
from tornado.util import b, bytes_type, ObjectDict
class TemplateTest(LogTrapTestCase):
    """Basic rendering tests for tornado.template Template and DictLoader:
    expressions, comments, include/extends, escapes, unicode, and the
    apply/if/try/comment directives."""
    def test_simple(self):
        template = Template("Hello {{ name }}!")
        self.assertEqual(template.generate(name="Ben"),
                         b("Hello Ben!"))
    def test_bytes(self):
        # Byte-string arguments render the same as native strings.
        template = Template("Hello {{ name }}!")
        self.assertEqual(template.generate(name=utf8("Ben")),
                         b("Hello Ben!"))
    def test_expressions(self):
        template = Template("2 + 2 = {{ 2 + 2 }}")
        self.assertEqual(template.generate(), b("2 + 2 = 4"))
    def test_comment(self):
        # {# ... #} comments are dropped from the output.
        template = Template("Hello{# TODO i18n #} {{ name }}!")
        self.assertEqual(template.generate(name=utf8("Ben")),
                         b("Hello Ben!"))
    def test_include(self):
        loader = DictLoader({
            "index.html": '{% include "header.html" %}\nbody text',
            "header.html": "header text",
        })
        self.assertEqual(loader.load("index.html").generate(),
                         b("header text\nbody text"))
    def test_extends(self):
        loader = DictLoader({
            "base.html": """\
<title>{% block title %}default title{% end %}</title>
<body>{% block body %}default body{% end %}</body>
""",
            "page.html": """\
{% extends "base.html" %}
{% block title %}page title{% end %}
{% block body %}page body{% end %}
""",
        })
        self.assertEqual(loader.load("page.html").generate(),
                         b("<title>page title</title>\n<body>page body</body>\n"))
    def test_relative_load(self):
        # Includes are resolved relative to the including template's path.
        loader = DictLoader({
            "a/1.html": "{% include '2.html' %}",
            "a/2.html": "{% include '../b/3.html' %}",
            "b/3.html": "ok",
        })
        self.assertEqual(loader.load("a/1.html").generate(),
                         b("ok"))
    def test_escaping(self):
        # "{{!" and "{%!" escape to literal "{{" / "{%".
        self.assertRaises(ParseError, lambda: Template("{{"))
        self.assertRaises(ParseError, lambda: Template("{%"))
        self.assertEqual(Template("{{!").generate(), b("{{"))
        self.assertEqual(Template("{%!").generate(), b("{%"))
        self.assertEqual(Template("{{ 'expr' }} {{!jquery expr}}").generate(),
                         b("expr {{jquery expr}}"))
    def test_unicode_template(self):
        template = Template(utf8(u"\u00e9"))
        self.assertEqual(template.generate(), utf8(u"\u00e9"))
    def test_unicode_literal_expression(self):
        # Unicode literals should be usable in templates. Note that this
        # test simulates unicode characters appearing directly in the
        # template file (with utf8 encoding), i.e. \u escapes would not
        # be used in the template file itself.
        if str is unicode:
            # python 3 needs a different version of this test since
            # 2to3 doesn't run on template internals
            template = Template(utf8(u'{{ "\u00e9" }}'))
        else:
            template = Template(utf8(u'{{ u"\u00e9" }}'))
        self.assertEqual(template.generate(), utf8(u"\u00e9"))
    def test_custom_namespace(self):
        loader = DictLoader({"test.html": "{{ inc(5) }}"}, namespace={"inc": lambda x: x + 1})
        self.assertEqual(loader.load("test.html").generate(), b("6"))
    def test_apply(self):
        # {% apply f %}...{% end %} filters the enclosed output through f.
        def upper(s):
            return s.upper()
        template = Template(utf8("{% apply upper %}foo{% end %}"))
        self.assertEqual(template.generate(upper=upper), b("FOO"))
    def test_if(self):
        template = Template(utf8("{% if x > 4 %}yes{% else %}no{% end %}"))
        self.assertEqual(template.generate(x=5), b("yes"))
        self.assertEqual(template.generate(x=3), b("no"))
    def test_try(self):
        template = Template(utf8("""{% try %}
try{% set y = 1/x %}
{% except %}-except
{% else %}-else
{% finally %}-finally
{% end %}"""))
        self.assertEqual(template.generate(x=1), b("\ntry\n-else\n-finally\n"))
        self.assertEqual(template.generate(x=0), b("\ntry-except\n-finally\n"))
    def test_comment_directive(self):
        # {% comment ... %} consumes the whole tag and renders nothing.
        template = Template(utf8("{% comment blah blah %}foo"))
        self.assertEqual(template.generate(), b("foo"))
class StackTraceTest(LogTrapTestCase):
    """Verify that exceptions raised inside templates carry
    '# <template>:<line>' annotations in the formatted traceback, including
    the '(via ...)' chain for includes, modules and extends."""
    def test_error_line_number_expression(self):
        loader = DictLoader({"test.html": """one
two{{1/0}}
three
        """})
        try:
            loader.load("test.html").generate()
        except ZeroDivisionError:
            self.assertTrue("# test.html:2" in traceback.format_exc())
    def test_error_line_number_directive(self):
        loader = DictLoader({"test.html": """one
two{%if 1/0%}
three{%end%}
        """})
        try:
            loader.load("test.html").generate()
        except ZeroDivisionError:
            self.assertTrue("# test.html:2" in traceback.format_exc())
    def test_error_line_number_module(self):
        # The {% module %} namespace entry just renders the named template,
        # so both frames should appear in the traceback.
        loader = DictLoader({
            "base.html": "{% module Template('sub.html') %}",
            "sub.html": "{{1/0}}",
        }, namespace={"_modules": ObjectDict({"Template": lambda path, **kwargs: loader.load(path).generate(**kwargs)})})
        try:
            loader.load("base.html").generate()
        except ZeroDivisionError:
            exc_stack = traceback.format_exc()
            self.assertTrue('# base.html:1' in exc_stack)
            self.assertTrue('# sub.html:1' in exc_stack)
    def test_error_line_number_include(self):
        loader = DictLoader({
            "base.html": "{% include 'sub.html' %}",
            "sub.html": "{{1/0}}",
        })
        try:
            loader.load("base.html").generate()
        except ZeroDivisionError:
            self.assertTrue("# sub.html:1 (via base.html:1)" in
                            traceback.format_exc())
    def test_error_line_number_extends_base_error(self):
        loader = DictLoader({
            "base.html": "{{1/0}}",
            "sub.html": "{% extends 'base.html' %}",
        })
        try:
            loader.load("sub.html").generate()
        except ZeroDivisionError:
            exc_stack = traceback.format_exc()
            self.assertTrue("# base.html:1" in exc_stack)
    def test_error_line_number_extends_sub_error(self):
        loader = DictLoader({
            "base.html": "{% block 'block' %}{% end %}",
            "sub.html": """
{% extends 'base.html' %}
{% block 'block' %}
{{1/0}}
{% end %}
            """})
        try:
            loader.load("sub.html").generate()
        except ZeroDivisionError:
            self.assertTrue("# sub.html:4 (via base.html:1)" in
                            traceback.format_exc())
    def test_multi_includes(self):
        loader = DictLoader({
            "a.html": "{% include 'b.html' %}",
            "b.html": "{% include 'c.html' %}",
            "c.html": "{{1/0}}",
        })
        try:
            loader.load("a.html").generate()
        except ZeroDivisionError:
            self.assertTrue("# c.html:1 (via b.html:1, a.html:1)" in
                            traceback.format_exc())
class AutoEscapeTest(LogTrapTestCase):
    """Tests for the {% autoescape %} directive and loader-level autoescape.

    NOTE(review): the expected byte strings below were corrupted in this
    copy of the file -- HTML entities ("&lt;", "&gt;", "&amp;", "&quot;")
    had been unescaped to literal characters, which both inverted the
    escaped-output expectations and broke the string literal in
    test_raw_expression.  They are restored here per tornado's
    xhtml_escape semantics: a per-file {% autoescape %} directive always
    wins over the loader default, and a block inherits the autoescape
    setting of the file that defines its text.
    """
    def setUp(self):
        self.templates = {
            "escaped.html": "{% autoescape xhtml_escape %}{{ name }}",
            "unescaped.html": "{% autoescape None %}{{ name }}",
            "default.html": "{{ name }}",
            "include.html": """\
escaped: {% include 'escaped.html' %}
unescaped: {% include 'unescaped.html' %}
default: {% include 'default.html' %}
""",
            "escaped_block.html": """\
{% autoescape xhtml_escape %}\
{% block name %}base: {{ name }}{% end %}""",
            "unescaped_block.html": """\
{% autoescape None %}\
{% block name %}base: {{ name }}{% end %}""",
            # Extend a base template with different autoescape policy,
            # with and without overriding the base's blocks
            "escaped_extends_unescaped.html": """\
{% autoescape xhtml_escape %}\
{% extends "unescaped_block.html" %}""",
            "escaped_overrides_unescaped.html": """\
{% autoescape xhtml_escape %}\
{% extends "unescaped_block.html" %}\
{% block name %}extended: {{ name }}{% end %}""",
            "unescaped_extends_escaped.html": """\
{% autoescape None %}\
{% extends "escaped_block.html" %}""",
            "unescaped_overrides_escaped.html": """\
{% autoescape None %}\
{% extends "escaped_block.html" %}\
{% block name %}extended: {{ name }}{% end %}""",
            "raw_expression.html": """\
{% autoescape xhtml_escape %}\
expr: {{ name }}
raw: {% raw name %}""",
        }
    def test_default_off(self):
        # Loader default None: only templates with an explicit
        # xhtml_escape directive escape their output.
        loader = DictLoader(self.templates, autoescape=None)
        name = "Bobby <table>s"
        self.assertEqual(loader.load("escaped.html").generate(name=name),
                         b("Bobby &lt;table&gt;s"))
        self.assertEqual(loader.load("unescaped.html").generate(name=name),
                         b("Bobby <table>s"))
        self.assertEqual(loader.load("default.html").generate(name=name),
                         b("Bobby <table>s"))
        self.assertEqual(loader.load("include.html").generate(name=name),
                         b("escaped: Bobby &lt;table&gt;s\n"
                           "unescaped: Bobby <table>s\n"
                           "default: Bobby <table>s\n"))
    def test_default_on(self):
        # Loader default xhtml_escape: templates escape unless they opt
        # out with {% autoescape None %}.
        loader = DictLoader(self.templates, autoescape="xhtml_escape")
        name = "Bobby <table>s"
        self.assertEqual(loader.load("escaped.html").generate(name=name),
                         b("Bobby &lt;table&gt;s"))
        self.assertEqual(loader.load("unescaped.html").generate(name=name),
                         b("Bobby <table>s"))
        self.assertEqual(loader.load("default.html").generate(name=name),
                         b("Bobby &lt;table&gt;s"))
        self.assertEqual(loader.load("include.html").generate(name=name),
                         b("escaped: Bobby &lt;table&gt;s\n"
                           "unescaped: Bobby <table>s\n"
                           "default: Bobby &lt;table&gt;s\n"))
    def test_unextended_block(self):
        loader = DictLoader(self.templates)
        name = "<script>"
        self.assertEqual(loader.load("escaped_block.html").generate(name=name),
                         b("base: &lt;script&gt;"))
        self.assertEqual(loader.load("unescaped_block.html").generate(name=name),
                         b("base: <script>"))
    def test_extended_block(self):
        # A block uses the autoescape policy of the file that defines its
        # text, not of the file that triggers its rendering.
        loader = DictLoader(self.templates)
        def render(name):
            return loader.load(name).generate(name="<script>")
        self.assertEqual(render("escaped_extends_unescaped.html"),
                         b("base: <script>"))
        self.assertEqual(render("escaped_overrides_unescaped.html"),
                         b("extended: &lt;script&gt;"))
        self.assertEqual(render("unescaped_extends_escaped.html"),
                         b("base: &lt;script&gt;"))
        self.assertEqual(render("unescaped_overrides_escaped.html"),
                         b("extended: <script>"))
    def test_raw_expression(self):
        # {% raw %} bypasses the file's autoescape for one expression.
        loader = DictLoader(self.templates)
        def render(name):
            return loader.load(name).generate(name='<>&"')
        self.assertEqual(render("raw_expression.html"),
                         b("expr: &lt;&gt;&amp;&quot;\n"
                           "raw: <>&\""))
    def test_custom_escape(self):
        # autoescape can name any function in the generate() namespace.
        loader = DictLoader({"foo.py":
                             "{% autoescape py_escape %}s = {{ name }}\n"})
        def py_escape(s):
            self.assertEqual(type(s), bytes_type)
            return repr(native_str(s))
        def render(template, name):
            return loader.load(template).generate(py_escape=py_escape,
                                                  name=name)
        self.assertEqual(render("foo.py", "<html>"),
                         b("s = '<html>'\n"))
        self.assertEqual(render("foo.py", "';sys.exit()"),
                         b("""s = "';sys.exit()"\n"""))
        self.assertEqual(render("foo.py", ["not a string"]),
                         b("""s = "['not a string']"\n"""))
class TemplateLoaderTest(LogTrapTestCase):
    """Filesystem Loader tests against the on-disk 'templates' directory
    that ships next to this test module."""
    def setUp(self):
        self.loader = Loader(os.path.join(os.path.dirname(__file__), "templates"))
    def test_utf8_in_file(self):
        # Template files are decoded as utf-8.
        tmpl = self.loader.load("utf8.html")
        result = tmpl.generate()
        self.assertEqual(to_unicode(result).strip(), u"H\u00e9llo")
|
{
"content_hash": "8fba1925034f2612998fa36b0e770c26",
"timestamp": "",
"source": "github",
"line_count": 333,
"max_line_length": 121,
"avg_line_length": 38.45945945945946,
"alnum_prop": 0.5382212852346373,
"repo_name": "pombredanne/catawampus",
"id": "d70e4875c4b42e8980868131aa01519e28324a65",
"size": "12807",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tr/vendor/tornado/tornado/test/template_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "3855"
},
{
"name": "Python",
"bytes": "664568"
},
{
"name": "Shell",
"bytes": "85447"
}
],
"symlink_target": ""
}
|
from Tkinter import *
from Tkinter import _flatten, _cnfmerge, _default_root
# WARNING - TkVersion is a limited precision floating point number
if TkVersion < 3.999:
    raise ImportError, "This version of Tix.py requires Tk 4.0 or higher"
import _tkinter # If this fails your Python may not be configured for Tk
# Some more constants (for consistency with Tkinter)
WINDOW = 'window'
TEXT = 'text'
STATUS = 'status'
IMMEDIATE = 'immediate'
IMAGE = 'image'
IMAGETEXT = 'imagetext'
BALLOON = 'balloon'
AUTO = 'auto'
ACROSSTOP = 'acrosstop'
# Some constants used by Tkinter dooneevent()
# (bit flags selecting event classes; 0 selects every class)
TCL_DONT_WAIT = 1 << 1
TCL_WINDOW_EVENTS = 1 << 2
TCL_FILE_EVENTS = 1 << 3
TCL_TIMER_EVENTS = 1 << 4
TCL_IDLE_EVENTS = 1 << 5
TCL_ALL_EVENTS = 0
# BEWARE - this is implemented by copying some code from the Widget class
# in Tkinter (to override Widget initialization) and is therefore
# liable to break.
import Tkinter, os
# Could probably add this to Tkinter.Misc
class tixCommand:
    """The tix commands provide access to miscellaneous elements
    of Tix's internal state and the Tix application context.
    Most of the information manipulated by these commands pertains
    to the application as a whole, or to a screen or
    display, rather than to a particular window.
    This is a mixin class, assumed to be mixed to Tkinter.Tk
    that supports the self.tk.call method.
    """
    def tix_addbitmapdir(self, directory):
        """Tix maintains a list of directories under which
        the tix_getimage and tix_getbitmap commands will
        search for image files. The standard bitmap directory
        is $TIX_LIBRARY/bitmaps. The addbitmapdir command
        adds directory into this list. By using this
        command, the image files of an applications can
        also be located using the tix_getimage or tix_getbitmap
        command.
        """
        return self.tk.call('tix', 'addbitmapdir', directory)
    def tix_cget(self, option):
        """Returns the current value of the configuration
        option given by option. Option may be any of the
        options described in the CONFIGURATION OPTIONS section.
        """
        return self.tk.call('tix', 'cget', option)
    def tix_configure(self, cnf=None, **kw):
        """Query or modify the configuration options of the Tix application
        context. If no option is specified, returns a dictionary all of the
        available options. If option is specified with no value, then the
        command returns a list describing the one named option (this list
        will be identical to the corresponding sublist of the value
        returned if no option is specified). If one or more option-value
        pairs are specified, then the command modifies the given option(s)
        to have the given value(s); in this case the command returns an
        empty string. Option may be any of the configuration options.
        """
        # Copied from Tkinter.py
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        # No arguments at all: return the full option dictionary.
        if cnf is None:
            cnf = {}
            for x in self.tk.split(self.tk.call('tix', 'configure')):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        # NOTE(review): StringType is assumed to arrive via
        # 'from Tkinter import *' -- confirm it is in scope here.
        if isinstance(cnf, StringType):
            x = self.tk.split(self.tk.call('tix', 'configure', '-'+cnf))
            return (x[0][1:],) + x[1:]
        return self.tk.call(('tix', 'configure') + self._options(cnf))
    def tix_filedialog(self, dlgclass=None):
        """Returns the file selection dialog that may be shared among
        different calls from this application. This command will create a
        file selection dialog widget when it is called the first time. This
        dialog will be returned by all subsequent calls to tix_filedialog.
        An optional dlgclass parameter can be passed to specified what type
        of file selection dialog widget is desired. Possible options are
        tixFileSelectDialog or tixExFileSelectDialog.
        """
        if dlgclass is not None:
            return self.tk.call('tix', 'filedialog', dlgclass)
        else:
            return self.tk.call('tix', 'filedialog')
    def tix_getbitmap(self, name):
        """Locates a bitmap file of the name name.xpm or name in one of the
        bitmap directories (see the tix_addbitmapdir command above). By
        using tix_getbitmap, you can avoid hard coding the pathnames of the
        bitmap files in your application. When successful, it returns the
        complete pathname of the bitmap file, prefixed with the character
        '@'. The returned value can be used to configure the -bitmap
        option of the TK and Tix widgets.
        """
        return self.tk.call('tix', 'getbitmap', name)
    def tix_getimage(self, name):
        """Locates an image file of the name name.xpm, name.xbm or name.ppm
        in one of the bitmap directories (see the addbitmapdir command
        above). If more than one file with the same name (but different
        extensions) exist, then the image type is chosen according to the
        depth of the X display: xbm images are chosen on monochrome
        displays and color images are chosen on color displays. By using
        tix_getimage, you can avoid hard coding the pathnames of the
        image files in your application. When successful, this command
        returns the name of the newly created image, which can be used to
        configure the -image option of the Tk and Tix widgets.
        """
        return self.tk.call('tix', 'getimage', name)
    def tix_option_get(self, name):
        """Gets the options maintained by the Tix
        scheme mechanism. Available options include:
        active_bg active_fg bg
        bold_font dark1_bg dark1_fg
        dark2_bg dark2_fg disabled_fg
        fg fixed_font font
        inactive_bg inactive_fg input1_bg
        input2_bg italic_font light1_bg
        light1_fg light2_bg light2_fg
        menu_font output1_bg output2_bg
        select_bg select_fg selector
        """
        # could use self.tk.globalgetvar('tixOption', name)
        return self.tk.call('tix', 'option', 'get', name)
    def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None):
        """Resets the scheme and fontset of the Tix application to
        newScheme and newFontSet, respectively. This affects only those
        widgets created after this call. Therefore, it is best to call the
        resetoptions command before the creation of any widgets in a Tix
        application.
        The optional parameter newScmPrio can be given to reset the
        priority level of the Tk options set by the Tix schemes.
        Because of the way Tk handles the X option database, after Tix has
        been has imported and inited, it is not possible to reset the color
        schemes and font sets using the tix config command. Instead, the
        tix_resetoptions command must be used.
        """
        if newScmPrio is not None:
            return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio)
        else:
            return self.tk.call('tix', 'resetoptions', newScheme, newFontSet)
class Tk(Tkinter.Tk, tixCommand):
    """Toplevel widget of Tix which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    def __init__(self, screenName=None, baseName=None, className='Tix'):
        """Create the root window, extend Tcl's auto_path with the Tix
        library directory (from $TIX_LIBRARY, if set), and load Tix."""
        Tkinter.Tk.__init__(self, screenName, baseName, className)
        tixlib = os.environ.get('TIX_LIBRARY')
        self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]')
        if tixlib is not None:
            self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib)
            self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib)
        # Load Tix - this should work dynamically or statically
        # If it's static, tcl/tix8.1/pkgIndex.tcl should have
        # 'load {} Tix'
        # If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have
        # 'load libtix8.1.8.3.so Tix'
        self.tk.eval('package require Tix')
    def destroy(self):
        # For safety, remove any delete_window binding before destroy
        self.protocol("WM_DELETE_WINDOW", "")
        Tkinter.Tk.destroy(self)
# The Tix 'tixForm' geometry manager
class Form:
    """The Tix Form geometry manager
    Widgets can be arranged by specifying attachments to other widgets.
    See Tix documentation for complete details"""
    def config(self, cnf={}, **kw):
        """Configure this widget's Form attachment options."""
        self.tk.call('tixForm', self._w, *self._options(cnf, kw))
    form = config
    def __setitem__(self, key, value):
        # w[key] = value configures a single Form option.
        Form.form(self, {key: value})
    def check(self):
        """Ask tixForm whether the attachment graph is consistent."""
        return self.tk.call('tixForm', 'check', self._w)
    def forget(self):
        """Remove this widget from Form management."""
        self.tk.call('tixForm', 'forget', self._w)
    def grid(self, xsize=0, ysize=0):
        # With no sizes, query the current grid and return it as a tuple
        # of ints; otherwise set the grid to (xsize, ysize).
        if (not xsize) and (not ysize):
            x = self.tk.call('tixForm', 'grid', self._w)
            y = self.tk.splitlist(x)
            z = ()
            for x in y:
                z = z + (self.tk.getint(x),)
            return z
        return self.tk.call('tixForm', 'grid', self._w, xsize, ysize)
    def info(self, option=None):
        """Return Form attachment info, optionally for a single option
        (a leading '-' is added to the option name if missing)."""
        if not option:
            return self.tk.call('tixForm', 'info', self._w)
        if option[0] != '-':
            option = '-' + option
        return self.tk.call('tixForm', 'info', self._w, option)
    def slaves(self):
        """Return the widgets managed by this Form, as widget objects."""
        return map(self._nametowidget,
                   self.tk.splitlist(
                       self.tk.call(
                           'tixForm', 'slaves', self._w)))
# Mix Form into every Tkinter widget so all widgets can also be managed
# by the tixForm geometry manager.
Tkinter.Widget.__bases__ = Tkinter.Widget.__bases__ + (Form,)
class TixWidget(Tkinter.Widget):
    """A TixWidget class is used to package all (or most) Tix widgets.
    Widget initialization is extended in two ways:
    1) It is possible to give a list of options which must be part of
    the creation command (so called Tix 'static' options). These cannot be
    given as a 'config' command later.
    2) It is possible to give the name of an existing TK widget. These are
    child widgets created automatically by a Tix mega-widget. The Tk call
    to create these widgets is therefore bypassed in TixWidget.__init__
    Both options are for use by subclasses only.
    """
    def __init__ (self, master=None, widgetName=None,
                static_options=None, cnf={}, kw={}):
        # Merge keywords and dictionary arguments
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        # Move static options into extra. static_options must be
        # a list of keywords (or None).
        extra=()
        # 'options' is always a static option
        if static_options:
            static_options.append('options')
        else:
            static_options = ['options']
        for k,v in cnf.items()[:]:
            if k in static_options:
                extra = extra + ('-' + k, v)
                del cnf[k]
        self.widgetName = widgetName
        Widget._setup(self, master, cnf)
        # If widgetName is None, this is a dummy creation call where the
        # corresponding Tk widget has already been created by Tix
        if widgetName:
            self.tk.call(widgetName, self._w, *extra)
        # Non-static options - to be done via a 'config' command
        if cnf:
            Widget.config(self, cnf)
        # Dictionary to hold subwidget names for easier access. We can't
        # use the children list because the public Tix names may not be the
        # same as the pathname component
        self.subwidget_list = {}
    # We set up an attribute access function so that it is possible to
    # do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello'
    # when w is a StdButtonBox.
    # We can even do w.ok.invoke() because w.ok is subclassed from the
    # Button class if you go through the proper constructors
    def __getattr__(self, name):
        if self.subwidget_list.has_key(name):
            return self.subwidget_list[name]
        raise AttributeError, name
    def set_silent(self, value):
        """Set a variable without calling its action routine"""
        self.tk.call('tixSetSilent', self._w, value)
    def subwidget(self, name):
        """Return the named subwidget (which must have been created by
        the sub-class)."""
        n = self._subwidget_name(name)
        if not n:
            raise TclError, "Subwidget " + name + " not child of " + self._name
        # Remove header of name and leading dot
        n = n[len(self._w)+1:]
        return self._nametowidget(n)
    def subwidgets_all(self):
        """Return all subwidgets."""
        names = self._subwidget_names()
        if not names:
            return []
        retlist = []
        for name in names:
            name = name[len(self._w)+1:]
            try:
                retlist.append(self._nametowidget(name))
            except:
                # some of the widgets are unknown e.g. border in LabelFrame
                pass
        return retlist
    def _subwidget_name(self,name):
        """Get a subwidget name (returns a String, not a Widget !)"""
        try:
            return self.tk.call(self._w, 'subwidget', name)
        except TclError:
            return None
    def _subwidget_names(self):
        """Return the name of all subwidgets."""
        try:
            x = self.tk.call(self._w, 'subwidgets', '-all')
            return self.tk.split(x)
        except TclError:
            return None
    def config_all(self, option, value):
        """Set configuration options for all subwidgets (and self)."""
        if option == '':
            return
        elif not isinstance(option, StringType):
            option = `option`
        if not isinstance(value, StringType):
            value = `value`
        names = self._subwidget_names()
        for name in names:
            self.tk.call(name, 'configure', '-' + option, value)
    # These are missing from Tkinter
    def image_create(self, imgtype, cnf={}, master=None, **kw):
        """Create a Tk image of the given type; callable option values
        are registered as Tcl callbacks first."""
        if not master:
            master = Tkinter._default_root
            if not master:
                raise RuntimeError, 'Too early to create image'
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if callable(v):
                v = self._register(v)
            options = options + ('-'+k, v)
        return master.tk.call(('image', 'create', imgtype,) + options)
    def image_delete(self, imgname):
        """Delete a Tk image, ignoring errors from an already-dead root."""
        try:
            self.tk.call('image', 'delete', imgname)
        except TclError:
            # May happen if the root was destroyed
            pass
# Subwidgets are child widgets created automatically by mega-widgets.
# In python, we have to create these subwidgets manually to mirror their
# existence in Tk/Tix.
class TixSubWidget(TixWidget):
    """Subwidget class.
    This is used to mirror child widgets automatically created
    by Tix/Tk as part of a mega-widget in Python (which is not informed
    of this)"""
    def __init__(self, master, name,
                 destroy_physically=1, check_intermediate=1):
        # When check_intermediate is true, walk the Tk pathname and make
        # sure every intermediate widget exists on the Python side too.
        if check_intermediate:
            path = master._subwidget_name(name)
            try:
                path = path[len(master._w)+1:]
                plist = path.split('.')
            except:
                plist = []
        if (not check_intermediate) or len(plist) < 2:
            # immediate descendant
            TixWidget.__init__(self, master, None, None, {'name' : name})
        else:
            # Ensure that the intermediate widgets exist
            parent = master
            for i in range(len(plist) - 1):
                n = '.'.join(plist[:i+1])
                try:
                    w = master._nametowidget(n)
                    parent = w
                except KeyError:
                    # Create the intermediate widget
                    parent = TixSubWidget(parent, plist[i],
                                          destroy_physically=0,
                                          check_intermediate=0)
            TixWidget.__init__(self, parent, None, None, {'name' : name})
        self.destroy_physically = destroy_physically
    def destroy(self):
        # For some widgets e.g., a NoteBook, when we call destructors,
        # we must be careful not to destroy the frame widget since this
        # also destroys the parent NoteBook thus leading to an exception
        # in Tkinter when it finally calls Tcl to destroy the NoteBook
        for c in self.children.values(): c.destroy()
        if self.master.children.has_key(self._name):
            del self.master.children[self._name]
        if self.master.subwidget_list.has_key(self._name):
            del self.master.subwidget_list[self._name]
        if self.destroy_physically:
            # This is bypassed only for a few widgets
            self.tk.call('destroy', self._w)
# Useful func. to split Tcl lists and return as a dict. From Tkinter.py
def _lst2dict(lst):
dict = {}
for x in lst:
dict[x[0][1:]] = (x[0][1:],) + x[1:]
return dict
# Useful class to create a display style - later shared by many items.
# Contributed by Steffen Kremser
class DisplayStyle:
    """DisplayStyle - handle configuration options shared by
    (multiple) Display Items"""
    def __init__(self, itemtype, cnf={}, **kw ):
        # A 'refwindow' option may supply the window whose interpreter we
        # should use when no default root exists yet.
        master = _default_root # global from Tkinter
        if not master and cnf.has_key('refwindow'): master=cnf['refwindow']
        elif not master and kw.has_key('refwindow'): master= kw['refwindow']
        elif not master: raise RuntimeError, "Too early to create display style: no root window"
        self.tk = master.tk
        self.stylename = self.tk.call('tixDisplayStyle', itemtype,
                                      *self._options(cnf,kw) )
    def __str__(self):
        return self.stylename
    def _options(self, cnf, kw ):
        # Flatten cnf/kw into a ('-opt', value, ...) tuple for Tcl.
        if kw and cnf:
            cnf = _cnfmerge((cnf, kw))
        elif kw:
            cnf = kw
        opts = ()
        for k, v in cnf.items():
            opts = opts + ('-'+k, v)
        return opts
    def delete(self):
        """Destroy this display style in the Tcl interpreter."""
        self.tk.call(self.stylename, 'delete')
    def __setitem__(self,key,value):
        self.tk.call(self.stylename, 'configure', '-%s'%key, value)
    def config(self, cnf={}, **kw):
        """Query or modify options; returns the full option dict."""
        return _lst2dict(
            self.tk.split(
                self.tk.call(
                    self.stylename, 'configure', *self._options(cnf,kw))))
    def __getitem__(self,key):
        return self.tk.call(self.stylename, 'cget', '-%s'%key)
######################################################
### The Tix Widget classes - in alphabetical order ###
######################################################
class Balloon(TixWidget):
    """Balloon help widget.

    Subwidgets   Class
    ----------   -----
    label        Label
    message      Message"""
    # FIXME: It should inherit -superclass tixShell
    def __init__(self, master=None, cnf={}, **kw):
        # static seem to be -installcolormap -initwait -statusbar -cursor
        static = ['options', 'installcolormap', 'initwait', 'statusbar',
                  'cursor']
        TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw)
        # Subwidgets already exist on the Tk side; destroy_physically=0
        # prevents Python from destroying them out from under Tix.
        self.subwidget_list['label'] = _dummyLabel(self, 'label',
                                                   destroy_physically=0)
        self.subwidget_list['message'] = _dummyLabel(self, 'message',
                                                     destroy_physically=0)
    def bind_widget(self, widget, cnf={}, **kw):
        """Bind balloon widget to another.
        One balloon widget may be bound to several widgets at the same time"""
        self.tk.call(self._w, 'bind', widget._w, *self._options(cnf, kw))
    def unbind_widget(self, widget):
        """Remove the balloon binding from *widget*."""
        self.tk.call(self._w, 'unbind', widget._w)
class ButtonBox(TixWidget):
    """ButtonBox - A container for pushbuttons.
    Subwidgets are the buttons added with the add method.
    """
    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixButtonBox',
                           ['orientation', 'options'], cnf, kw)
    def add(self, name, cnf={}, **kw):
        """Add a button with given name to box."""
        btn = self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        self.subwidget_list[name] = _dummyButton(self, name)
        return btn
    def invoke(self, name):
        """Invoke the named button, if it was added to this box."""
        if self.subwidget_list.has_key(name):
            self.tk.call(self._w, 'invoke', name)
class ComboBox(TixWidget):
    """ComboBox - an Entry field with a dropdown menu. The user can select a
    choice by either typing in the entry subwidget or selecting from the
    listbox subwidget.

    Subwidget       Class
    ---------       -----
    entry           Entry
    arrow           Button
    slistbox        ScrolledListBox
    tick            Button
    cross           Button : present if created with the fancy option"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__ (self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixComboBox',
                           ['editable', 'dropdown', 'fancy', 'options'],
                           cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
        self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
        self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
                                                                'slistbox')
        try:
            self.subwidget_list['tick'] = _dummyButton(self, 'tick')
            self.subwidget_list['cross'] = _dummyButton(self, 'cross')
        except TypeError:
            # unavailable when -fancy not specified
            pass

    # align
    def add_history(self, str):
        # NOTE: parameter shadows the builtin 'str'; kept for
        # backward compatibility with keyword callers.
        self.tk.call(self._w, 'addhistory', str)

    def append_history(self, str):
        self.tk.call(self._w, 'appendhistory', str)

    def insert(self, index, str):
        """Insert *str* at position *index* in the entry."""
        self.tk.call(self._w, 'insert', index, str)

    def pick(self, index):
        """Select the listbox item at *index*."""
        self.tk.call(self._w, 'pick', index)
class Control(TixWidget):
    """Control - An entry field with value change arrows. The user can
    adjust the value by pressing the two arrow buttons or by entering
    the value directly into the entry. The new value will be checked
    against the user-defined upper and lower limits.

    Subwidget       Class
    ---------       -----
    incr            Button
    decr            Button
    entry           Entry
    label           Label"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__ (self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw)
        self.subwidget_list['incr'] = _dummyButton(self, 'incr')
        self.subwidget_list['decr'] = _dummyButton(self, 'decr')
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')

    def decrement(self):
        """Forward to the widget's Tcl 'decr' subcommand."""
        self.tk.call(self._w, 'decr')

    def increment(self):
        """Forward to the widget's Tcl 'incr' subcommand."""
        self.tk.call(self._w, 'incr')

    def invoke(self):
        """Forward to the widget's Tcl 'invoke' subcommand."""
        self.tk.call(self._w, 'invoke')

    def update(self):
        """Forward to the widget's Tcl 'update' subcommand."""
        self.tk.call(self._w, 'update')
class DirList(TixWidget):
    """DirList - displays a list view of a directory, its previous
    directories and its sub-directories. The user can choose one of
    the directories displayed in the list or change to another directory.

    Subwidget       Class
    ---------       -----
    hlist           HList
    hsb             Scrollbar
    vsb             Scrollbar"""

    # FIXME: It should inherit -superclass tixScrolledHList
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def chdir(self, dir):
        """Display the directory *dir* in the list."""
        self.tk.call(self._w, 'chdir', dir)
class DirTree(TixWidget):
    """DirTree - Directory Listing in a hierarchical view.
    Displays a tree view of a directory, its previous directories and its
    sub-directories. The user can choose one of the directories displayed
    in the list or change to another directory.

    Subwidget       Class
    ---------       -----
    hlist           HList
    hsb             Scrollbar
    vsb             Scrollbar"""

    # FIXME: It should inherit -superclass tixScrolledHList
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def chdir(self, dir):
        """Display the directory *dir* in the tree."""
        self.tk.call(self._w, 'chdir', dir)
class DirSelectBox(TixWidget):
    """DirSelectBox - Motif style directory select box.

    Lets the user navigate the file system and choose a directory.

    Subwidget       Class
    ---------       -----
    dirlist         DirList
    dircbx          ComboBox

    (The previous docstring described FileSelectBox's subwidgets; the
    code below actually creates 'dirlist' and 'dircbx'.)"""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw)
        self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
        self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class ExFileSelectBox(TixWidget):
    """ExFileSelectBox - MS Windows style file select box.
    It provides a convenient method for the user to select files.

    Subwidget       Class
    ---------       -----
    cancel          Button
    ok              Button
    hidden          Checkbutton
    types           ComboBox
    dir             ComboBox
    file            ComboBox
    dirlist         ScrolledListBox
    filelist        ScrolledListBox"""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw)
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
        self.subwidget_list['types'] = _dummyComboBox(self, 'types')
        self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
        self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
        self.subwidget_list['file'] = _dummyComboBox(self, 'file')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')

    def filter(self):
        """Forward to the widget's Tcl 'filter' subcommand."""
        self.tk.call(self._w, 'filter')

    def invoke(self):
        """Forward to the widget's Tcl 'invoke' subcommand."""
        self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
# Should inherit from a Dialog class
class DirSelectDialog(TixWidget):
    """The DirSelectDialog widget presents the directories in the file
    system in a dialog window. The user can use this dialog window to
    navigate through the file system to select the desired directory.

    Subwidgets      Class
    ----------      -----
    dirbox          DirSelectBox"""

    # FIXME: It should inherit -superclass tixDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixDirSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox')
        # cancel and ok buttons are missing

    def popup(self):
        """Show the dialog window."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Hide the dialog window."""
        self.tk.call(self._w, 'popdown')
# Should inherit from a Dialog class
# Should inherit from a Dialog class
class ExFileSelectDialog(TixWidget):
    """ExFileSelectDialog - MS Windows style file select dialog.
    It provides a convenient method for the user to select files.

    Subwidgets      Class
    ----------      -----
    fsbox           ExFileSelectBox"""

    # FIXME: It should inherit -superclass tixDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixExFileSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox')

    def popup(self):
        """Show the dialog window."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Hide the dialog window."""
        self.tk.call(self._w, 'popdown')
class FileSelectBox(TixWidget):
    """FileSelectBox - Motif style file select box.
    It is generally used for
    the user to choose a file. FileSelectBox stores the files mostly
    recently selected into a ComboBox widget so that they can be quickly
    selected again.

    Subwidget       Class
    ---------       -----
    selection       ComboBox
    filter          ComboBox
    dirlist         ScrolledListBox
    filelist        ScrolledListBox"""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw)
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
        self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
        self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')

    def apply_filter(self):              # name of subwidget is same as command
        """Forward to the widget's Tcl 'filter' subcommand."""
        self.tk.call(self._w, 'filter')

    def invoke(self):
        """Forward to the widget's Tcl 'invoke' subcommand."""
        self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
# Should inherit from a Dialog class
class FileSelectDialog(TixWidget):
    """FileSelectDialog - Motif style file select dialog.

    Subwidgets      Class
    ----------      -----
    btns            StdButtonBox
    fsbox           FileSelectBox"""

    # FIXME: It should inherit -superclass tixStdDialogShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixFileSelectDialog',
                           ['options'], cnf, kw)
        self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns')
        self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox')

    def popup(self):
        """Show the dialog window."""
        self.tk.call(self._w, 'popup')

    def popdown(self):
        """Hide the dialog window."""
        self.tk.call(self._w, 'popdown')
class FileEntry(TixWidget):
    """FileEntry - Entry field with button that invokes a FileSelectDialog.
    The user can type in the filename manually. Alternatively, the user can
    press the button widget that sits next to the entry, which will bring
    up a file selection dialog.

    Subwidgets      Class
    ----------      -----
    button          Button
    entry           Entry"""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixFileEntry',
                           ['dialogtype', 'options'], cnf, kw)
        self.subwidget_list['button'] = _dummyButton(self, 'button')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')

    def invoke(self):
        """Forward to the widget's Tcl 'invoke' subcommand."""
        self.tk.call(self._w, 'invoke')

    def file_dialog(self):
        # FIXME: return python object
        pass
class HList(TixWidget):
    """HList - Hierarchy display widget can be used to display any data
    that have a hierarchical structure, for example, file system directory
    trees. The list entries are indented and connected by branch lines
    according to their places in the hierarchy.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixHList',
                           ['columns', 'options'], cnf, kw)

    def add(self, entry, cnf={}, **kw):
        """Create the entry *entry*; returns the raw Tcl result."""
        return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw))

    def add_child(self, parent=None, cnf={}, **kw):
        """Create a child entry under *parent* (top level if None/'')."""
        if not parent:
            parent = ''
        return self.tk.call(
            self._w, 'addchild', parent, *self._options(cnf, kw))

    # -- anchor --
    def anchor_set(self, entry):
        self.tk.call(self._w, 'anchor', 'set', entry)

    def anchor_clear(self):
        self.tk.call(self._w, 'anchor', 'clear')

    def column_width(self, col=0, width=None, chars=None):
        """Query or set the width of column *col*.

        With *chars* given, the width is passed with the Tcl '-char'
        flag; otherwise *width* is passed through as-is."""
        if not chars:
            return self.tk.call(self._w, 'column', 'width', col, width)
        else:
            return self.tk.call(self._w, 'column', 'width', col,
                                '-char', chars)

    # -- deletion --
    def delete_all(self):
        self.tk.call(self._w, 'delete', 'all')

    def delete_entry(self, entry):
        self.tk.call(self._w, 'delete', 'entry', entry)

    def delete_offsprings(self, entry):
        self.tk.call(self._w, 'delete', 'offsprings', entry)

    def delete_siblings(self, entry):
        self.tk.call(self._w, 'delete', 'siblings', entry)

    # -- drag/drop sites --
    def dragsite_set(self, index):
        self.tk.call(self._w, 'dragsite', 'set', index)

    def dragsite_clear(self):
        self.tk.call(self._w, 'dragsite', 'clear')

    def dropsite_set(self, index):
        self.tk.call(self._w, 'dropsite', 'set', index)

    def dropsite_clear(self):
        self.tk.call(self._w, 'dropsite', 'clear')

    # -- column headers --
    def header_create(self, col, cnf={}, **kw):
        self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw))

    def header_configure(self, col, cnf={}, **kw):
        # NOTE: only an explicit cnf=None takes the query branch; the
        # default {} always configures.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'header', 'configure', col)))
        self.tk.call(self._w, 'header', 'configure', col,
                     *self._options(cnf, kw))

    def header_cget(self, col, opt):
        return self.tk.call(self._w, 'header', 'cget', col, opt)

    def header_exists(self, col):
        return self.tk.call(self._w, 'header', 'exists', col)

    def header_delete(self, col):
        self.tk.call(self._w, 'header', 'delete', col)

    def header_size(self, col):
        return self.tk.call(self._w, 'header', 'size', col)

    def hide_entry(self, entry):
        self.tk.call(self._w, 'hide', 'entry', entry)

    # -- indicators --
    def indicator_create(self, entry, cnf={}, **kw):
        self.tk.call(
            self._w, 'indicator', 'create', entry, *self._options(cnf, kw))

    def indicator_configure(self, entry, cnf={}, **kw):
        # Same query-vs-configure convention as header_configure.
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'indicator', 'configure', entry)))
        self.tk.call(
            self._w, 'indicator', 'configure', entry, *self._options(cnf, kw))

    def indicator_cget(self, entry, opt):
        return self.tk.call(self._w, 'indicator', 'cget', entry, opt)

    def indicator_exists(self, entry):
        return self.tk.call (self._w, 'indicator', 'exists', entry)

    def indicator_delete(self, entry):
        self.tk.call(self._w, 'indicator', 'delete', entry)

    def indicator_size(self, entry):
        return self.tk.call(self._w, 'indicator', 'size', entry)

    # -- info queries --
    def info_anchor(self):
        return self.tk.call(self._w, 'info', 'anchor')

    def info_children(self, entry=None):
        c = self.tk.call(self._w, 'info', 'children', entry)
        return self.tk.splitlist(c)

    def info_data(self, entry):
        return self.tk.call(self._w, 'info', 'data', entry)

    def info_exists(self, entry):
        return self.tk.call(self._w, 'info', 'exists', entry)

    def info_hidden(self, entry):
        return self.tk.call(self._w, 'info', 'hidden', entry)

    def info_next(self, entry):
        return self.tk.call(self._w, 'info', 'next', entry)

    def info_parent(self, entry):
        return self.tk.call(self._w, 'info', 'parent', entry)

    def info_prev(self, entry):
        return self.tk.call(self._w, 'info', 'prev', entry)

    def info_selection(self):
        c = self.tk.call(self._w, 'info', 'selection')
        return self.tk.splitlist(c)

    # -- per-cell items --
    def item_cget(self, entry, col, opt):
        return self.tk.call(self._w, 'item', 'cget', entry, col, opt)

    def item_configure(self, entry, col, cnf={}, **kw):
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'item', 'configure', entry, col)))
        self.tk.call(self._w, 'item', 'configure', entry, col,
                     *self._options(cnf, kw))

    def item_create(self, entry, col, cnf={}, **kw):
        self.tk.call(
            self._w, 'item', 'create', entry, col, *self._options(cnf, kw))

    def item_exists(self, entry, col):
        return self.tk.call(self._w, 'item', 'exists', entry, col)

    def item_delete(self, entry, col):
        self.tk.call(self._w, 'item', 'delete', entry, col)

    def nearest(self, y):
        return self.tk.call(self._w, 'nearest', y)

    def see(self, entry):
        self.tk.call(self._w, 'see', entry)

    # -- selection --
    def selection_clear(self, cnf={}, **kw):
        self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))

    def selection_includes(self, entry):
        return self.tk.call(self._w, 'selection', 'includes', entry)

    def selection_set(self, first, last=None):
        self.tk.call(self._w, 'selection', 'set', first, last)

    def show_entry(self, entry):
        return self.tk.call(self._w, 'show', 'entry', entry)

    # -- scrolling --
    def xview(self, *args):
        self.tk.call(self._w, 'xview', *args)

    def yview(self, *args):
        self.tk.call(self._w, 'yview', *args)
class InputOnly(TixWidget):
    """InputOnly - Invisible widget. Unix only.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw)
class LabelEntry(TixWidget):
    """LabelEntry - Entry field with label. Packages an entry widget
    and a label into one mega widget. It can be used to simplify
    the creation of ``entry-form'' type of interface.

    Subwidgets      Class
    ----------      -----
    label           Label
    entry           Entry"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixLabelEntry',
                           ['labelside','options'], cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
class LabelFrame(TixWidget):
    """LabelFrame - Labelled Frame container. Packages a frame widget
    and a label into one mega widget. To create widgets inside a
    LabelFrame widget, one creates the new widgets relative to the
    frame subwidget and manage them inside the frame subwidget.

    Subwidgets      Class
    ----------      -----
    label           Label
    frame           Frame"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixLabelFrame',
                           ['labelside','options'], cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['frame'] = _dummyFrame(self, 'frame')
class ListNoteBook(TixWidget):
    """A notebook-like container whose pages are picked from an hlist.

    Very similar to the TixNoteBook widget: many windows share one
    limited screen area, with exactly one page shown at a time.  The
    user navigates by choosing the desired page name in the hlist
    subwidget."""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw)
        # 'pane' is not an exposed subwidget in Tix; registered here
        # defensively and never destroyed on the Tcl side by us.
        self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane',
                                                        destroy_physically=0)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist')

    def add(self, name, cnf={}, **kw):
        """Create a new page named *name* and return its subwidget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        new_page = TixSubWidget(self, name)
        self.subwidget_list[name] = new_page
        return new_page

    def page(self, name):
        """Return the page subwidget called *name*."""
        return self.subwidget(name)

    def pages(self):
        """Return all page subwidgets, in notebook order."""
        page_names = self.tk.split(self.tk.call(self._w, 'pages'))
        return [self.subwidget(p) for p in page_names]

    def raise_page(self, name):             # raise is a python keyword
        """Bring the page called *name* to the front."""
        self.tk.call(self._w, 'raise', name)
class Meter(TixWidget):
    """The Meter widget can be used to show the progress of a background
    job which may take a long time to execute.
    """

    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixMeter',
                           ['options'], cnf, kw)
class NoteBook(TixWidget):
    """NoteBook - Multi-page container widget (tabbed notebook metaphor).

    Subwidgets      Class
    ----------      -----
    nbframe         NoteBookFrame
    <pages>         page widgets added dynamically with the add method"""

    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixNoteBook', ['options'], cnf, kw)
        # The frame is owned by the Tcl widget; never destroy it from here.
        self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe',
                                                      destroy_physically=0)

    def add(self, name, cnf={}, **kw):
        """Create a new page named *name* and return its subwidget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        new_page = TixSubWidget(self, name)
        self.subwidget_list[name] = new_page
        return new_page

    def delete(self, name):
        """Remove the page named *name*, destroying its subwidget."""
        self.tk.call(self._w, 'delete', name)
        self.subwidget_list[name].destroy()
        del self.subwidget_list[name]

    def page(self, name):
        """Return the page subwidget called *name*."""
        return self.subwidget(name)

    def pages(self):
        """Return all page subwidgets, in notebook order."""
        # subwidgets_all would also report .nbframe, which is not a page.
        return [self.subwidget(p)
                for p in self.tk.split(self.tk.call(self._w, 'pages'))]

    def raise_page(self, name):             # raise is a python keyword
        """Bring the page called *name* to the front."""
        self.tk.call(self._w, 'raise', name)

    def raised(self):
        """Return the name of the page currently at the front."""
        return self.tk.call(self._w, 'raised')
class NoteBookFrame(TixWidget):
    # FIXME: This is dangerous to expose to be called on its own.
    pass
class OptionMenu(TixWidget):
    """OptionMenu - creates a menu button of options.

    Subwidget       Class
    ---------       -----
    menubutton      Menubutton
    menu            Menu"""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixOptionMenu', ['options'], cnf, kw)
        self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
        self.subwidget_list['menu'] = _dummyMenu(self, 'menu')

    def add_command(self, name, cnf={}, **kw):
        """Add a command item named *name* to the menu."""
        self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw))

    def add_separator(self, name, cnf={}, **kw):
        """Add a separator named *name* to the menu."""
        self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw))

    def delete(self, name):
        """Delete the menu item named *name*."""
        self.tk.call(self._w, 'delete', name)

    def disable(self, name):
        """Disable the menu item named *name*."""
        self.tk.call(self._w, 'disable', name)

    def enable(self, name):
        """Enable the menu item named *name*."""
        self.tk.call(self._w, 'enable', name)
class PanedWindow(TixWidget):
    """PanedWindow - Multi-pane container widget.

    Allows the user to interactively manipulate the sizes of several
    panes. The panes can be arranged either vertically or horizontally.
    The user changes the sizes of the panes by dragging the resize
    handle between two panes.

    Subwidgets      Class
    ----------      -----
    <panes>         g/p widgets added dynamically with the add method."""

    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixPanedWindow',
                           ['orientation', 'options'], cnf, kw)

    # add delete forget panecget paneconfigure panes setsize
    def add(self, name, cnf={}, **kw):
        """Create a new pane named *name* and return its subwidget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        self.subwidget_list[name] = TixSubWidget(self, name,
                                                 check_intermediate=0)
        return self.subwidget_list[name]

    def delete(self, name):
        """Remove the pane named *name*, destroying its subwidget."""
        self.tk.call(self._w, 'delete', name)
        self.subwidget_list[name].destroy()
        del self.subwidget_list[name]

    def forget(self, name):
        """Unmap the pane named *name* without destroying it."""
        self.tk.call(self._w, 'forget', name)

    def panecget(self, entry, opt):
        """Return the value of option *opt* for pane *entry*."""
        return self.tk.call(self._w, 'panecget', entry, opt)

    def paneconfigure(self, entry, cnf={}, **kw):
        """Configure pane *entry*; an explicit cnf=None queries instead."""
        if cnf is None:
            return _lst2dict(
                self.tk.split(
                    self.tk.call(self._w, 'paneconfigure', entry)))
        self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw))

    def panes(self):
        """Return the pane subwidgets, in layout order.

        Fixed: the Tcl result must be split into pane names before
        iterating — iterating the raw call result directly could walk
        over a single string character-by-character.  This mirrors the
        splitting already done by the pages() methods elsewhere in this
        module and the fix adopted upstream in tkinter.tix."""
        names = self.tk.splitlist(self.tk.call(self._w, 'panes'))
        return [self.subwidget(x) for x in names]
class PopupMenu(TixWidget):
    """PopupMenu widget can be used as a replacement of the tk_popup command.
    The advantage of the Tix PopupMenu widget is it requires less application
    code to manipulate.

    Subwidgets      Class
    ----------      -----
    menubutton      Menubutton
    menu            Menu"""

    # FIXME: It should inherit -superclass tixShell
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw)
        self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
        self.subwidget_list['menu'] = _dummyMenu(self, 'menu')

    def bind_widget(self, widget):
        """Bind the popup menu to *widget*."""
        self.tk.call(self._w, 'bind', widget._w)

    def unbind_widget(self, widget):
        """Remove the popup-menu binding from *widget*."""
        self.tk.call(self._w, 'unbind', widget._w)

    def post_widget(self, widget, x, y):
        """Post the menu over *widget* at coordinates (x, y)."""
        self.tk.call(self._w, 'post', widget._w, x, y)
class ResizeHandle(TixWidget):
    """Internal widget to draw resize handles on Scrolled widgets."""

    def __init__(self, master, cnf={}, **kw):
        # There seems to be a Tix bug rejecting the configure method
        # Let's try making the flags -static
        flags = ['options', 'command', 'cursorfg', 'cursorbg',
                 'handlesize', 'hintcolor', 'hintwidth',
                 'x', 'y']
        # In fact, x y height width are configurable
        TixWidget.__init__(self, master, 'tixResizeHandle',
                           flags, cnf, kw)

    def attach_widget(self, widget):
        """Attach the resize handle to *widget*."""
        self.tk.call(self._w, 'attachwidget', widget._w)

    def detach_widget(self, widget):
        """Detach the resize handle from *widget*."""
        self.tk.call(self._w, 'detachwidget', widget._w)

    def hide(self, widget):
        """Hide the handle for *widget*."""
        self.tk.call(self._w, 'hide', widget._w)

    def show(self, widget):
        """Show the handle for *widget*."""
        self.tk.call(self._w, 'show', widget._w)
class ScrolledHList(TixWidget):
    """ScrolledHList - HList with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledHList', ['options'],
                           cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledListBox(TixWidget):
    """ScrolledListBox - Listbox with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw)
        self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledText(TixWidget):
    """ScrolledText - Text with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw)
        self.subwidget_list['text'] = _dummyText(self, 'text')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledTList(TixWidget):
    """ScrolledTList - TList with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledTList', ['options'],
                           cnf, kw)
        self.subwidget_list['tlist'] = _dummyTList(self, 'tlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledWindow(TixWidget):
    """ScrolledWindow - Window with automatic scrollbars."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw)
        self.subwidget_list['window'] = _dummyFrame(self, 'window')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class Select(TixWidget):
    """Select - Container of button subwidgets. It can be used to provide
    radio-box or check-box style of selection options for the user.

    Subwidgets are buttons added dynamically using the add method."""

    # FIXME: It should inherit -superclass tixLabelWidget
    def __init__(self, master, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixSelect',
                           ['allowzero', 'radio', 'orientation', 'labelside',
                            'options'],
                           cnf, kw)
        self.subwidget_list['label'] = _dummyLabel(self, 'label')

    def add(self, name, cnf={}, **kw):
        """Add a button named *name* and return its subwidget."""
        self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
        self.subwidget_list[name] = _dummyButton(self, name)
        return self.subwidget_list[name]

    def invoke(self, name):
        """Invoke the button named *name*."""
        self.tk.call(self._w, 'invoke', name)
class Shell(TixWidget):
    """Toplevel window.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw)
class DialogShell(TixWidget):
    """Toplevel window, with popup popdown and center methods.
    It tells the window manager that it is a dialog window and should be
    treated specially. The exact treatment depends on the treatment of
    the window manager.

    Subwidgets - None"""

    # FIXME: It should inherit from Shell
    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master,
                           'tixDialogShell',
                           ['options', 'title', 'mapped',
                            'minheight', 'minwidth',
                            'parent', 'transient'], cnf, kw)

    def popdown(self):
        """Hide the dialog window."""
        self.tk.call(self._w, 'popdown')

    def popup(self):
        """Show the dialog window."""
        self.tk.call(self._w, 'popup')

    def center(self):
        """Center the dialog (Tcl 'center' subcommand)."""
        self.tk.call(self._w, 'center')
class StdButtonBox(TixWidget):
    """StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """

    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixStdButtonBox',
                           ['orientation', 'options'], cnf, kw)
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['apply'] = _dummyButton(self, 'apply')
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['help'] = _dummyButton(self, 'help')

    def invoke(self, name):
        """Invoke the button named *name*; unknown names are ignored.

        Fixed: dict.has_key() is Python-2-only and long deprecated;
        the 'in' operator is the equivalent, forward-compatible test."""
        if name in self.subwidget_list:
            self.tk.call(self._w, 'invoke', name)
class TList(TixWidget):
    """TList - Hierarchy display widget which can be
    used to display data in a tabular format. The list entries of a TList
    widget are similar to the entries in the Tk listbox widget. The main
    differences are (1) the TList widget can display the list entries in a
    two dimensional format and (2) you can use graphical images as well as
    multiple colors and fonts for the list entries.

    Subwidgets - None"""

    def __init__ (self,master=None,cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw)

    # -- active entry --
    def active_set(self, index):
        self.tk.call(self._w, 'active', 'set', index)

    def active_clear(self):
        self.tk.call(self._w, 'active', 'clear')

    # -- anchor --
    def anchor_set(self, index):
        self.tk.call(self._w, 'anchor', 'set', index)

    def anchor_clear(self):
        self.tk.call(self._w, 'anchor', 'clear')

    def delete(self, from_, to=None):
        """Delete entries from *from_* through *to* (a single entry
        when *to* is None)."""
        self.tk.call(self._w, 'delete', from_, to)

    # -- drag/drop sites --
    def dragsite_set(self, index):
        self.tk.call(self._w, 'dragsite', 'set', index)

    def dragsite_clear(self):
        self.tk.call(self._w, 'dragsite', 'clear')

    def dropsite_set(self, index):
        self.tk.call(self._w, 'dropsite', 'set', index)

    def dropsite_clear(self):
        self.tk.call(self._w, 'dropsite', 'clear')

    def insert(self, index, cnf={}, **kw):
        """Insert a new entry at *index*."""
        self.tk.call(self._w, 'insert', index, *self._options(cnf, kw))

    # -- info queries --
    def info_active(self):
        return self.tk.call(self._w, 'info', 'active')

    def info_anchor(self):
        return self.tk.call(self._w, 'info', 'anchor')

    def info_down(self, index):
        return self.tk.call(self._w, 'info', 'down', index)

    def info_left(self, index):
        return self.tk.call(self._w, 'info', 'left', index)

    def info_right(self, index):
        return self.tk.call(self._w, 'info', 'right', index)

    def info_selection(self):
        c = self.tk.call(self._w, 'info', 'selection')
        return self.tk.splitlist(c)

    def info_size(self):
        return self.tk.call(self._w, 'info', 'size')

    def info_up(self, index):
        return self.tk.call(self._w, 'info', 'up', index)

    def nearest(self, x, y):
        return self.tk.call(self._w, 'nearest', x, y)

    def see(self, index):
        self.tk.call(self._w, 'see', index)

    # -- selection --
    def selection_clear(self, cnf={}, **kw):
        self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))

    def selection_includes(self, index):
        return self.tk.call(self._w, 'selection', 'includes', index)

    def selection_set(self, first, last=None):
        self.tk.call(self._w, 'selection', 'set', first, last)

    # -- scrolling --
    def xview(self, *args):
        self.tk.call(self._w, 'xview', *args)

    def yview(self, *args):
        self.tk.call(self._w, 'yview', *args)
class Tree(TixWidget):
    """Tree - The tixTree widget can be used to display hierarchical
    data in a tree form. The user can adjust
    the view of the tree by opening or closing parts of the tree."""

    # FIXME: It should inherit -superclass tixScrolledWidget
    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixTree',
                           ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def autosetmode(self):
        '''This command calls the setmode method for all the entries in this
        Tree widget: if an entry has no child entries, its mode is set to
        none. Otherwise, if the entry has any hidden child entries, its mode is
        set to open; otherwise its mode is set to close.'''
        self.tk.call(self._w, 'autosetmode')

    def close(self, entrypath):
        '''Close the entry given by entryPath if its mode is close.'''
        self.tk.call(self._w, 'close', entrypath)

    def getmode(self, entrypath):
        '''Returns the current mode of the entry given by entryPath.'''
        return self.tk.call(self._w, 'getmode', entrypath)

    def open(self, entrypath):
        '''Open the entry given by entryPath if its mode is open.'''
        self.tk.call(self._w, 'open', entrypath)

    def setmode(self, entrypath, mode='none'):
        '''This command is used to indicate whether the entry given by
        entryPath has children entries and whether the children are visible. mode
        must be one of open, close or none. If mode is set to open, a (+)
        indicator is drawn next to the entry. If mode is set to close, a (-)
        indicator is drawn next to the entry. If mode is set to none, no
        indicators will be drawn for this entry. The default mode is none. The
        open mode indicates the entry has hidden children and this entry can be
        opened by the user. The close mode indicates that all the children of the
        entry are now visible and the entry can be closed by the user.'''
        self.tk.call(self._w, 'setmode', entrypath, mode)
# Could try subclassing Tree for CheckList - would need another arg to init
class CheckList(TixWidget):
    """The CheckList widget
    displays a list of items to be selected by the user. CheckList acts
    similarly to the Tk checkbutton or radiobutton widgets, except it is
    capable of handling many more items than checkbuttons or radiobuttons.
    """

    # FIXME: It should inherit -superclass tixTree
    def __init__(self, master=None, cnf={}, **kw):
        TixWidget.__init__(self, master, 'tixCheckList',
                           ['options'], cnf, kw)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

    def autosetmode(self):
        '''This command calls the setmode method for all the entries in this
        CheckList widget: if an entry has no child entries, its mode is set to
        none. Otherwise, if the entry has any hidden child entries, its mode is
        set to open; otherwise its mode is set to close.'''
        self.tk.call(self._w, 'autosetmode')

    def close(self, entrypath):
        '''Close the entry given by entryPath if its mode is close.'''
        self.tk.call(self._w, 'close', entrypath)

    def getmode(self, entrypath):
        '''Returns the current mode of the entry given by entryPath.'''
        return self.tk.call(self._w, 'getmode', entrypath)

    def open(self, entrypath):
        '''Open the entry given by entryPath if its mode is open.'''
        self.tk.call(self._w, 'open', entrypath)

    def getselection(self, mode='on'):
        '''Returns a list of items whose status matches *mode*. If mode is
        not specified, the list of items in the "on" status will be returned.
        Mode can be on, off, default'''
        c = self.tk.split(self.tk.call(self._w, 'getselection', mode))
        return self.tk.splitlist(c)

    def getstatus(self, entrypath):
        '''Returns the current status of entryPath.'''
        return self.tk.call(self._w, 'getstatus', entrypath)

    def setstatus(self, entrypath, mode='on'):
        '''Sets the status of entryPath to be *mode*. A bitmap will be
        displayed next to the entry if its status is on, off or default.'''
        self.tk.call(self._w, 'setstatus', entrypath, mode)
###########################################################################
### The subclassing below is used to instantiate the subwidgets in each ###
### mega widget. This allows us to access their methods directly.       ###
###########################################################################

# Simple pass-through wrappers: each _dummy* class merely mixes an existing
# Tk/Tix widget class with TixSubWidget so that the named subwidget of a
# mega widget can be manipulated as an ordinary Python widget object.

class _dummyButton(Button, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyCheckbutton(Checkbutton, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyEntry(Entry, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyFrame(Frame, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyLabel(Label, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyListbox(Listbox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyMenu(Menu, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyMenubutton(Menubutton, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyScrollbar(Scrollbar, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyText(Text, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

# Composite wrappers: these also register their own nested subwidgets.

class _dummyScrolledListBox(ScrolledListBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

class _dummyHList(HList, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyScrolledHList(ScrolledHList, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

class _dummyTList(TList, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyComboBox(ComboBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        # The list argument requests the 'fancy' check-name variant of
        # TixSubWidget initialization.
        TixSubWidget.__init__(self, master, name, ['fancy',destroy_physically])
        self.subwidget_list['label'] = _dummyLabel(self, 'label')
        self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
        self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
        self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
                                                                'slistbox')
        try:
            self.subwidget_list['tick'] = _dummyButton(self, 'tick')
            #cross Button : present if created with the fancy option
            self.subwidget_list['cross'] = _dummyButton(self, 'cross')
        except TypeError:
            # unavailable when -fancy not specified
            pass

class _dummyDirList(DirList, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
        self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
        self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')

class _dummyDirSelectBox(DirSelectBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
        self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')

class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
        self.subwidget_list['types'] = _dummyComboBox(self, 'types')
        self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['file'] = _dummyComboBox(self, 'file')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')

class _dummyFileSelectBox(FileSelectBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
        self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
        self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
        self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')

class _dummyFileComboBox(ComboBox, TixSubWidget):
    # Used by DirSelectBox; exposes only the inner directory combo box.
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx')

class _dummyStdButtonBox(StdButtonBox, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
        self.subwidget_list['ok'] = _dummyButton(self, 'ok')
        self.subwidget_list['apply'] = _dummyButton(self, 'apply')
        self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
        self.subwidget_list['help'] = _dummyButton(self, 'help')

class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget):
    # destroy_physically=0 by default: the frame is owned by the notebook.
    def __init__(self, master, name, destroy_physically=0):
        TixSubWidget.__init__(self, master, name, destroy_physically)

class _dummyPanedWindow(PanedWindow, TixSubWidget):
    def __init__(self, master, name, destroy_physically=1):
        TixSubWidget.__init__(self, master, name, destroy_physically)
########################
### Utility Routines ###
########################
#mike Should tixDestroy be exposed as a wrapper? - but not for widgets.
def OptionName(widget):
    '''Returns the qualified path name for the widget. Normally used to set
    default options for subwidgets. See tixwidgets.py'''
    # Direct pass-through to the Tcl-level tixOptionName command.
    return widget.tk.call('tixOptionName', widget._w)
def FileTypeList(dict):
    """Build the Tcl file-types string for an ExFileSelectBox.

    Called with a dictionary argument of the form
    {'*.c': 'C source files', '*.txt': 'Text Files', '*': 'All files'},
    returns a string which can be used to configure the fsbox file types
    in an ExFileSelectBox, i.e.
    '{{*} {* - All files}} {{*.c} {*.c - C source files}} '.

    Note: the parameter keeps its historical name ``dict`` (which shadows
    the built-in) so existing keyword callers continue to work.
    """
    # Render each entry as {{pattern} {pattern - description}} followed by
    # a trailing space; join once instead of quadratic += concatenation.
    return ''.join(['{{' + pat + '} {' + pat + ' - ' + dict[pat] + '}} '
                    for pat in dict.keys()])
# Still to be done:
# tixIconView
class CObjView(TixWidget):
    """The Canvas Object View widget. This is a base
    class of IconView. It implements automatic placement/adjustment of the
    scrollbars according to the canvas objects inside the canvas subwidget.
    The scrollbars are adjusted so that the canvas is just large enough
    to see all the objects.
    """
    # FIXME: It should inherit -superclass tixScrolledWidget
    # Stub: no Python-level methods have been wrapped yet.
    pass
class Grid(TixWidget):
    '''The Tix Grid command creates a new window and makes it into a
    tixGrid widget. Additional options, may be specified on the command
    line or in the option database to configure aspects such as its cursor
    and relief.

    A Grid widget displays its contents in a two dimensional grid of cells.
    Each cell may contain one Tix display item, which may be in text,
    graphics or other formats. See the DisplayStyle class for more information
    about Tix display items. Individual cells, or groups of cells, can be
    formatted with a wide range of attributes, such as its color, relief and
    border.

    Subwidgets - None'''
    # Stub: the Tcl-level grid methods (anchor, delete, edit, set, ...)
    # listed in the comments below have not been wrapped yet.
    pass
# def anchor option ?args ...?
# def bdtype
# def delete dim from ?to?
# def edit apply
# def edit set x y
# def entrycget x y option
# def entryconfigure x y ?option? ?value option value ...?
# def format
# def index
# def move dim from to offset
# def set x y ?-itemtype type? ?option value...?
# def size dim index ?option value ...?
# def unset x y
# def xview
# def yview
class ScrolledGrid(TixWidget):
    '''Scrolled Grid widgets'''
    # FIXME: It should inherit -superclass tixScrolledWidget
    # Stub: no Python-level methods have been wrapped yet.
    pass
|
{
"content_hash": "8dcf1fe070b7b8108c8743db5676ee9b",
"timestamp": "",
"source": "github",
"line_count": 1773,
"max_line_length": 96,
"avg_line_length": 39.186689227298366,
"alnum_prop": 0.6054578427703734,
"repo_name": "MalloyPower/parsing-python",
"id": "b35d423dd14a6179fc3e837c3ba947229e0fa2a5",
"size": "70438",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-2.3/Lib/lib-tk/Tix.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
}
|
"""Glue between metadata sources and the matching logic."""
from __future__ import division, absolute_import, print_function
from collections import namedtuple
import re
from beets import logging
from beets import plugins
from beets import config
from beets.autotag import mb
from jellyfish import levenshtein_distance
from unidecode import unidecode
# Module-wide beets logger shared by the matching helpers below.
log = logging.getLogger('beets')
# Classes used to represent candidate options.
class AlbumInfo(object):
    """Describes a canonical release that may be used to match a release
    in the library. Consists of these data members:

    - ``album``: the release title
    - ``album_id``: MusicBrainz ID; UUID fragment only
    - ``artist``: name of the release's primary artist
    - ``artist_id``
    - ``tracks``: list of TrackInfo objects making up the release
    - ``asin``: Amazon ASIN
    - ``albumtype``: string describing the kind of release
    - ``va``: boolean: whether the release has "various artists"
    - ``year``: release year
    - ``month``: release month
    - ``day``: release day
    - ``label``: music label responsible for the release
    - ``mediums``: the number of discs in this release
    - ``artist_sort``: name of the release's artist for sorting
    - ``releasegroup_id``: MBID for the album's release group
    - ``catalognum``: the label's catalog number for the release
    - ``script``: character set used for metadata
    - ``language``: human language of the metadata
    - ``country``: the release country
    - ``albumstatus``: MusicBrainz release status (Official, etc.)
    - ``media``: delivery mechanism (Vinyl, etc.)
    - ``albumdisambig``: MusicBrainz release disambiguation comment
    - ``artist_credit``: Release-specific artist name
    - ``data_source``: The original data source (MusicBrainz, Discogs, etc.)
    - ``data_url``: The data source release URL.

    The fields up through ``tracks`` are required. The others are
    optional and may be None.
    """
    def __init__(self, album, album_id, artist, artist_id, tracks, asin=None,
                 albumtype=None, va=False, year=None, month=None, day=None,
                 label=None, mediums=None, artist_sort=None,
                 releasegroup_id=None, catalognum=None, script=None,
                 language=None, country=None, albumstatus=None, media=None,
                 albumdisambig=None, artist_credit=None, original_year=None,
                 original_month=None, original_day=None, data_source=None,
                 data_url=None):
        # Store every field verbatim; see the class docstring for meanings.
        self.album = album
        self.album_id = album_id
        self.artist = artist
        self.artist_id = artist_id
        self.tracks = tracks
        self.asin = asin
        self.albumtype = albumtype
        self.va = va
        self.year = year
        self.month = month
        self.day = day
        self.label = label
        self.mediums = mediums
        self.artist_sort = artist_sort
        self.releasegroup_id = releasegroup_id
        self.catalognum = catalognum
        self.script = script
        self.language = language
        self.country = country
        self.albumstatus = albumstatus
        self.media = media
        self.albumdisambig = albumdisambig
        self.artist_credit = artist_credit
        self.original_year = original_year
        self.original_month = original_month
        self.original_day = original_day
        self.data_source = data_source
        self.data_url = data_url

    # Work around a bug in python-musicbrainz-ngs that causes some
    # strings to be bytes rather than Unicode.
    # https://github.com/alastair/python-musicbrainz-ngs/issues/85
    def decode(self, codec='utf8'):
        """Ensure that all string attributes on this object, and the
        constituent `TrackInfo` objects, are decoded to Unicode.
        """
        # Only the known textual fields are touched; undecodable bytes
        # are dropped ('ignore') rather than raising.
        for fld in ['album', 'artist', 'albumtype', 'label', 'artist_sort',
                    'catalognum', 'script', 'language', 'country',
                    'albumstatus', 'albumdisambig', 'artist_credit', 'media']:
            value = getattr(self, fld)
            if isinstance(value, bytes):
                setattr(self, fld, value.decode(codec, 'ignore'))

        if self.tracks:
            for track in self.tracks:
                track.decode(codec)
class TrackInfo(object):
    """Describes a canonical track present on a release. Appears as part
    of an AlbumInfo's ``tracks`` list. Consists of these data members:

    - ``title``: name of the track
    - ``track_id``: MusicBrainz ID; UUID fragment only
    - ``artist``: individual track artist name
    - ``artist_id``
    - ``length``: float: duration of the track in seconds
    - ``index``: position on the entire release
    - ``media``: delivery mechanism (Vinyl, etc.)
    - ``medium``: the disc number this track appears on in the album
    - ``medium_index``: the track's position on the disc
    - ``medium_total``: the number of tracks on the item's disc
    - ``artist_sort``: name of the track artist for sorting
    - ``disctitle``: name of the individual medium (subtitle)
    - ``artist_credit``: Recording-specific artist name
    - ``data_source``: The original data source (MusicBrainz, Discogs, etc.)
    - ``data_url``: The data source release URL.

    Only ``title`` and ``track_id`` are required. The rest of the fields
    may be None. The indices ``index``, ``medium``, and ``medium_index``
    are all 1-based.
    """
    def __init__(self, title, track_id, artist=None, artist_id=None,
                 length=None, index=None, medium=None, medium_index=None,
                 medium_total=None, artist_sort=None, disctitle=None,
                 artist_credit=None, data_source=None, data_url=None,
                 media=None):
        # Store every field verbatim; see the class docstring for meanings.
        self.title = title
        self.track_id = track_id
        self.artist = artist
        self.artist_id = artist_id
        self.length = length
        self.index = index
        self.media = media
        self.medium = medium
        self.medium_index = medium_index
        self.medium_total = medium_total
        self.artist_sort = artist_sort
        self.disctitle = disctitle
        self.artist_credit = artist_credit
        self.data_source = data_source
        self.data_url = data_url

    # As above, work around a bug in python-musicbrainz-ngs.
    def decode(self, codec='utf8'):
        """Ensure that all string attributes on this object are decoded
        to Unicode.
        """
        # Undecodable bytes are dropped ('ignore') rather than raising.
        for fld in ['title', 'artist', 'medium', 'artist_sort', 'disctitle',
                    'artist_credit', 'media']:
            value = getattr(self, fld)
            if isinstance(value, bytes):
                setattr(self, fld, value.decode(codec, 'ignore'))
# Candidate distance scoring.

# Parameters for string distance function.
# Words that can be moved to the end of a string using a comma.
SD_END_WORDS = ['the', 'a', 'an']

# Reduced weights for certain portions of the string: each pattern is
# paired with the fraction of its edit-distance contribution to keep.
SD_PATTERNS = [
    (r'^the ', 0.1),
    (r'[\[\(]?(ep|single)[\]\)]?', 0.0),
    (r'[\[\(]?(featuring|feat|ft)[\. :].+', 0.1),
    (r'\(.*?\)', 0.3),
    (r'\[.*?\]', 0.3),
    (r'(, )?(pt\.|part) .+', 0.2),
]

# Replacements to use before testing distance.
SD_REPLACE = [
    (r'&', 'and'),
]
def _string_dist_basic(str1, str2):
    """Normalized Levenshtein distance between two strings, compared
    case-insensitively on an ASCII transliteration with all
    non-alphanumeric characters stripped. The result is a float in
    [0, 1]: the edit distance divided by the longer string's length.
    """
    assert isinstance(str1, unicode)
    assert isinstance(str2, unicode)
    # Transliterate to ASCII, lowercase, and keep only letters/digits.
    normalized = []
    for s in (str1, str2):
        s = unidecode(s).decode('ascii')
        normalized.append(re.sub(r'[^a-z0-9]', '', s.lower()))
    str1, str2 = normalized
    if not str1 and not str2:
        # Both empty after normalization: identical by convention.
        return 0.0
    return levenshtein_distance(str1, str2) / float(max(len(str1), len(str2)))
def string_dist(str1, str2):
    """Gives an "intuitive" edit distance between two strings. This is
    an edit distance, normalized by the string length, with a number of
    tweaks that reflect intuition about text.
    """
    # None means "no information": two missing strings are identical,
    # one missing string is maximally distant.
    if str1 is None and str2 is None:
        return 0.0
    if str1 is None or str2 is None:
        return 1.0

    str1 = str1.lower()
    str2 = str2.lower()

    # Don't penalize strings that move certain words to the end. For
    # example, "the something" should be considered equal to
    # "something, the".
    for word in SD_END_WORDS:
        if str1.endswith(', %s' % word):
            str1 = '%s %s' % (word, str1[:-len(word) - 2])
        if str2.endswith(', %s' % word):
            str2 = '%s %s' % (word, str2[:-len(word) - 2])

    # Perform a couple of basic normalizing substitutions.
    for pat, repl in SD_REPLACE:
        str1 = re.sub(pat, repl, str1)
        str2 = re.sub(pat, repl, str2)

    # Change the weight for certain string portions matched by a set
    # of regular expressions. We gradually change the strings and build
    # up penalties associated with parts of the string that were
    # deleted. NOTE: the order of SD_PATTERNS matters here, since each
    # accepted deletion rebases str1/str2 for the following patterns.
    base_dist = _string_dist_basic(str1, str2)
    penalty = 0.0
    for pat, weight in SD_PATTERNS:
        # Get strings that drop the pattern.
        case_str1 = re.sub(pat, '', str1)
        case_str2 = re.sub(pat, '', str2)

        if case_str1 != str1 or case_str2 != str2:
            # If the pattern was present (i.e., it is deleted in the
            # the current case), recalculate the distances for the
            # modified strings.
            case_dist = _string_dist_basic(case_str1, case_str2)
            case_delta = max(0.0, base_dist - case_dist)
            if case_delta == 0.0:
                continue

            # Shift our baseline strings down (to avoid rematching the
            # same part of the string) and add a scaled distance
            # amount to the penalties.
            str1 = case_str1
            str2 = case_str2
            base_dist = case_dist
            penalty += weight * case_delta

    return base_dist + penalty
class LazyClassProperty(object):
    """A read-only descriptor whose getter runs at most once. The first
    computed value is cached on the descriptor itself, so all later
    accesses -- through the class or through *any* instance -- share it.
    """
    def __init__(self, getter):
        self.getter = getter
        self.computed = False

    def __get__(self, obj, owner):
        # Fast path: hand back the cached value once it exists.
        if self.computed:
            return self.value
        # Slow path (first access only): invoke the getter with the
        # owning class and remember the result forever.
        self.value = self.getter(owner)
        self.computed = True
        return self.value
class Distance(object):
    """Keeps track of multiple distance penalties. Provides a single
    weighted distance for all penalties as well as a weighted distance
    for each individual penalty.
    """
    def __init__(self):
        # Maps penalty key -> list of raw penalty values, each in [0, 1].
        self._penalties = {}

    @LazyClassProperty
    def _weights(cls):  # noqa
        """A dictionary from keys to floating-point weights.
        """
        # Read once from the user configuration and cached for the
        # lifetime of the process (see LazyClassProperty).
        weights_view = config['match']['distance_weights']
        weights = {}
        for key in weights_view.keys():
            weights[key] = weights_view[key].as_number()
        return weights

    # Access the components and their aggregates.

    @property
    def distance(self):
        """Return a weighted and normalized distance across all
        penalties.
        """
        dist_max = self.max_distance
        if dist_max:
            return self.raw_distance / self.max_distance
        return 0.0

    @property
    def max_distance(self):
        """Return the maximum distance penalty (normalization factor).
        """
        dist_max = 0.0
        for key, penalty in self._penalties.iteritems():
            dist_max += len(penalty) * self._weights[key]
        return dist_max

    @property
    def raw_distance(self):
        """Return the raw (denormalized) distance.
        """
        dist_raw = 0.0
        for key, penalty in self._penalties.iteritems():
            dist_raw += sum(penalty) * self._weights[key]
        return dist_raw

    def items(self):
        """Return a list of (key, dist) pairs, with `dist` being the
        weighted distance, sorted from highest to lowest. Does not
        include penalties with a zero value.
        """
        list_ = []
        for key in self._penalties:
            dist = self[key]
            if dist:
                list_.append((key, dist))
        # Convert distance into a negative float we can sort items in
        # ascending order (for keys, when the penalty is equal) and
        # still get the items with the biggest distance first.
        return sorted(list_, key=lambda (key, dist): (0 - dist, key))

    # Behave like a float.

    def __cmp__(self, other):
        # Python 2 three-way comparison against any float-comparable value.
        return cmp(self.distance, other)

    def __float__(self):
        return self.distance

    def __sub__(self, other):
        return self.distance - other

    def __rsub__(self, other):
        return other - self.distance

    def __unicode__(self):
        return "{0:.2f}".format(self.distance)

    # Behave like a dict.

    def __getitem__(self, key):
        """Returns the weighted distance for a named penalty.
        """
        dist = sum(self._penalties[key]) * self._weights[key]
        dist_max = self.max_distance
        if dist_max:
            return dist / dist_max
        return 0.0

    def __iter__(self):
        return iter(self.items())

    def __len__(self):
        return len(self.items())

    def keys(self):
        return [key for key, _ in self.items()]

    def update(self, dist):
        """Adds all the distance penalties from `dist`.
        """
        if not isinstance(dist, Distance):
            raise ValueError(
                u'`dist` must be a Distance object, not {0}'.format(type(dist))
            )
        for key, penalties in dist._penalties.iteritems():
            self._penalties.setdefault(key, []).extend(penalties)

    # Adding components.

    def _eq(self, value1, value2):
        """Returns True if `value1` is equal to `value2`. `value1` may
        be a compiled regular expression, in which case it will be
        matched against `value2`.
        """
        if isinstance(value1, re._pattern_type):
            return bool(value1.match(value2))
        return value1 == value2

    def add(self, key, dist):
        """Adds a distance penalty. `key` must correspond with a
        configured weight setting. `dist` must be a float between 0.0
        and 1.0, and will be added to any existing distance penalties
        for the same key.
        """
        if not 0.0 <= dist <= 1.0:
            raise ValueError(
                u'`dist` must be between 0.0 and 1.0, not {0}'.format(dist)
            )
        self._penalties.setdefault(key, []).append(dist)

    def add_equality(self, key, value, options):
        """Adds a distance penalty of 1.0 if `value` doesn't match any
        of the values in `options`. If an option is a compiled regular
        expression, it will be considered equal if it matches against
        `value`.
        """
        if not isinstance(options, (list, tuple)):
            options = [options]
        for opt in options:
            if self._eq(opt, value):
                dist = 0.0
                break
        else:
            dist = 1.0
        self.add(key, dist)

    def add_expr(self, key, expr):
        """Adds a distance penalty of 1.0 if `expr` evaluates to True,
        or 0.0.
        """
        if expr:
            self.add(key, 1.0)
        else:
            self.add(key, 0.0)

    def add_number(self, key, number1, number2):
        """Adds a distance penalty of 1.0 for each number of difference
        between `number1` and `number2`, or 0.0 when there is no
        difference. Use this when there is no upper limit on the
        difference between the two numbers.
        """
        diff = abs(number1 - number2)
        if diff:
            for i in range(diff):
                self.add(key, 1.0)
        else:
            self.add(key, 0.0)

    def add_priority(self, key, value, options):
        """Adds a distance penalty that corresponds to the position at
        which `value` appears in `options`. A distance penalty of 0.0
        for the first option, or 1.0 if there is no matching option. If
        an option is a compiled regular expression, it will be
        considered equal if it matches against `value`.
        """
        if not isinstance(options, (list, tuple)):
            options = [options]
        unit = 1.0 / (len(options) or 1)
        for i, opt in enumerate(options):
            if self._eq(opt, value):
                dist = i * unit
                break
        else:
            dist = 1.0
        self.add(key, dist)

    def add_ratio(self, key, number1, number2):
        """Adds a distance penalty for `number1` as a ratio of `number2`.
        `number1` is bound at 0 and `number2`.
        """
        number = float(max(min(number1, number2), 0))
        if number2:
            dist = number / number2
        else:
            dist = 0.0
        self.add(key, dist)

    def add_string(self, key, str1, str2):
        """Adds a distance penalty based on the edit distance between
        `str1` and `str2`.
        """
        dist = string_dist(str1, str2)
        self.add(key, dist)
# Structures that compose all the information for a candidate match:
# the Distance score, the AlbumInfo/TrackInfo metadata, and (for albums)
# the item-to-track mapping plus any unmatched items/tracks.
AlbumMatch = namedtuple('AlbumMatch', ['distance', 'info', 'mapping',
                                       'extra_items', 'extra_tracks'])

TrackMatch = namedtuple('TrackMatch', ['distance', 'info'])
# Aggregation of sources.
def album_for_mbid(release_id):
    """Get an AlbumInfo object for a MusicBrainz release ID. Return None
    if the ID is not found or the MusicBrainz API call fails (the error
    is logged). Subscribed plugins are notified about a successful fetch.
    """
    try:
        album = mb.album_for_id(release_id)
        if not album:
            return None
        plugins.send(u'albuminfo_received', info=album)
        return album
    except mb.MusicBrainzAPIError as exc:
        exc.log(log)
def track_for_mbid(recording_id):
    """Get a TrackInfo object for a MusicBrainz recording ID. Return None
    if the ID is not found or the MusicBrainz API call fails (the error
    is logged). Subscribed plugins are notified about a successful fetch.
    """
    try:
        track = mb.track_for_id(recording_id)
        if not track:
            return None
        plugins.send(u'trackinfo_received', info=track)
        return track
    except mb.MusicBrainzAPIError as exc:
        exc.log(log)
def albums_for_id(album_id):
    """Get a list of albums for an ID."""
    candidates = [album_for_mbid(album_id)]
    # Announce each plugin-provided album to listeners as it is collected.
    for plugin_album in plugins.album_for_id(album_id):
        plugins.send(u'albuminfo_received', info=plugin_album)
        candidates.append(plugin_album)
    # Drop the None placeholders left by failed lookups.
    return filter(None, candidates)
def tracks_for_id(track_id):
    """Get a list of tracks for an ID."""
    candidates = [track_for_mbid(track_id)]
    # Announce each plugin-provided track to listeners as it is collected.
    for plugin_track in plugins.track_for_id(track_id):
        plugins.send(u'trackinfo_received', info=plugin_track)
        candidates.append(plugin_track)
    # Drop the None placeholders left by failed lookups.
    return filter(None, candidates)
def album_candidates(items, artist, album, va_likely):
    """Search for album matches. ``items`` is a list of Item objects
    that make up the album. ``artist`` and ``album`` are the respective
    names (strings), which may be derived from the item list or may be
    entered by the user. ``va_likely`` is a boolean indicating whether
    the album is likely to be a "various artists" release.
    """
    out = []

    def _search_musicbrainz(search_artist):
        # Run one MusicBrainz album search, logging (not raising) API errors.
        try:
            out.extend(mb.match_album(search_artist, album, len(items)))
        except mb.MusicBrainzAPIError as exc:
            exc.log(log)

    # Base candidates if we have album and artist to match.
    if artist and album:
        _search_musicbrainz(artist)

    # Also add VA matches from MusicBrainz where appropriate.
    if va_likely and album:
        _search_musicbrainz(None)

    # Candidates from plugins.
    out.extend(plugins.candidates(items, artist, album, va_likely))

    # Notify subscribed plugins about fetched album info
    for candidate in out:
        plugins.send(u'albuminfo_received', info=candidate)

    return out
def item_candidates(item, artist, title):
    """Search for item matches. ``item`` is the Item to be matched.
    ``artist`` and ``title`` are strings and either reflect the item or
    are specified by the user.
    """
    out = []

    # MusicBrainz candidates (API errors are logged, not raised).
    if artist and title:
        try:
            out.extend(mb.match_track(artist, title))
        except mb.MusicBrainzAPIError as exc:
            exc.log(log)

    # Plugin candidates.
    out.extend(plugins.item_candidates(item, artist, title))

    # Notify subscribed plugins about fetched track info
    for candidate in out:
        plugins.send(u'trackinfo_received', info=candidate)

    return out
|
{
"content_hash": "af458e8242ceef26298c34bf9d23a211",
"timestamp": "",
"source": "github",
"line_count": 594,
"max_line_length": 79,
"avg_line_length": 34.936026936026934,
"alnum_prop": 0.6027370855821126,
"repo_name": "Freso/beets",
"id": "5c8e0e2c502d9d40ca98091c0b6a85de12fd955f",
"size": "21423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "beets/autotag/hooks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2951"
},
{
"name": "HTML",
"bytes": "3307"
},
{
"name": "JavaScript",
"bytes": "85950"
},
{
"name": "Python",
"bytes": "1576789"
},
{
"name": "Shell",
"bytes": "7413"
}
],
"symlink_target": ""
}
|
import datetime
import csv

# Time-dimension generator: writes one row per 10-minute interval over the
# year starting 2013-10-01 to timetable.csv, as quoted
# (surrogate key, "YYYY-MM-DD", "HH:MM:SS") triples.
START = datetime.datetime(2013, 10, 1)
END = datetime.datetime(2014, 10, 1)
STEP_SECONDS = 600  # 10 minutes


def _generate_rows(start, end, step_seconds):
    """Yield (key, date_string, time_string) tuples for every step in
    [start, end), with keys numbered from 1.
    """
    # Derive the span from the endpoints instead of a hard-coded second
    # count so the dates and the row count can never disagree.
    total_seconds = int((end - start).total_seconds())
    for timekey, offset in enumerate(range(0, total_seconds, step_seconds), 1):
        moment = start + datetime.timedelta(seconds=offset)
        yield timekey, moment.strftime("%Y-%m-%d"), moment.strftime("%H:%M:%S")


# newline='' is the documented way to open a csv output file (the writer
# emits its own line terminators); the context manager guarantees the
# file is flushed and closed.
with open('timetable.csv', 'w', newline='') as myfile:
    wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
    wr.writerows(_generate_rows(START, END, STEP_SECONDS))
|
{
"content_hash": "5e47df3662a6c6376976e3d583678f92",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 100,
"avg_line_length": 34.95,
"alnum_prop": 0.6680972818311874,
"repo_name": "meaton00/class_project",
"id": "1cb138e09c328efebfd23edd2d5b8f2f005af3b8",
"size": "845",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/datetime_dimension/createtimedimension-10min-csv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24715"
}
],
"symlink_target": ""
}
|
from typing import TypeVar, Generic, Callable, Any
# Generic type variables shared by the Either hierarchy below.
T = TypeVar('T')
U = TypeVar('U')
class Either(Generic[T]):
    """
    Base class of the Either type: a value of Either[A, B] is either a
    Left[A] (failure) or a Right[B] (success), but never both at the same
    time. This class carries the wrapped value and the operations shared
    by both branches.
    """

    def __init__(self, value: T) -> None:
        self.value = value

    def __eq__(self, other: object) -> bool:
        # Equal only to another Either of the same branch wrapping an
        # equal value.
        if not isinstance(other, Either):
            return False
        return self.value == other.value and self.is_right() == other.is_right()

    def case(self, error: Callable[[T], U], success: Callable[[T], U]) -> U:
        """
        Dispatch on the branch: apply exactly one of the two handlers to
        the wrapped value and return its result.

        :params error: function to call when Either is Left
        :type error: Function(A) -> B
        :params success: function to call when Either is Right
        :type success: Function(A) -> B
        :returns: the chosen handler's result
        :rtype: B
        """
        handler = success if self.is_right() else error
        return handler(self.value)

    def ap(self, applicative):
        """
        Apply the value inside this Either to another applicative by
        mapping the other applicative with it.

        :param applicative: applicative contains function
        :type applicative: Either[B]
        :returns: new Either with result of contains function
        :rtype: Either[A(B)]
        """
        return applicative.map(self.value)

    def to_box(self):
        """
        Convert to a Box holding the same value.

        :returns: Box monad with previous value
        :rtype: Box[A]
        """
        from pymonet.box import Box  # local import avoids an import cycle
        return Box(self.value)

    def to_try(self):
        """
        Convert to a Try: a Right becomes a successful Try, a Left a
        failed one.

        :returns: resolved Try monad with previous value
        :rtype: Try[A]
        """
        from pymonet.monad_try import Try  # local import avoids an import cycle
        return Try(self.value, is_success=self.is_right())

    def to_lazy(self):
        """
        Convert to a Lazy that produces the wrapped value on evaluation.

        :returns: Lazy monad with function returning previous value
        :rtype: Lazy[Function() -> A]
        """
        from pymonet.lazy import Lazy  # local import avoids an import cycle
        return Lazy(lambda: self.value)

    def is_right(self):
        # Branch predicate; overridden by the Left and Right subclasses.
        pass
class Left(Either, Generic[T]):
    """The failure (left) branch of Either; wraps an error value."""

    def map(self, _: Callable[[Any], Any]) -> 'Left[T]':
        """
        Ignore the mapper and return a new Left carrying the same value.

        :returns: Copy of self
        :rtype: Left[A]
        """
        return Left(self.value)

    def bind(self, _) -> 'Left[T]':
        """
        Ignore the mapper and return this Left itself, unchanged.

        :returns: self
        :rtype: Left[A]
        """
        return self

    def ap(self, monad):
        """
        Ignore the other monad and return a copy of this Left.

        :returns: Copy of self
        :rtype: Left[A]
        """
        return Left(self.value)

    def is_left(self) -> bool:
        """
        :returns: True
        :rtype: Boolean
        """
        return True

    def is_right(self) -> bool:
        """
        :returns: False
        :rtype: Boolean
        """
        return False

    def to_maybe(self):
        """
        Transform Either to Maybe.

        :returns: Empty Maybe
        :rtype: Maybe[None]
        """
        from pymonet.maybe import Maybe
        return Maybe.nothing()

    def to_validation(self):
        """
        Transform Either into Validation.

        :returns: failed Validation monad with previous value as error
        :rtype: Validation[None, [A]]
        """
        from pymonet.validation import Validation
        return Validation.fail([self.value])
class Right(Either):
    """The successful variant of Either."""
    def map(self, mapper: Callable[[T], U]) -> Either[U]:
        """
        Take mapper function and return new instance of Right with mapped value.

        :param mapper: function to apply on Right value
        :type mapper: Function(A) -> B
        :returns: new Right with result of mapper
        :rtype: Right[B]
        """
        return Right(mapper(self.value))
    def bind(self, mapper: Callable[[T], U]) -> U:
        """
        Take mapper function and return the result of calling it with the Right value.

        :param mapper: function to apply on Right value
        :type mapper: Function(A) -> Either[B]
        :returns: result of mapper
        :rtype: Either[B]
        """
        return mapper(self.value)
    def is_right(self) -> bool:
        """
        :returns: True
        :rtype: Boolean
        """
        return True
    def is_left(self) -> bool:
        """
        :returns: False
        :rtype: Boolean
        """
        return False
    def to_maybe(self):
        """
        Transform Either to Maybe.

        :returns: Maybe with previous value
        :rtype: Maybe[A]
        """
        from pymonet.maybe import Maybe
        return Maybe.just(self.value)
    def to_validation(self):
        """
        Transform Either into Validation.

        :returns: successful Validation monad with previous value
        :rtype: Validation[A, []]
        """
        from pymonet.validation import Validation
        return Validation.success(self.value)
|
{
"content_hash": "4beb6f4c746cd48d682561d0f5293948",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 119,
"avg_line_length": 25.535885167464116,
"alnum_prop": 0.563050402848042,
"repo_name": "przemyslawjanpietrzak/pyMonet",
"id": "8d52cb87145c0cbaff99d0814e076db60a6dab75",
"size": "5337",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pymonet/either.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "507"
},
{
"name": "Python",
"bytes": "91428"
}
],
"symlink_target": ""
}
|
import sys
import os
from os import system
import tashi.services.layoutlocality.localityservice as localityservice
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from tashi.util import getConfig
(config, configFiles) = getConfig(["Client"])
host = config.get('LocalityService', 'host')
port = int(config.get('LocalityService', 'port'))
socket = TSocket.TSocket(host, port)
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = localityservice.Client(protocol)
transport.open()
while True:
line1 = "\n"
line2 = "\n"
while line1 != "":
line1 = sys.stdin.readline()
if line1 == "":
sys.exit(0)
if line1 != "\n":
break
line1 = line1.strip()
while line2 != "":
line2 = sys.stdin.readline()
if line2 == "":
sys.exit(0)
if line2 != "\n":
break
line2 = line2.strip()
sources = line1.split(" ")
destinations = line2.split(" ")
mat = client.getHopCountMatrix(sources, destinations)
for r in mat:
for c in r:
print '%f\t'%c,
print '\n',
print '\n',
|
{
"content_hash": "3433587de75bb7dcc12b4607ae2a6fc5",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 71,
"avg_line_length": 22.94,
"alnum_prop": 0.7061900610287707,
"repo_name": "apache/tashi",
"id": "49ecb1190f6fba4e1c1304c6841dc4ffc77860f4",
"size": "1953",
"binary": false,
"copies": "1",
"ref": "refs/heads/cmu",
"path": "src/utils/getLocality.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3092"
},
{
"name": "Makefile",
"bytes": "7418"
},
{
"name": "PHP",
"bytes": "28750"
},
{
"name": "Python",
"bytes": "573021"
},
{
"name": "Shell",
"bytes": "25933"
},
{
"name": "Thrift",
"bytes": "7777"
}
],
"symlink_target": ""
}
|
import os
import sys
from _pydevd_bundle.pydevd_constants import CYTHON_SUPPORTED
# PYDEVD_USE_CYTHON: 'YES' forces the cython speedups, 'NO' disables them,
# unset (None) means: try cython and fall back to pure python if unavailable.
use_cython = os.getenv('PYDEVD_USE_CYTHON', None)
dirname = os.path.dirname(os.path.dirname(__file__))
# Do not show incorrect warning for .egg files for Remote debugger
if not CYTHON_SUPPORTED or dirname.endswith('.egg'):
    # Do not try to import cython extensions if cython isn't supported
    use_cython = 'NO'
def delete_old_compiled_extensions():
    """Remove stale compiled pydevd speedup artifacts (*.so files and the
    build directory) so that a mismatched cython extension is not used."""
    pydev_dir = os.path.dirname(os.path.dirname(__file__))
    _pydevd_bundle_dir = os.path.dirname(__file__)
    _pydevd_frame_eval_dir = os.path.join(pydev_dir, '_pydevd_frame_eval')
    try:
        import shutil
        # Both extension directories get the same cleanup treatment.
        for extension_dir in (_pydevd_bundle_dir, _pydevd_frame_eval_dir):
            for entry in os.listdir(extension_dir):
                if entry.startswith("pydevd") and entry.endswith(".so"):
                    os.remove(os.path.join(extension_dir, entry))
        build_dir = os.path.join(pydev_dir, "build")
        if os.path.exists(build_dir):
            shutil.rmtree(os.path.join(pydev_dir, "build"))
    except OSError:
        # Best effort only: tell the user to clean up manually.
        from _pydev_bundle.pydev_monkey import log_error_once
        log_error_once("warning: failed to delete old cython speedups. Please delete all *.so files from the directories "
                       "\"%s\" and \"%s\"" % (_pydevd_bundle_dir, _pydevd_frame_eval_dir))
# Select which trace_dispatch implementation this module re-exports,
# depending on the PYDEVD_USE_CYTHON setting resolved above.
if use_cython == 'YES':
    # We must import the cython version if forcing cython
    from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips
    def trace_dispatch(py_db, frame, event, arg):
        # Thin pass-through so the module always exposes a plain-python callable.
        return _trace_dispatch(py_db, frame, event, arg)
elif use_cython == 'NO':
    # Use the regular version if not forcing cython
    from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips  # @UnusedImport
elif use_cython is None:
    # Regular: use fallback if not found and give message to user
    try:
        from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips
        def trace_dispatch(py_db, frame, event, arg):
            return _trace_dispatch(py_db, frame, event, arg)
        # This version number is always available
        from _pydevd_bundle.pydevd_additional_thread_info_regular import version as regular_version
        # This version number from the already compiled cython extension
        from _pydevd_bundle.pydevd_cython_wrapper import version as cython_version
        if cython_version != regular_version:
            # Stale compiled extension: delete it and fall through to the
            # pure-python fallback via the ImportError handler below.
            delete_old_compiled_extensions()
            raise ImportError()
    except ImportError:
        from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo  # @UnusedImport
        from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips  # @UnusedImport
        from _pydev_bundle.pydev_monkey import log_error_once
        log_error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." % (
            sys.executable, os.path.join(dirname, 'setup_cython.py')))
else:
    raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,))
|
{
"content_hash": "e4d7393b4959fef8809ab65fb6335636",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 142,
"avg_line_length": 50.20289855072464,
"alnum_prop": 0.6830254041570438,
"repo_name": "signed/intellij-community",
"id": "18f579685706cdcc7374de37bc67a84360623e28",
"size": "3596",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/helpers/pydev/_pydevd_bundle/pydevd_trace_dispatch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "20665"
},
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "Batchfile",
"bytes": "60477"
},
{
"name": "C",
"bytes": "195247"
},
{
"name": "C#",
"bytes": "1264"
},
{
"name": "C++",
"bytes": "195243"
},
{
"name": "CMake",
"bytes": "1675"
},
{
"name": "CSS",
"bytes": "201445"
},
{
"name": "CoffeeScript",
"bytes": "1759"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Gherkin",
"bytes": "14382"
},
{
"name": "Groovy",
"bytes": "3098586"
},
{
"name": "HLSL",
"bytes": "57"
},
{
"name": "HTML",
"bytes": "1839859"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "160881504"
},
{
"name": "JavaScript",
"bytes": "570364"
},
{
"name": "Jupyter Notebook",
"bytes": "93222"
},
{
"name": "Kotlin",
"bytes": "2882191"
},
{
"name": "Lex",
"bytes": "184230"
},
{
"name": "Makefile",
"bytes": "2352"
},
{
"name": "NSIS",
"bytes": "49890"
},
{
"name": "Objective-C",
"bytes": "27941"
},
{
"name": "Perl",
"bytes": "903"
},
{
"name": "Perl6",
"bytes": "26"
},
{
"name": "Protocol Buffer",
"bytes": "6639"
},
{
"name": "Python",
"bytes": "24063672"
},
{
"name": "Roff",
"bytes": "35232"
},
{
"name": "Ruby",
"bytes": "1217"
},
{
"name": "Scala",
"bytes": "11698"
},
{
"name": "Shell",
"bytes": "63392"
},
{
"name": "Smalltalk",
"bytes": "338"
},
{
"name": "TeX",
"bytes": "25473"
},
{
"name": "Thrift",
"bytes": "1846"
},
{
"name": "TypeScript",
"bytes": "9469"
},
{
"name": "Visual Basic",
"bytes": "77"
},
{
"name": "XSLT",
"bytes": "113040"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from datetime import datetime
from django.core.urlresolvers import reverse
from sentry.models import Release, ReleaseCommit
from sentry.testutils import APITestCase
class ProjectReleaseListTest(APITestCase):
    """GET tests for the `sentry-api-0-project-releases` endpoint."""
    def test_simple(self):
        """Only the requested project's releases are returned, ordered most
        recent first (release3 sorts ahead because its date_released is the
        newest timestamp)."""
        self.login_as(user=self.user)
        team = self.create_team()
        project1 = self.create_project(team=team, name='foo')
        project2 = self.create_project(team=team, name='bar')
        release1 = Release.objects.create(
            project=project1,
            version='1',
            date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
        )
        release2 = Release.objects.create(
            project=project1,
            version='2',
            date_added=datetime(2013, 8, 14, 3, 8, 24, 880386),
        )
        release3 = Release.objects.create(
            project=project1,
            version='3',
            date_added=datetime(2013, 8, 12, 3, 8, 24, 880386),
            date_released=datetime(2013, 8, 15, 3, 8, 24, 880386),
        )
        # Release on a different project: must not appear in the listing.
        Release.objects.create(
            project=project2,
            version='1',
        )
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project1.organization.slug,
            'project_slug': project1.slug,
        })
        response = self.client.get(url, format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 3
        assert response.data[0]['version'] == release3.version
        assert response.data[1]['version'] == release2.version
        assert response.data[2]['version'] == release1.version
    def test_query_filter(self):
        """`?query=` filters by version: 'foo' matches 'foobar' while 'bar'
        does not (prefix-style matching, per the assertions below)."""
        self.login_as(user=self.user)
        team = self.create_team()
        project = self.create_project(team=team, name='foo')
        release = Release.objects.create(
            project=project,
            version='foobar',
            date_added=datetime(2013, 8, 13, 3, 8, 24, 880386),
        )
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.get(url + '?query=foo', format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 1
        assert response.data[0]['version'] == release.version
        response = self.client.get(url + '?query=bar', format='json')
        assert response.status_code == 200, response.content
        assert len(response.data) == 0
class ProjectReleaseCreateTest(APITestCase):
    """POST tests for the `sentry-api-0-project-releases` endpoint."""
    def test_minimal(self):
        """A bare version string creates a release (201) with no owner."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.post(url, data={
            'version': '1.2.1',
        })
        assert response.status_code == 201, response.content
        assert response.data['version']
        release = Release.objects.get(
            project=project,
            version=response.data['version'],
        )
        assert not release.owner
    def test_duplicate(self):
        """Re-posting an existing version responds 208 (Already Reported)."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        Release.objects.create(version='1.2.1', project=project)
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.post(url, data={
            'version': '1.2.1',
        })
        assert response.status_code == 208, response.content
    def test_version_whitespace(self):
        """Versions containing any whitespace character (newline, form feed,
        tab) are rejected with 400; the clean version is accepted with 201."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.post(url, data={
            'version': '1.2.3\n',
        })
        assert response.status_code == 400, response.content
        response = self.client.post(url, data={
            'version': '\n1.2.3',
        })
        assert response.status_code == 400, response.content
        response = self.client.post(url, data={
            'version': '1.\n2.3',
        })
        assert response.status_code == 400, response.content
        response = self.client.post(url, data={
            'version': '1.2.3\f',
        })
        assert response.status_code == 400, response.content
        response = self.client.post(url, data={
            'version': '1.2.3\t',
        })
        assert response.status_code == 400, response.content
        response = self.client.post(url, data={
            'version': '1.2.3',
        })
        assert response.status_code == 201, response.content
        assert response.data['version'] == '1.2.3'
        release = Release.objects.get(
            project=project,
            version=response.data['version'],
        )
        assert not release.owner
    def test_features(self):
        """The `owner` field may be supplied by email and is resolved to the
        corresponding user on the created release."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.post(url, data={
            'version': '1.2.1',
            'owner': self.user.email,
        })
        assert response.status_code == 201, response.content
        assert response.data['version']
        release = Release.objects.get(
            project=project,
            version=response.data['version'],
        )
        assert release.owner == self.user
    def test_commits(self):
        """A `commits` payload creates ReleaseCommit rows, retrievable in
        order for the new release."""
        self.login_as(user=self.user)
        project = self.create_project(name='foo')
        url = reverse('sentry-api-0-project-releases', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
        })
        response = self.client.post(url, data={
            'version': '1.2.1',
            'commits': [
                {'id': 'a' * 40},
                {'id': 'b' * 40},
            ]
        })
        assert response.status_code == 201, (response.status_code, response.content)
        assert response.data['version']
        release = Release.objects.get(
            project=project,
            version=response.data['version'],
        )
        rc_list = list(ReleaseCommit.objects.filter(
            release=release,
        ).select_related('commit', 'commit__author').order_by('order'))
        assert len(rc_list) == 2
|
{
"content_hash": "cba74cab0957a3a5344ab771c2acf6d9",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 84,
"avg_line_length": 31.610091743119266,
"alnum_prop": 0.5694383979103178,
"repo_name": "alexm92/sentry",
"id": "d460fcf1eab7cdc7e99b5d106e3fea8ceaa08d5d",
"size": "6891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sentry/api/endpoints/test_project_releases.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "156715"
},
{
"name": "HTML",
"bytes": "191265"
},
{
"name": "JavaScript",
"bytes": "457236"
},
{
"name": "Makefile",
"bytes": "4689"
},
{
"name": "Python",
"bytes": "7262450"
}
],
"symlink_target": ""
}
|
"""
lossy_flow_accumulator.py: Component to accumulate flow and calc drainage area,
while permitting gain or loss of discharge during flow.
DEJH, late 2018
"""
import sys
from landlab.components.flow_accum import (
FlowAccumulator,
flow_accum_bw,
flow_accum_to_n,
)
if sys.version_info[0] >= 3:
from inspect import signature
class LossyFlowAccumulator(FlowAccumulator):
"""Component to calculate drainage area and accumulate flow, while
permitting dynamic loss or gain of flow downstream.
This component is closely related to the FlowAccumulator, in that
this is accomplished by first finding flow directions by a user-specified
method and then calculating the drainage area and discharge. However,
this component additionally requires the passing of a function that
describes how discharge is lost or gained downstream,
f(Qw, nodeID, linkID, grid). See the Examples below to see how this works
in practice.
Optionally, spatially variable runoff can be set either by the model grid
field 'water__unit_flux_in' or the input variable *runoff_rate**.
Optionally a depression finding component can be specified and flow
directing, depression finding, and flow routing can all be accomplished
together. Note that the DepressionFinderAndRouter is not particularly
intelligent when running on lossy streams, and in particular, it will
reroute flow around pits even when they are in fact not filled due to loss.
NOTE: The perimeter nodes NEVER contribute to the accumulating flux, even
if the gradients from them point inwards to the main body of the grid.
This is because under Landlab definitions, perimeter nodes lack cells, so
cannot accumulate any discharge.
LossyFlowAccumulator stores as ModelGrid fields:
- Node array of drainage areas: *'drainage_area'*
- Node array of discharges: *'surface_water__discharge'*
- Node array of discharge loss in transit (vol/sec). This is the
total loss across all of the downstream links:
*'surface_water__discharge_loss'*
- Node array containing downstream-to-upstream ordered list of node
IDs: *'flow__upstream_node_order'*
- Node array of all but the first element of the delta data structure:
*flow__data_structure_delta*. The first element is always zero.
The FlowDirector component will add additional ModelGrid fields; see the
`FlowAccumulator component <https://landlab.readthedocs.io/en/release/reference/components/flow_accum.html>`_
for full details. These are:
- Node array of receivers (nodes that receive flow), or ITS OWN ID if
there is no receiver: *'flow__receiver_node'*
- Node array of flow proportions: *'flow__receiver_proportions'*
- Node array of links carrying flow: *'flow__link_to_receiver_node'*
- Node array of downhill slopes from each receiver:
*'topographic__steepest_slope'*
- Boolean node array of all local lows: *'flow__sink_flag'*
The primary method of this class is :func:`run_one_step`.
Examples
--------
These examples pertain only to the LossyFlowAccumulator. See the main
FlowAccumulator documentation for more generic and comprehensive examples.
First, a very simple example. Here's a 50% loss of discharge every time
flow moves along a node:
>>> import numpy as np
>>> from landlab import RasterModelGrid, HexModelGrid
>>> from landlab.components import FlowDirectorSteepest
>>> from landlab.components import DepressionFinderAndRouter
>>> mg = RasterModelGrid((3, 5), xy_spacing=(2, 1))
>>> mg.set_closed_boundaries_at_grid_edges(True, True, False, True)
>>> z = mg.add_field("topographic__elevation", mg.node_x + mg.node_y, at="node")
>>> def mylossfunction(qw):
... return 0.5 * qw
>>> fa = LossyFlowAccumulator(mg, 'topographic__elevation',
... flow_director=FlowDirectorSteepest,
... loss_function=mylossfunction)
>>> fa.run_one_step()
>>> mg.at_node['drainage_area'].reshape(mg.shape)
array([[ 0., 0., 0., 0., 0.],
[ 6., 6., 4., 2., 0.],
[ 0., 0., 0., 0., 0.]])
>>> mg.at_node['surface_water__discharge'].reshape(mg.shape)
array([[ 0. , 0. , 0. , 0. , 0. ],
[ 1.75, 3.5 , 3. , 2. , 0. ],
[ 0. , 0. , 0. , 0. , 0. ]])
>>> mg.at_node['surface_water__discharge_loss'].reshape(mg.shape)
array([[ 0. , 0. , 0. , 0. , 0. ],
[ 0. , 1.75, 1.5 , 1. , 0. ],
[ 0. , 0. , 0. , 0. , 0. ]])
Here we use a spatially distributed field to derive loss terms, and also
use a filled, non-raster grid.
>>> dx=(2./(3.**0.5))**0.5 # area to be 100.
>>> hmg = HexModelGrid((5, 3), spacing=dx, xy_of_lower_left=(-1.0745, 0.))
>>> z = hmg.add_field(
... "topographic__elevation",
... hmg.node_x**2 + np.round(hmg.node_y)**2,
... at="node",
... )
>>> z[9] = -10. # poke a hole
>>> lossy = hmg.add_zeros('node', 'mylossterm', dtype=float)
>>> lossy[14] = 1. # suppress all flow from node 14
Without loss looks like this:
>>> fa = LossyFlowAccumulator(hmg, 'topographic__elevation',
... flow_director=FlowDirectorSteepest,
... depression_finder=DepressionFinderAndRouter)
>>> fa.run_one_step()
>>> hmg.at_node['flow__receiver_node']
array([ 0, 1, 2,
3, 0, 9, 6,
7, 9, 4, 9, 11,
12, 9, 9, 15,
16, 17, 18])
>>> np.round(hmg.at_node['drainage_area'])
array([ 7., 0., 0.,
0., 7., 1., 0.,
0., 1., 6., 1., 0.,
0., 1., 1., 0.,
0., 0., 0.])
>>> np.round(hmg.at_node['surface_water__discharge'])
array([ 7., 0., 0.,
0., 7., 1., 0.,
0., 1., 6., 1., 0.,
0., 1., 1., 0.,
0., 0., 0.])
With loss looks like this:
>>> def mylossfunction2(Qw, nodeID, linkID, grid):
... return (1. - grid.at_node['mylossterm'][nodeID]) * Qw
>>> fa = LossyFlowAccumulator(hmg, 'topographic__elevation',
... flow_director=FlowDirectorSteepest,
... depression_finder=DepressionFinderAndRouter,
... loss_function=mylossfunction2)
>>> fa.run_one_step()
>>> np.round(hmg.at_node['drainage_area'])
array([ 7., 0., 0.,
0., 7., 1., 0.,
0., 1., 6., 1., 0.,
0., 1., 1., 0.,
0., 0., 0.])
>>> np.round(hmg.at_node['surface_water__discharge'])
array([ 6., 0., 0.,
0., 6., 1., 0.,
0., 1., 5., 1., 0.,
0., 1., 1., 0.,
0., 0., 0.])
>>> np.allclose(
... hmg.at_node["surface_water__discharge_loss"],
... lossy * hmg.at_node["surface_water__discharge"],
... )
True
(Loss is only happening from the node, 14, that we set it to happen at.)
Finally, note we can use the linkIDs to create flow-length-dependent
effects:
>>> from landlab.components import FlowDirectorMFD
>>> mg = RasterModelGrid((4, 6), xy_spacing=(1, 2))
>>> mg.set_closed_boundaries_at_grid_edges(True, True, False, True)
>>> z = mg.add_field("topographic__elevation", 2.0 * mg.node_x, at="node")
>>> z[9] = 8.
>>> z[16] = 6.5 # force the first node sideways
>>> L = mg.add_zeros('node', 'spatialloss')
>>> mg.at_node['spatialloss'][9] = 1.
>>> mg.at_node['spatialloss'][13] = 1.
>>> def fancyloss(Qw, nodeID, linkID, grid):
... # now a true transmission loss:
... Lt = (1.0 - 1.0 / grid.length_of_link[linkID] ** 2)
... Lsp = grid.at_node["spatialloss"][nodeID]
... return Qw * (1.0 - Lt) * (1.0 - Lsp)
>>> fa = LossyFlowAccumulator(
... mg,
... "topographic__elevation",
... flow_director=FlowDirectorMFD,
... loss_function=fancyloss,
... )
>>> fa.run_one_step()
>>> mg.at_node['drainage_area'].reshape(mg.shape)
array([[ 0. , 0. , 0. , 0. , 0. , 0. ],
[ 5.6, 5.6, 3.6, 2. , 2. , 0. ],
[ 10.4, 10.4, 8.4, 6.4, 4. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. ]])
>>> mg.at_node['surface_water__discharge'].reshape(mg.shape)
array([[ 0. , 0. , 0. , 0. , 0. , 0. ],
[ 4. , 4. , 2. , 2. , 2. , 0. ],
[ 0. , 8.5, 6.5, 4.5, 2.5, 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. ]])
References
----------
**Required Software Citation(s) Specific to this Component**
None Listed
**Additional References**
Braun, J., Willett, S. (2013). A very efficient O(n), implicit and parallel
method to solve the stream power equation governing fluvial incision and
landscape evolution. Geomorphology 180-181(C), 170-179.
https://dx.doi.org/10.1016/j.geomorph.2012.10.008
"""
_name = "LossyFlowAccumulator"
_info = {
"drainage_area": {
"dtype": float,
"intent": "out",
"optional": False,
"units": "m**2",
"mapping": "node",
"doc": "Upstream accumulated surface area contributing to the node's discharge",
},
"flow__data_structure_delta": {
"dtype": int,
"intent": "out",
"optional": False,
"units": "-",
"mapping": "node",
"doc": "Node array containing the elements delta[1:] of the data structure 'delta' used for construction of the downstream-to-upstream node array",
},
"flow__upstream_node_order": {
"dtype": int,
"intent": "out",
"optional": False,
"units": "-",
"mapping": "node",
"doc": "Node array containing downstream-to-upstream ordered list of node IDs",
},
"surface_water__discharge": {
"dtype": float,
"intent": "out",
"optional": False,
"units": "m**3/s",
"mapping": "node",
"doc": "Volumetric discharge of surface water",
},
"surface_water__discharge_loss": {
"dtype": float,
"intent": "out",
"optional": False,
"units": "m**3/s",
"mapping": "node",
"doc": "Total volume of water per second lost during all flow out of the node",
},
"topographic__elevation": {
"dtype": float,
"intent": "in",
"optional": True,
"units": "m",
"mapping": "node",
"doc": "Land surface topographic elevation",
},
"water__unit_flux_in": {
"dtype": float,
"intent": "in",
"optional": True,
"units": "m/s",
"mapping": "node",
"doc": "External volume water per area per time input to each node (e.g., rainfall rate)",
},
}
    def __init__(
        self,
        grid,
        surface="topographic__elevation",
        flow_director="FlowDirectorSteepest",
        runoff_rate=None,
        depression_finder=None,
        loss_function=None,
        **kwargs
    ):
        """Initialize the LossyFlowAccumulator component.

        Saves the grid, tests grid type, tests input types and
        compatibility for the flow_director and depression_finder
        keyword arguments, tests the argument of runoff_rate, and
        initializes new fields.

        Parameters
        ----------
        grid : ModelGrid
            A Landlab grid.
        surface : field name at node or array of length node
            The surface to direct flow across.
        flow_director : string, class, instance of class.
            A string of method or class name (e.g. 'D8' or 'FlowDirectorD8'), an
            uninstantiated FlowDirector class, or an instance of a FlowDirector
            class. This sets the method used to calculate flow directions.
            Default is 'FlowDirectorSteepest'
        runoff_rate : field name, array, or float, optional (m/time)
            If provided, sets the runoff rate and will be assigned to the grid
            field 'water__unit_flux_in'. If a spatially and temporally variable
            runoff rate is desired, pass this field name and update the field
            through model run time. If both the field and argument are present at
            the time of initialization, runoff_rate will *overwrite* the field. If
            neither are set, defaults to spatially constant unit input.
        depression_finder : string, class, instance of class, optional
            A string of class name (e.g., 'DepressionFinderAndRouter'), an
            uninstantiated DepressionFinder class, or an instance of a
            DepressionFinder class.
            This sets the method for depression finding.
        loss_function : Python function, optional
            A function of the form f(Qw, [node_ID, [linkID, [grid]]]), where Qw is
            the discharge at a node, node_ID the ID of the node at which the loss
            is to be calculated, linkID is the ID of the link down which the
            outflow drains (or a d8 ID if the routing is d8), and grid is a Landlab
            ModelGrid. The function then returns the new discharge at the node
            after the function is applied.
            Note that if a linkID is needed, a nodeID must also be specified, even
            if only as a dummy parameter; similarly, if a grid is to be passed, all
            of the preceding parameters must be specified. Both nodeID and linkID
            are required to permit spatially variable losses, and also losses
            dependent on flow path geometry (e.g., flow length). The grid is passed
            to allow fields or grid properties describing values across the grid
            to be accessed for the loss calculation (see examples).
            This function expects (float, [int, [int, [ModelGrid]]]), and
            return a single float, the new discharge value. This behavior is
            verified during component instantiation.
        **kwargs : optional
            Any additional parameters to pass to a FlowDirector or
            DepressionFinderAndRouter instance (e.g., partition_method for
            FlowDirectorMFD). This will have no effect if an instantiated
            component is passed using the flow_director or depression_finder
            keywords.
        """
        # add the new loss discharge field if necessary:
        if "surface_water__discharge_loss" not in grid.at_node:
            grid.add_zeros(
                "node", "surface_water__discharge_loss", dtype=float, clobber=True
            )
        super().__init__(
            grid,
            surface=surface,
            flow_director=flow_director,
            runoff_rate=runoff_rate,
            depression_finder=depression_finder,
            **kwargs
        )
        if loss_function is not None:
            # Count the loss function's parameters so it can be normalized to
            # the 4-argument form f(Qw, nodeID, linkID, grid) used internally.
            if sys.version_info[0] >= 3:
                sig = signature(loss_function)
                num_params = len(sig.parameters)
            else:  # Python 2
                num_params = loss_function.func_code.co_argcount
            # save the func for loss, and do a quick test on its inputs:
            if num_params == 1:
                # check the func takes a single value and turns it into a new
                # single value:
                if not isinstance(loss_function(1.0), float):
                    raise TypeError(
                        "The loss_function should take a float, and return " "a float."
                    )
                # now, for logical consistency in our calls to
                # find_drainage_area_and_discharge, wrap the func so it has the
                # full four-argument signature:
                def lossfunc(Qw, dummyn, dummyl, dummygrid):
                    return float(loss_function(Qw))
                self._lossfunc = lossfunc
            elif num_params == 2:
                # check the func takes (float, int) and turns it into a new
                # single value:
                if not isinstance(loss_function(1.0, 0), float):
                    raise TypeError(
                        "The loss_function should take (float, int), and "
                        "return a float."
                    )
                # now, for logical consistency in our calls to
                # find_drainage_area_and_discharge, wrap the func so it has the
                # full four-argument signature:
                def lossfunc(Qw, nodeID, dummyl, dummygrid):
                    return float(loss_function(Qw, nodeID))
                self._lossfunc = lossfunc
            elif num_params == 3:
                # check the func takes (float, int, int) and turns it into a
                # new single value:
                if not isinstance(loss_function(1.0, 0, 0), float):
                    raise TypeError(
                        "The loss_function should take (float, int, int), "
                        "and return a float."
                    )
                def lossfunc(Qw, nodeID, linkID, dummygrid):
                    return float(loss_function(Qw, nodeID, linkID))
                self._lossfunc = lossfunc
            elif num_params == 4:
                # this time, the test is too hard to implement cleanly so just
                self._lossfunc = loss_function
            else:
                # NOTE(review): the message omits the valid 4-argument form
                # (discharge, nodeID, linkID, grid) handled above.
                raise ValueError(
                    "The loss_function must have only a single argument, "
                    "which should be the discharge at a node; a pair of "
                    "arguments, which should be the discharge at a node and "
                    "the node ID; or three arguments, which should be the "
                    "discharge at a node, the node ID, and the link along "
                    "which that discharge will flow."
                )
        else:
            # make a dummy loss function that passes discharge through unchanged
            def lossfunc(Qw, dummyn, dummyl, dummygrid):
                return float(Qw)
            self._lossfunc = lossfunc
    def _accumulate_A_Q_to_one(self, s, r):
        """Accumulate area and discharge for a route-to-one scheme.

        *s* is the downstream-to-upstream node ordering and *r* the
        receiver-node array; returns ``(drainage_area, discharge)`` with
        the stored loss function applied along each flow link.
        """
        link = self._grid.at_node["flow__link_to_receiver_node"]
        a, q = flow_accum_bw.find_drainage_area_and_discharge_lossy(
            s,
            r,
            link,
            self._lossfunc,
            self._grid,
            self._node_cell_area,
            self._grid.at_node["water__unit_flux_in"],
        )
        return a, q
    def _accumulate_A_Q_to_n(self, s, r, p):
        """Accumulate area and discharge for a route-to-n scheme.

        *s* is the downstream-to-upstream node ordering, *r* the receiver
        array and *p* the flow proportions; returns
        ``(drainage_area, discharge)`` with the stored loss function
        applied along each flow link.
        """
        link = self._grid.at_node["flow__link_to_receiver_node"]
        a, q = flow_accum_to_n.find_drainage_area_and_discharge_to_n_lossy(
            s,
            r,
            link,
            p,
            self._lossfunc,
            self._grid,
            self._node_cell_area,
            self._grid.at_node["water__unit_flux_in"],
        )
        return a, q
if __name__ == "__main__":  # pragma: no cover
    import doctest
    # Run the doctest examples embedded in the class docstring above.
    doctest.testmod()
|
{
"content_hash": "ebbc31137cabdb50a056a11f09e694c9",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 159,
"avg_line_length": 40.5966735966736,
"alnum_prop": 0.5503661596763456,
"repo_name": "landlab/landlab",
"id": "cd8cfea75a4850cdbaddc1f88450dfac05ef5cff",
"size": "19546",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "landlab/components/flow_accum/lossy_flow_accumulator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "762"
},
{
"name": "Cython",
"bytes": "265735"
},
{
"name": "Gherkin",
"bytes": "1601"
},
{
"name": "Jupyter Notebook",
"bytes": "1373117"
},
{
"name": "Makefile",
"bytes": "2250"
},
{
"name": "Python",
"bytes": "4497175"
},
{
"name": "Roff",
"bytes": "445"
},
{
"name": "Shell",
"bytes": "1073"
},
{
"name": "TeX",
"bytes": "42252"
}
],
"symlink_target": ""
}
|
from os.path import join, dirname
from ithz.lib import template
from ithz.utils import u
# Absolute path of the package-level "templates" directory (sibling of this
# module's containing package).
templatedir = join(dirname(dirname(__file__)),"templates")
def getTemplate(name, values):
    """Render template *name* from the templates directory with *values*
    and return the result as unicode."""
    path = join(templatedir, name)
    rendered = template.render(path, values)
    return u(rendered)
|
{
"content_hash": "82a731b8ccb8868e48ce679f7fa05628",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 62,
"avg_line_length": 34.57142857142857,
"alnum_prop": 0.7603305785123967,
"repo_name": "ergoithz/ithz",
"id": "f49bd7c1f3ceff0ca769c4aaab1b9657363fa4c4",
"size": "242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ithz/template.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13947"
},
{
"name": "JavaScript",
"bytes": "66769"
},
{
"name": "Python",
"bytes": "267922"
}
],
"symlink_target": ""
}
|
from django.db.models import Q
from django.core.exceptions import ValidationError
from cms.apphook_pool import apphook_pool
from cms.models import Page
from menus.base import Menu
class CMSAttachMenu(Menu):
    """Base class for menus that are attached to CMS pages via apphooks or
    navigation extenders."""

    cms_enabled = True
    instance = None
    name = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # A CMS-enabled attach menu cannot be registered without a name.
        name_missing = self.cms_enabled and not self.name
        if name_missing:
            raise ValidationError(
                "the menu %s is a CMSAttachMenu but has no name defined!" %
                self.__class__.__name__)

    @classmethod
    def get_apphooks(cls):
        """
        Returns a list of apphooks to which this CMSAttachMenu is attached.

        Calling this does NOT produce DB queries.
        """
        attached = []
        for key, _ in apphook_pool.get_apphooks():
            candidate = apphook_pool.get_apphook(key)
            if cls in candidate.get_menus():
                attached.append(candidate)
        return attached

    @classmethod
    def get_instances(cls):
        """
        Return a list (queryset, really) of all CMS Page objects (in this case)
        that are currently using this CMSAttachMenu either directly as a
        navigation_extender, or, as part of an apphook.

        Calling this DOES perform a DB query.
        """
        parent_apps = [app.__class__.__name__ for app in cls.get_apphooks()]
        used_by_apphook = Q(application_urls__in=parent_apps)
        used_directly = Q(navigation_extenders=cls.__name__)
        return Page.objects.filter(used_by_apphook | used_directly)
|
{
"content_hash": "ca6d77fe0a6f921dda3ef51800c5ba9b",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 86,
"avg_line_length": 30.72,
"alnum_prop": 0.5963541666666666,
"repo_name": "divio/django-cms",
"id": "444b18942db0a1c9a6b2152fd9db19520d73992a",
"size": "1536",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cms/menu_bases.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "132972"
},
{
"name": "HTML",
"bytes": "201508"
},
{
"name": "JavaScript",
"bytes": "1238070"
},
{
"name": "Python",
"bytes": "2360702"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
}
|
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
import logging
# Third-party
import numpy as np
import astropy.units as u
# Public API of this module.
__all__ = ["BasePrior", "UniformPrior", "LogarithmicPrior", "NormalPrior"]
# Module-level logger.
logger = logging.getLogger(__name__)
class BasePrior(object):
    """Abstract prior distribution.

    The base implementation is an (improper) flat prior: the density is 1
    and its log is 0 everywhere, and sampling is not supported.
    """

    def pdf(self, value):
        """Return the probability density at *value* (always 1. here)."""
        return 1.

    def logpdf(self, value):
        """Return the log probability density at *value* (always 0. here)."""
        return 0.

    def sample(self, n=None):
        """
        Sample from this prior. The returned array axis=0 is the
        sample axis.

        Parameters
        ----------
        n : int (optional)
            Number of samples to draw
        """
        raise ValueError("Cannot sample from a BasePrior object.")

    def __str__(self):
        return "<BasePrior>"

    def __repr__(self):
        return str(self)
class UniformPrior(BasePrior):

    def __init__(self, a, b):
        """ Uniform distribution. Returns 0 if value is outside of the
            ND hyperrectangle defined by the (vectors) a, b. Returns
            the properly normalized constant prod(1/(b-a)) otherwise.

            Parameters
            ----------
            a : numeric, quantity_like, array_like
                Lower bound.
            b : numeric, quantity_like, array_like
                Upper bound.
        """
        self.a = np.atleast_1d(a)
        self.b = np.atleast_1d(b)

        if self.a.shape != self.b.shape:
            raise ValueError("Shape of 'a' must match shape of 'b'.")

        if self.a.ndim > 1:
            raise ValueError("Only one dimensional distributions supported.")

    def pdf(self, x):
        """Density at x: 1/(b-a) inside [a, b], 0 outside."""
        x = np.atleast_1d(x)
        # BUG FIX: force a float result array. With integer input,
        # np.zeros_like(x) is an int array and assigning the density
        # 1/(b-a) into it silently truncated the value to 0.
        p = np.zeros_like(x, dtype=float)
        ix = (x < self.a) | (x > self.b)
        p[~ix] = (1 / (self.b - self.a))[~ix]
        return np.squeeze(p)

    def logpdf(self, x):
        """Log-density at x: -log(b-a) inside [a, b], -inf outside."""
        x = np.atleast_1d(x)
        # BUG FIX: float dtype required — assigning -inf into an integer
        # array raises "cannot convert float infinity to integer".
        p = np.zeros_like(x, dtype=float)
        ix = (x < self.a) | (x > self.b)
        p[ix] = -np.inf
        p[~ix] = (-np.log(self.b - self.a))[~ix]
        return np.squeeze(p)

    def sample(self, n=None):
        """
        Sample from this prior. The returned array axis=0 is the
        sample axis.

        Parameters
        ----------
        n : int (optional)
            Number of samples to draw
        """
        if n is not None and self.a.size > 1:
            return np.random.uniform(self.a, self.b, size=(n,self.a.size))
        elif n is not None and self.a.size == 1:
            return np.random.uniform(self.a, self.b, size=(n,))
        else:
            return np.random.uniform(self.a, self.b)

    def __str__(self):
        return "<Uniform a={}, b={}>".format(self.a, self.b)
class LogarithmicPrior(BasePrior):

    def __init__(self, a, b):
        """ Logarithmic (scale-invariant) prior over a < value < b.

            The density is p(x) = 1 / (x * ln(b/a)) inside the range and
            0 outside, i.e. uniform in log(x).

            Parameters
            ----------
            a : numeric, quantity_like, array_like
                Lower bound (must be positive).
            b : numeric, quantity_like, array_like
                Upper bound.
        """
        self.a = np.atleast_1d(a)
        self.b = np.atleast_1d(b)

        if self.a.shape != self.b.shape:
            raise ValueError("Shape of 'a' must match shape of 'b'.")

        if self.a.ndim > 1:
            raise ValueError("Only one dimensional distributions supported.")

    def pdf(self, x):
        """Density at x: 1/(x*ln(b/a)) inside [a, b], 0 outside."""
        x = np.atleast_1d(x)
        # Float dtype so integer input does not truncate density values.
        p = np.zeros_like(x, dtype=float)
        ix = (x < self.a) | (x > self.b)
        # BUG FIX: a log-uniform density is 1/(x*ln(b/a)); the original
        # returned the constant 1/ln(b/a), which does not integrate to 1
        # and disagreed with the class docstring.
        p[~ix] = (1 / (x * np.log(self.b / self.a)))[~ix]
        return np.squeeze(p)

    def logpdf(self, x):
        """Log-density at x: -ln(x) - ln(ln(b/a)) inside [a, b], -inf outside."""
        x = np.atleast_1d(x)
        # Float dtype required for the -inf assignment below.
        p = np.zeros_like(x, dtype=float)
        ix = (x < self.a) | (x > self.b)
        p[ix] = -np.inf
        p[~ix] = (-np.log(x) - np.log(np.log(self.b / self.a)))[~ix]
        return np.squeeze(p)

    def sample(self, n=None):
        """
        Sample from this prior. The returned array axis=0 is the
        sample axis.

        Parameters
        ----------
        n : int (optional)
            Number of samples to draw
        """
        # BUG FIX: sample uniformly between log(a) and log(b) so samples lie
        # in [a, b] and follow pdf() above. The original drew
        # exp(U(a, b)), whose support is [e^a, e^b].
        log_a = np.log(self.a)
        log_b = np.log(self.b)
        if n is not None and self.a.size > 1:
            return np.exp(np.random.uniform(log_a, log_b, size=(n,self.a.size)))
        elif n is not None and self.a.size == 1:
            return np.exp(np.random.uniform(log_a, log_b, size=(n,)))
        else:
            return np.exp(np.random.uniform(log_a, log_b))

    def __str__(self):
        return "<Logarithmic a={}, b={}>".format(self.a, self.b)
class NormalPrior(BasePrior):

    def __init__(self, mean, stddev):
        """ Normal (Gaussian) prior.

            Parameters
            ----------
            mean : numeric, quantity_like, array_like
                Mean of the distribution.
            stddev : numeric, quantity_like, array_like
                Standard of deviation / square root of variance.
        """
        self.mean = np.atleast_1d(mean)
        self.stddev = np.atleast_1d(stddev)
        # Log of the normalization constant 1/(stddev*sqrt(2*pi)).
        self._norm = -0.5*np.log(2*np.pi) - np.log(self.stddev)

    def pdf(self, x):
        """Density at x, computed by exponentiating the log-density."""
        return np.exp(self.logpdf(x))

    def logpdf(self, x):
        """Log-density at x."""
        x = np.atleast_1d(x)
        resid = self.mean - x
        return self._norm - 0.5*(resid / self.stddev)**2

    def sample(self, n=None):
        """ Sample from this prior. The returned array axis=0 is the
            sample axis.

            Parameters
            ----------
            n : int (optional)
                Number of samples to draw
        """
        if n is None:
            return np.random.normal(self.mean, self.stddev)
        size = (n, self.mean.size) if self.mean.size > 1 else (n,)
        return np.random.normal(self.mean, self.stddev, size=size)

    def __str__(self):
        return "<Normal μ={}, σ={}>".format(self.mean, self.stddev)
|
{
"content_hash": "618f6fc285116bd4fc3770865eb19930",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 84,
"avg_line_length": 28.28301886792453,
"alnum_prop": 0.5223482321547699,
"repo_name": "abonaca/gary",
"id": "59f739e46b48f86df28d3a51f1d4808c962511ae",
"size": "6015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gary/inference/prior.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "67332"
},
{
"name": "C++",
"bytes": "7004"
},
{
"name": "Python",
"bytes": "490956"
}
],
"symlink_target": ""
}
|
import os
import sys
import json
import gpu_path_util
from gpu_tests import gpu_integration_test
# Directory of the WebCodecs HTML test pages served by the static server.
html_path = os.path.join(gpu_path_util.CHROMIUM_SRC_DIR, 'content', 'test',
                         'data', 'gpu', 'webcodecs')
# Directory of shared media test data.
data_path = os.path.join(gpu_path_util.CHROMIUM_SRC_DIR, 'media', 'test',
                         'data')
# Clip used as fake camera input (see SetUpProcess below).
four_colors_img_path = os.path.join(data_path, 'four-colors.y4m')

# Dimensions over which GenerateGpuTests builds its test cross products.
frame_sources = [
    'camera', 'capture', 'offscreen', 'arraybuffer', 'hw_decoder', 'sw_decoder'
]
codecs = ['avc1.42001E', 'vp8', 'vp09.00.10.08', 'av01.0.04M.08']
accelerations = ['prefer-hardware', 'prefer-software']
class WebCodecsIntegrationTest(gpu_integration_test.GpuIntegrationTest):
  """GPU integration tests for the WebCodecs API.

  Each generated test loads one HTML page from the webcodecs data directory
  and drives it with a JSON-encoded argument object via TEST.run().
  """

  @classmethod
  def Name(cls):
    # Suite name used to select these tests on the command line.
    return 'webcodecs'

  # pylint: disable=too-many-branches
  @classmethod
  def GenerateGpuTests(cls, options):
    # Draw/upload/readback tests: one of each per frame source.
    for source_type in frame_sources:
      yield ('WebCodecs_DrawImage_' + source_type, 'draw-image.html', ({
          'source_type':
          source_type
      }))
      yield ('WebCodecs_TexImage2d_' + source_type, 'tex-image-2d.html', ({
          'source_type':
          source_type
      }))
      yield ('WebCodecs_copyTo_' + source_type, 'copyTo.html', ({
          'source_type':
          source_type
      }))

    # Encode/decode round trip, one per codec.
    for codec in codecs:
      yield ('WebCodecs_EncodeDecode_' + codec, 'encode-decode.html', ({
          'codec':
          codec
      }))

    # Encoding: full cross product of source x codec x acceleration.
    for source_type in frame_sources:
      for codec in codecs:
        for acc in accelerations:
          args = (source_type, codec, acc)
          yield ('WebCodecs_Encode_%s_%s_%s' % args, 'encode.html', ({
              'source_type':
              source_type,
              'codec':
              codec,
              'acceleration':
              acc
          }))

    # Bitrate/latency-mode combinations, always with the offscreen source.
    for codec in codecs:
      for acc in accelerations:
        for bitrate_mode in ['constant', 'variable']:
          for latency_mode in ['realtime', 'quality']:
            source_type = 'offscreen'
            args = (source_type, codec, acc, bitrate_mode, latency_mode)
            yield ('WebCodecs_EncodingModes_%s_%s_%s_%s_%s' % args,
                   'encoding-modes.html', ({
                       'source_type': source_type,
                       'codec': codec,
                       'acceleration': acc,
                       'bitrate_mode': bitrate_mode,
                       'latency_mode': latency_mode
                   }))

    # Scalable video coding with 2 and 3 temporal layers.
    for codec in codecs:
      for layers in [2, 3]:
        args = (codec, layers)
        yield ('WebCodecs_SVC_%s_layers_%d' % args, 'svc.html', ({
            'codec':
            codec,
            'layers':
            layers
        }))

    # Color-space handling per codec/acceleration.
    for codec in codecs:
      for acc in accelerations:
        args = (codec, acc)
        yield ('WebCodecs_EncodeColorSpace_%s_%s' % args,
               'encode-color-space.html', ({
                   'codec': codec,
                   'acceleration': acc
               }))
  # pylint: enable=too-many-branches

  def RunActualGpuTest(self, test_path, *args):
    """Navigate to the test page, run TEST.run(arg_obj), report the result."""
    url = self.UrlOfStaticFilePath(html_path + '/' + test_path)
    tab = self.tab
    arg_obj = args[0]
    os_name = self.platform.GetOSName()
    # Camera frame contents can only be validated on platforms where the
    # fake file-backed capture device is available (see SetUpProcess).
    arg_obj['validate_camera_frames'] = self.CameraCanShowFourColors(os_name)
    tab.Navigate(url)
    tab.action_runner.WaitForJavaScriptCondition(
        'document.readyState == "complete"')
    tab.EvaluateJavaScript('TEST.run(' + json.dumps(arg_obj) + ')')
    tab.action_runner.WaitForJavaScriptCondition('TEST.finished', timeout=60)
    if tab.EvaluateJavaScript('TEST.skipped'):
      self.skipTest('Skipping test:' + tab.EvaluateJavaScript('TEST.summary()'))
    if not tab.EvaluateJavaScript('TEST.success'):
      self.fail('Test failure:' + tab.EvaluateJavaScript('TEST.summary()'))

  @staticmethod
  def CameraCanShowFourColors(os_name):
    # File-backed fake video capture is not used on these platforms.
    return os_name not in ('android', 'chromeos')

  @classmethod
  def SetUpProcess(cls):
    super(WebCodecsIntegrationTest, cls).SetUpProcess()
    args = [
        '--use-fake-device-for-media-stream',
        '--use-fake-ui-for-media-stream',
    ]
    # If we don't call CustomizeBrowserArgs cls.platform is None
    cls.CustomizeBrowserArgs(args)
    platform = cls.platform
    if cls.CameraCanShowFourColors(platform.GetOSName()):
      args.append('--use-file-for-fake-video-capture=' + four_colors_img_path)
    # Second call applies the (possibly extended) argument list for real.
    cls.CustomizeBrowserArgs(args)
    cls.StartBrowser()
    cls.SetStaticServerDirs([html_path, data_path])

  @classmethod
  def ExpectationsFiles(cls):
    # Per-platform expected failures/skips for this suite.
    return [
        os.path.join(os.path.dirname(os.path.abspath(__file__)),
                     'test_expectations', 'webcodecs_expectations.txt')
    ]
def load_tests(loader, tests, pattern):
  """Standard unittest ``load_tests`` hook.

  The arguments are required by the protocol but unused here; the GPU
  integration test loader collects everything defined in this module.
  """
  del loader, tests, pattern  # Unused.
  this_module = sys.modules[__name__]
  return gpu_integration_test.LoadAllTestsInModule(this_module)
|
{
"content_hash": "4a5d80f211b2a3227c7c52491f90560a",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 80,
"avg_line_length": 32.93877551020408,
"alnum_prop": 0.5888062783973564,
"repo_name": "scheib/chromium",
"id": "c44cd0fa75fc4a60db1078b9cf2cef43fd95eb5e",
"size": "5005",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "content/test/gpu/gpu_tests/webcodecs_integration_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from django import forms
|
{
"content_hash": "8e8c8c3956e9257db780238b69f38ba0",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 24,
"avg_line_length": 13,
"alnum_prop": 0.8076923076923077,
"repo_name": "ashwoods/django-double-taggit",
"id": "7112ef9333c1703f624998863783bfdc81634434",
"size": "1066",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "double_taggit/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "15763"
},
{
"name": "Python",
"bytes": "25509"
},
{
"name": "Shell",
"bytes": "4569"
}
],
"symlink_target": ""
}
|
from typing import Type, Dict
class SsaContext():
    """Name-generation context for SSA objects.

    :ivar objCnt: monotonically increasing counter used to build unique
        object names (a single shared counter, not per-type counts)
    """

    def __init__(self):
        self.objCnt = 0

    def genName(self, obj):
        """Return a fresh name for *obj*, e.g. "o0", "o1", ...

        The prefix is taken from the object's ``_GEN_NAME_PREFIX`` attribute
        when present, otherwise "o".
        """
        prefix = getattr(obj, "_GEN_NAME_PREFIX", "o")
        seq = self.objCnt
        self.objCnt += 1
        return f"{prefix}{seq:d}"
|
{
"content_hash": "06f2716a2cff388ec8a4fafd4aa45a29",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 74,
"avg_line_length": 27.1875,
"alnum_prop": 0.542528735632184,
"repo_name": "Nic30/hwtHls",
"id": "0ebeb7499db3361d295668cde33702a27d54e071",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hwtHls/ssa/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1001"
},
{
"name": "C++",
"bytes": "131805"
},
{
"name": "Dockerfile",
"bytes": "1731"
},
{
"name": "LLVM",
"bytes": "74517"
},
{
"name": "Meson",
"bytes": "2683"
},
{
"name": "Python",
"bytes": "739246"
}
],
"symlink_target": ""
}
|
"""Test Utils for Forseti unit tests."""
from builtins import range
import collections
import contextlib
import json
import logging
import os
import tempfile
import unittest
import socket
import sys
from google.cloud.forseti.common.util import logger
def get_available_port():
  """Return a TCP port number that is currently free on this host."""
  with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sckt:
    sckt.bind(("", 0))          # port 0: let the OS choose a free port
    sckt.listen(1)
    return sckt.getsockname()[1]
@contextlib.contextmanager
def create_temp_file(data):
  """Yield the path of a temporary file containing *data*.

  *data* may be str (encoded as UTF-8) or bytes.  The file is removed when
  the context exits.
  """
  payload = data if isinstance(data, bytes) else data.encode()
  tmp = tempfile.NamedTemporaryFile(delete=False)
  tmp.write(payload)
  tmp.close()
  try:
    yield tmp.name
  finally:
    os.unlink(tmp.name)
class ForsetiTestCase(unittest.TestCase):
    """Forseti base class for tests."""

    def __init__(self, *args, **kwargs):
        # Quiet Forseti's own logging so test output stays readable.
        # (The stray trailing comma after `self` is harmless.)
        super(ForsetiTestCase, self,).__init__(*args, **kwargs)
        logger.set_logger_level(logging.ERROR)

    def setUp(self):
        # Disable ResourceWarning messages in tests, be sure to call super in
        # child classes as this gets reset for every test.
        # See https://docs.python.org/3/library/warnings.html#overriding-the-default-filter
        super(ForsetiTestCase, self).setUp()
        if not sys.warnoptions:
            import warnings
            warnings.simplefilter('ignore', ResourceWarning)

    def assertStartsWith(self, actual, expected_start):
        """Assert that actual.startswith(expected_start) is True.

        Args:
            actual: str
            expected_start: str
        """
        if not actual.startswith(expected_start):
            self.fail('%r does not start with %r' % (actual, expected_start))

    def assertSameStructure(self, a, b, aname='a', bname='b', msg=None):
        """Asserts that two values contain the same structural content.

        The two arguments should be data trees consisting of trees of dicts and
        lists. They will be deeply compared by walking into the contents of
        dicts and lists; other items will be compared using the == operator.
        If the two structures differ in content, the failure message will
        indicate the location within the structures where the first
        difference is found. This may be helpful when comparing large
        structures.

        Args:
            a: The first structure to compare.
            b: The second structure to compare.
            aname: Variable name to use for the first structure in assertion
                messages.
            bname: Variable name to use for the second structure.
            msg: Additional text to include in the failure message.
        """
        # Accumulate all the problems found so we can report all of them at once
        # rather than just stopping at the first
        problems = []
        _WalkStructureForProblems(a, b, aname, bname, problems)

        # Avoid spamming the user toooo much:
        # unittest's maxDiff caps the message size (~80 chars per problem).
        max_problems_to_show = self.maxDiff // 80
        if len(problems) > max_problems_to_show:
            problems = problems[0:max_problems_to_show-1] + ['...']
        if problems:
            failure_message = '; '.join(problems)
            if msg:
                failure_message += (': ' + msg)
            self.fail(failure_message)
_INT_TYPES = (int, int) # Sadly there is no types.IntTypes defined for us.
def _WalkStructureForProblems(a, b, aname, bname, problem_list):
"""The recursive comparison behind assertSameStructure."""
if type(a) != type(b) and not (
isinstance(a, _INT_TYPES) and isinstance(b, _INT_TYPES)):
# We do not distinguish between int and long types as 99.99% of Python 2
# code should never care. They collapse into a single type in Python 3.
problem_list.append('%s is a %r but %s is a %r' %
(aname, type(a), bname, type(b)))
# If they have different types there's no point continuing
return
if isinstance(a, collections.Mapping):
for k in a:
if k in b:
_WalkStructureForProblems(a[k], b[k],
'%s[%r]' % (aname, k), '%s[%r]' % (bname, k),
problem_list)
else:
problem_list.append('%s has [%r] but %s does not' % (aname, k, bname))
for k in b:
if k not in a:
problem_list.append('%s lacks [%r] but %s has it' % (aname, k, bname))
# Strings are Sequences but we'll just do those with regular !=
elif isinstance(a, collections.Sequence) and not isinstance(a, str):
minlen = min(len(a), len(b))
for i in range(minlen):
_WalkStructureForProblems(a[i], b[i],
'%s[%d]' % (aname, i), '%s[%d]' % (bname, i),
problem_list)
for i in range(minlen, len(a)):
problem_list.append('%s has [%i] but %s does not' % (aname, i, bname))
for i in range(minlen, len(b)):
problem_list.append('%s lacks [%i] but %s has it' % (aname, i, bname))
else:
if a != b:
problem_list.append('%s is %r but %s is %r' % (aname, a, bname, b))
def get_datafile_path(start_loc, filename):
  """Get the path for a data file.

  Returns ``<dirname(abspath(start_loc))>/data/<filename>``.
  """
  base_dir = os.path.dirname(os.path.abspath(start_loc))
  return os.path.join(base_dir, 'data', filename)
def load_json(json_file_path):
  """Load json data from a file."""
  with open(json_file_path, 'r') as filedata:
    return json.load(filedata)
|
{
"content_hash": "8efee84d5cafb8e4f0926f9d0441b477",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 91,
"avg_line_length": 35.416666666666664,
"alnum_prop": 0.6108597285067874,
"repo_name": "forseti-security/forseti-security",
"id": "23e1cafb8c24d851de31f1bc26d9a44a9c7ee4f2",
"size": "6139",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/unittest_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3652"
},
{
"name": "HCL",
"bytes": "37409"
},
{
"name": "JavaScript",
"bytes": "1833"
},
{
"name": "Jinja",
"bytes": "6379"
},
{
"name": "Makefile",
"bytes": "5427"
},
{
"name": "Open Policy Agent",
"bytes": "3600"
},
{
"name": "Python",
"bytes": "4140122"
},
{
"name": "Ruby",
"bytes": "37434"
},
{
"name": "Shell",
"bytes": "17062"
}
],
"symlink_target": ""
}
|
"""Gets all the saved reports for user's default account.
Tags: savedreports.list
"""
__author__ = 'jalc@google.com (Jose Alcerreca)'
import sys
from apiclient import sample_tools
from oauth2client import client
from adsense_util import get_account_id
MAX_PAGE_SIZE = 50
def main(argv):
  """List all saved reports for the user's chosen AdSense account."""
  # Authenticate and construct service.
  service, unused_flags = sample_tools.init(
      argv, 'adsense', 'v1.4', __doc__, __file__, parents=[],
      scope='https://www.googleapis.com/auth/adsense.readonly')

  try:
    # Let the user pick account if more than one.
    account_id = get_account_id(service)

    # Retrieve saved report list in pages and display data as we receive it.
    request = service.accounts().reports().saved().list(
        accountId=account_id, maxResults=MAX_PAGE_SIZE)
    while request is not None:
      result = request.execute()
      if 'items' in result:
        saved_reports = result['items']
        for saved_report in saved_reports:
          # BUG FIX: the message previously said "Saved ad style" (a
          # copy/paste from another sample) although these are saved reports.
          print ('Saved report with ID "%s" and name "%s" was found.'
                 % (saved_report['id'], saved_report['name']))
      # Use the same accounts-scoped collection that built the first request.
      request = service.accounts().reports().saved().list_next(request, result)

  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')


if __name__ == '__main__':
  main(sys.argv)
|
{
"content_hash": "0bee577287fa03628b6fad2e36de332d",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 30.02173913043478,
"alnum_prop": 0.6603910209992759,
"repo_name": "ya7lelkom/googleads-adsense-examples",
"id": "e386b228cf8771c230bbd02b07eb24663f8a9464",
"size": "1999",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/v1.4/get_all_saved_reports.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "53489"
},
{
"name": "CSS",
"bytes": "2845"
},
{
"name": "HTML",
"bytes": "3596"
},
{
"name": "Java",
"bytes": "140865"
},
{
"name": "PHP",
"bytes": "114319"
},
{
"name": "Python",
"bytes": "75316"
},
{
"name": "Ruby",
"bytes": "43093"
}
],
"symlink_target": ""
}
|
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.maxDiff = None

        filename = 'chart_axis33.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename

        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chart = workbook.add_chart({'type': 'line'})

        chart.axis_ids = [68827008, 68898816]

        # Write three data columns, then chart each as its own series.
        columns = [
            ('A1', [1, 2, 3, 4, 5]),
            ('B1', [2, 4, 6, 8, 10]),
            ('C1', [3, 6, 9, 12, 15]),
        ]
        for cell, values in columns:
            worksheet.write_column(cell, values)

        for col in ('A', 'B', 'C'):
            chart.add_series({'values': '=Sheet1!$%s$1:$%s$5' % (col, col)})

        chart.set_x_axis({'name': 'XXX', 'name_font': {'rotation': -45, 'baseline': -1}})
        chart.set_y_axis({'name': 'YYY', 'name_font': {'rotation': -45, 'baseline': -1}})

        worksheet.insert_chart('E9', chart)

        workbook.close()

        self.assertExcelEqual()
|
{
"content_hash": "e82c3d7f3d21431a3136113811a48bc5",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 89,
"avg_line_length": 28.381818181818183,
"alnum_prop": 0.564381806534273,
"repo_name": "jkyeung/XlsxWriter",
"id": "bb8e6ef2097aa5540859c26a2bf01ecf054b2e8b",
"size": "1734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xlsxwriter/test/comparison/test_chart_axis33.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5113"
},
{
"name": "CSS",
"bytes": "16544"
},
{
"name": "HTML",
"bytes": "13100"
},
{
"name": "Makefile",
"bytes": "7819"
},
{
"name": "Perl",
"bytes": "3504"
},
{
"name": "Python",
"bytes": "2430294"
},
{
"name": "Shell",
"bytes": "6064"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Import this Field (UEditorField); leave the other imports unchanged.
from DjangoUeditor.models import UEditorField
from django.core.urlresolvers import reverse
@python_2_unicode_compatible
class Column(models.Model):
    """A site column (section) that groups articles."""

    name = models.CharField('栏目名称', max_length=256)  # column display name
    slug = models.CharField('栏目网址', max_length=256, db_index=True)  # URL slug
    intro = models.TextField('栏目简介', default='')  # short description
    nav_display = models.BooleanField('导航显示', default=False)  # show in nav bar
    home_display = models.BooleanField('首页显示', default=False)  # show on home page

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Canonical URL for this column, resolved by slug.
        return reverse('column', args=(self.slug,))

    class Meta:
        verbose_name = '栏目'
        verbose_name_plural = '栏目'
        ordering = ['name']  # order columns by name
@python_2_unicode_compatible
class Article(models.Model):
    """An article/tutorial that can belong to several columns."""

    column = models.ManyToManyField(Column, verbose_name='归属栏目')  # owning columns
    title = models.CharField('标题', max_length=256)  # title
    slug = models.CharField('网址', max_length=256, db_index=True)  # URL slug
    author = models.ForeignKey('auth.User', blank=True, null=True, verbose_name='作者')

    # Only the content field uses the rich-text UEditorField.
    content = UEditorField('内容', height=300, width=1000,
                           default=u'', blank=True, imagePath="uploads/images/",
                           toolbars='besttome', filePath='uploads/files/')

    pub_date = models.DateTimeField('发表时间', auto_now_add=True, editable=True)
    update_time = models.DateTimeField('更新时间', auto_now=True, null=True)
    published = models.BooleanField('正式发布', default=True)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        # Canonical URL for this article, resolved by primary key + slug.
        return reverse('article', args=(self.pk, self.slug))

    class Meta:
        verbose_name = '教程'
        verbose_name_plural = '教程'
|
{
"content_hash": "1230554a21150104bd897f3a6a0f9527",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 85,
"avg_line_length": 32.25,
"alnum_prop": 0.6760797342192691,
"repo_name": "yephper/django",
"id": "cc5e962f721a5ae4644b0dc1f4043ce6a18fbc16",
"size": "1982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/bin/minicms/news/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "1538"
},
{
"name": "CSS",
"bytes": "1697381"
},
{
"name": "HTML",
"bytes": "390772"
},
{
"name": "Java",
"bytes": "588"
},
{
"name": "JavaScript",
"bytes": "3172126"
},
{
"name": "Makefile",
"bytes": "134"
},
{
"name": "PHP",
"bytes": "19336"
},
{
"name": "Python",
"bytes": "13365273"
},
{
"name": "Shell",
"bytes": "837"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
}
|
""" autonomous.py - Version 1.0 2016-10-12
General framework based on Patrick Goebel's nav_test.py
Initial version based on ccam-navigation by Chris Mobley
Autonomous movement added by Jonathan Hodges
Define waypoint destinations for a robot to move autonomously within
a map framework.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.5
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy
import rospkg
import actionlib
from actionlib_msgs.msg import *
from geometry_msgs.msg import Pose, PoseWithCovarianceStamped, Point, Quaternion, Twist
from kuri_mbzirc_challenge_2_msgs.msg import BoxPositionAction, BoxPositionGoal
from tf.transformations import quaternion_from_euler
from decimal import *
import time
from math import radians, pi
class mbzirc_c2_auto():
    """Connects to the box-detection action server and sends a start goal."""

    def __init__(self):
        rospy.init_node('test_box_detection', anonymous=True)

        # Enable shutdown in rospy (This is important so we cancel any move_base goals
        # when the node is killed)
        rospy.on_shutdown(self.shutdown)

        # Subscribe to the action server
        self.client = actionlib.SimpleActionClient("get_box_cluster", BoxPositionAction)
        rospy.loginfo("Waiting for action server...")

        # Wait 60 seconds for the action server to become available
        self.client.wait_for_server(rospy.Duration(60))
        rospy.loginfo("Connected to action server")

        # Send start command
        rospy.loginfo("Sending start command")
        goal = BoxPositionGoal()
        goal.request = goal.REQUEST_START
        # NOTE(review): range_max is assigned twice — the first assignment is
        # dead.  Presumably the first line was meant to be range_min = 30;
        # confirm against BoxPositionGoal's message definition before changing.
        goal.range_max = 30
        goal.range_max = 60
        goal.angle_min = -pi/2
        goal.angle_max = pi/2
        self.execute(goal)
        rospy.loginfo("Started")
        #time.sleep(30)
        #rospy.loginfo("Sending stop command")
        #goal = BoxPositionGoal()
        #goal.request = goal.REQUEST_STOP
        #self.execute(goal)
        #rospy.loginfo("Stopped")
        rospy.signal_shutdown("Complete")

    def execute(self, goal):
        """Send *goal* to the action server and wait up to 60 s for a result."""
        # Send the goal pose to the MoveBaseAction server
        self.client.send_goal(goal)

        # Allow 1 minute to get there
        finished_within_time = self.client.wait_for_result(rospy.Duration(60))

        # If we don't get there in time, abort the goal
        if not finished_within_time:
            self.client.cancel_goal()
            rospy.loginfo("Timed out achieving goal")
        else:
            # We made it!
            state = self.client.get_state()
            if state == GoalStatus.SUCCEEDED:
                rospy.loginfo("Goal succeeded!")

    def shutdown(self):
        """rospy shutdown hook: cancel any outstanding action goal."""
        rospy.loginfo("Stopping the robot...")
        self.client.cancel_goal()
# Entry point: run the node until ROS shuts it down or it is interrupted.
if __name__ == '__main__':
    try:
        mbzirc_c2_auto()
        rospy.spin()
    except rospy.ROSInterruptException:
        rospy.loginfo("mbzirc_c2_auto finished.")
|
{
"content_hash": "fff7a30873bec2e79b41044f99d965b1",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 87,
"avg_line_length": 30.466019417475728,
"alnum_prop": 0.7087316762268961,
"repo_name": "orsonl/MBZIRC_challenge2",
"id": "70fd33e04f19b6b60b4eaaa62dc06de3503c2599",
"size": "3161",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kuri_mbzirc_challenge_2_exploration/scripts/test_velodyne_box_detection.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Arduino",
"bytes": "2770"
},
{
"name": "C++",
"bytes": "180245"
},
{
"name": "CMake",
"bytes": "14905"
},
{
"name": "HTML",
"bytes": "5060"
},
{
"name": "JavaScript",
"bytes": "11061"
},
{
"name": "Python",
"bytes": "158954"
},
{
"name": "Shell",
"bytes": "569"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    """Auto-generated migration: allow blank channel names and refresh the
    choice lists on the kind/extension/preset fields."""

    dependencies = [
        ('contentcuration', '0041_channel_previous_tree'),
    ]

    operations = [
        # Allow a channel to be saved without a name.
        migrations.AlterField(
            model_name='channel',
            name='name',
            field=models.CharField(blank=True, max_length=200),
        ),
        # Refresh the fixed choice list of content kinds.
        migrations.AlterField(
            model_name='contentkind',
            name='kind',
            field=models.CharField(choices=[(b'topic', b'Topic'), (b'video', b'Video'), (b'audio', b'Audio'), (b'exercise', b'Exercise'),
                                            (b'document', b'Document'), (b'html5', b'HTML5 App')], max_length=200, primary_key=True, serialize=False),
        ),
        # Refresh the supported file extensions.
        migrations.AlterField(
            model_name='fileformat',
            name='extension',
            field=models.CharField(choices=[(b'mp4', b'MP4 Video'), (b'vtt', b'VTT Subtitle'), (b'srt', b'SRT Subtitle'), (b'mp3', b'MP3 Audio'), (b'wav', b'WAV Audio'), (b'pdf', b'PDF Document'), (b'jpg', b'JPG Image'), (
                b'jpeg', b'JPEG Image'), (b'png', b'PNG Image'), (b'json', b'JSON'), (b'svg', b'SVG Image'), (b'perseus', b'Perseus Exercise'), (b'zip', b'HTML5 Zip')], max_length=40, primary_key=True, serialize=False),
        ),
        # Refresh the format preset identifiers.
        migrations.AlterField(
            model_name='formatpreset',
            name='id',
            field=models.CharField(choices=[(b'high_res_video', b'High Resolution'), (b'low_res_video', b'Low Resolution'), (b'vector_video', b'Vectorized'), (b'video_thumbnail', b'Thumbnail'), (b'video_subtitle', b'Subtitle'), (b'audio', b'Audio'), (b'audio_thumbnail', b'Thumbnail'), (b'document', b'Document'), (b'document_thumbnail', b'Thumbnail'), (
                b'exercise', b'Exercise'), (b'exercise_thumbnail', b'Thumbnail'), (b'exercise_image', b'Exercise Image'), (b'exercise_graphie', b'Exercise Graphie'), (b'channel_thumbnail', b'Channel Thumbnail'), (b'html5_zip', b'HTML5 Zip'), (b'html5_thumbnail', b'HTML5 Thumbnail')], max_length=150, primary_key=True, serialize=False),
        ),
    ]
|
{
"content_hash": "2f8f2aca251d0e773b2ea7bcac438b34",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 354,
"avg_line_length": 58.486486486486484,
"alnum_prop": 0.5910351201478743,
"repo_name": "jayoshih/content-curation",
"id": "a6af55040b77a62d1e6aa16582c9017df0305bec",
"size": "2236",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contentcuration/contentcuration/migrations/0042_auto_20161206_1641.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "175092"
},
{
"name": "Dockerfile",
"bytes": "2215"
},
{
"name": "HTML",
"bytes": "512248"
},
{
"name": "JavaScript",
"bytes": "668479"
},
{
"name": "Makefile",
"bytes": "3409"
},
{
"name": "Python",
"bytes": "844840"
},
{
"name": "Shell",
"bytes": "6970"
},
{
"name": "Smarty",
"bytes": "6640"
},
{
"name": "Vue",
"bytes": "21539"
}
],
"symlink_target": ""
}
|
"""This package contains code and wrapper classes to access the different
types of cloud providers. For example, OpenStack, Amazon, etc.
To add support to a different cloud provider, we need to drop a module
within the providers package and define a *configure_provider* function
within the module. This function will be responsible for checking whether
the necessary packages to access the provider are installed will return
information on the provider:
- Unique Provider's Identification which is a string.
- Reference to a concrete calls built upon the AbstractMachineManager.
- Unique Provider's index which is an integer.
For example, the openstack.py module has the following *configure_provider*
function::
def configure_provider():
import novaclient.client
return ("OPENSTACK", MachineManager, 2)
Each supported provider must return a reference to a concrete class that
inherits from the AbstractMachineManager class defined in this module. This
class is an entry point to a set of methods to access a provider.
"""
import pkgutil
import inspect
import logging
from mysql.fabric.errors import ProviderError
PROVIDERS_TYPE = {}
PROVIDERS_IDX = {}
_LOGGER = logging.getLogger(__name__)
def find_providers():
"""Find which are the available commands.
"""
for imp, name, ispkg in pkgutil.walk_packages(__path__, __name__ + "."):
mod = imp.find_module(name).load_module(name)
_LOGGER.debug("%s %s has got __name__ %s",
"Package" if ispkg else "Module", name, mod.__name__
)
for (mem_name, mem_value) in inspect.getmembers(mod):
if mem_name == "configure_provider" and inspect.isfunction(mem_value):
try:
provider, manager, idx = mem_value()
if provider in PROVIDERS_TYPE:
raise ProviderError(
"Provider type (%s) is already defined (%s)." %
(provider, PROVIDERS_TYPE[provider])
)
if idx in PROVIDERS_IDX:
raise ProviderError(
"Provider index (%s) is already defined (%s)." %
(idx, PROVIDERS_IDX[idx])
)
PROVIDERS_TYPE[provider] = {'manager' : manager, 'idx' : idx}
PROVIDERS_IDX[idx] = {'provider' : provider}
except ImportError:
pass
_LOGGER.debug("Providers %s.", PROVIDERS_TYPE)
def get_provider_idx(provider_type):
"""Return the index associated to the type.
"""
try:
return PROVIDERS_TYPE[provider_type]['idx']
except KeyError as error:
raise ProviderError(
"Provider type (%s) is not supported yet." % (provider_type, )
)
def get_provider_type(provider_idx):
"""Return the type associated to the index.
"""
try:
return PROVIDERS_IDX[provider_idx]['provider']
except KeyError as error:
raise ProviderError(
"Provider index (%s) does not exist." % (provider_idx, )
)
def get_provider_manager(provider_type):
"""Return a reference to a wrapper class that provides the appropriate
methods to access the cloud provider.
:param provider_type: Provider type.
"""
try:
return PROVIDERS_TYPE[provider_type]['manager']
except KeyError as error:
raise ProviderError(
"Provider type (%s) is not supported yet." % (provider_type, )
)
class AbstractMachineManager(object):
"""Wrapper class that is used to manage machines in the cloud.
:param provider: Reference to provider object.
:param version: Version.
:rtype version: string
"""
def __init__(self, provider, version=None):
self.__provider = provider
self.__version = version
@property
def provider(self):
"""Return a reference to the provider.
"""
return self.__provider
@property
def version(self):
"""Return version.
"""
return self.__version
def create_machines(self, parameters, wait_spawning):
"""Create machines.
:param parameters: Parameters to create machines.
:param wait_spwaning: Whether one should wait until the provider
finishes its task or not.
"""
raise NotImplementedError
def search_machines(self, generic_filters, meta_filters):
"""Return machines based on the provided filters.
:param generic_filters: Dictionary with criteria to search for.
:param meta_filters: Dictionary with criteria to search for.
:return: List with machines that match criteria.
"""
raise NotImplementedError
def destroy_machine(self, machine_uuid):
"""Destroy a machine.
:param machine_uuid: UUID that uniquely identifies the machine.
"""
raise NotImplementedError
def assign_public_ip(self, machine, pool):
"""Assign public IP address to a machine.
:param machine: Reference to a machine.
:param pool: Pool from where the address will be withdrawn.
"""
raise NotImplementedError
def remove_public_ip(self, machine):
"""Remove public addresses assigned to a machine.
:param machine: Reference to a machine.
"""
raise NotImplementedError
def create_snapshot(self, machine_uuid, wait_spawning):
"""Create a snapshot from a machine.
:param machine_uuid: Machine's UUID.
:param wait_spwaning: Whether one should wait until the provider
finishes its task or not.
"""
raise NotImplementedError
def destroy_snapshot(self, machine_uuid):
"""Destroy snapshots associated to a machine.
:param machine_uuid: Machine's UUID.
"""
raise NotImplementedError
|
{
"content_hash": "c48504060af950392f53cae110effc0c",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 82,
"avg_line_length": 34.182857142857145,
"alnum_prop": 0.623370110330993,
"repo_name": "scavarda/mysql-dbcompare",
"id": "7205c8c3147634922828bc3a484d788b0078993a",
"size": "6711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysql-utilities-1.6.0/mysql/fabric/providers/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7324"
},
{
"name": "Groff",
"bytes": "332329"
},
{
"name": "Python",
"bytes": "3103169"
}
],
"symlink_target": ""
}
|
try:
from Plugins import Plugin
except:
from PEATDB.Plugins import Plugin
from Tkinter import *
import Pmw
import os, sys, math, string
import csv
import math, numpy
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
from PEATDB.Ekin.Base import EkinProject,EkinDataset
import PEATDB.Ekin.Fitting as Fitting
import tkFileDialog
class VantHoff(Plugin):
"""A plugin to do Van't Hoff Analysis of temperature melting curves"""
"""Author: Damien Farrell"""
capabilities = ['gui','uses_sidepane']
requires = ['pylab','numpy']
menuentry = "Van't Hoff Analysis"
gui_methods = {'getCSV': 'Import CSV',
'loadEkin':'Load Ekin Proj',
'saveEkin':'Save Ekin Proj',
'doAnalysis':"Do Analysis",
#'benchmark': 'Do Benchmark',
'close':'Close' }
about = "A plugin to do Van't Hoff Analysis of temperature melting curves"
R = 8.3144
def __init__(self):
self.path = os.path.expanduser("~")
self.pltConfig()
self.E = None
return
def main(self, parent):
if parent==None:
return
self.parent = parent
self.DB = parent.DB
self.xydata = None
self._doFrame()
return
def _doFrame(self):
if 'uses_sidepane' in self.capabilities:
self.mainwin = self.parent.createChildFrame(width=600)
else:
self.mainwin=Toplevel()
self.mainwin.title(self.menuentry)
self.mainwin.geometry('800x600+200+100')
methods = self._getmethods()
fr = Frame(self.mainwin)
fr.pack(side=LEFT,fill=BOTH)
methods = [m for m in methods if m[0] in self.gui_methods.keys()]
self._createButtons(methods, fr)
self.showDatasetSelector()
self.doall = Pmw.RadioSelect(fr,
buttontype = 'checkbutton',
orient = 'horizontal',
labelpos = 'w')
self.doall.add('Process All')
self.doall.pack()
self.conversions = Pmw.RadioSelect(fr,
buttontype = 'checkbutton',
orient = 'horizontal',
labelpos = 'w')
self.conversions.add('Convert Celsius-Kelvin')
self.conversions.pack()
self.methods = Pmw.RadioSelect(fr,
buttontype = 'checkbutton',
orient = 'vertical',
labelpos = 'w',
label_text = 'Methods:')
for m in ['method 1','method 2','method 3', 'method 4']:
self.methods.add(m)
self.methods.invoke('method 1')
self.methods.pack()
self.sm = Pmw.EntryField(fr,
labelpos = 'w',
value = 5,
label_text = 'Smoothing:')
self.sm.pack()
self.tw = Pmw.EntryField(fr,
labelpos = 'w',
value = 60,
label_text = 'Width of transition:')
self.tw.pack()
return
def _getmethods(self):
"""Get a list of all available public methods"""
import inspect
mems = inspect.getmembers(self, inspect.ismethod)
methods = [m for m in mems if not m[0].startswith('_')]
return methods
def _createButtons(self, methods, fr=None):
"""Dynamically create buttons for supplied methods, which is a tuple
of (method name, label)"""
for m in methods:
b=Button(fr,text=self.gui_methods[m[0]],command=m[1])
b.pack(side=TOP,fill=BOTH)
return
def close(self):
self.mainwin.destroy()
self.plotframe = None
return
def showDatasetSelector(self):
if self.E==None:
return
if hasattr(self, 'dmenu'):
self.dmenu.destroy()
self.dmenu = Pmw.OptionMenu(self.mainwin,
labelpos = 'w',
label_text = 'Dataset:',
items = sorted(self.E.datasets),
command=self.showPreview,
menubutton_width = 8)
self.dmenu.pack(side=TOP,fill=BOTH)
return
def showPreview(self,event=None):
if self.E == None:
return
if not hasattr(self, 'plotframe') or self.plotframe == None:
from Ekin.Plotting import PlotPanel
self.plotframe = PlotPanel(parent=self.mainwin, side=BOTTOM)
self.plotframe.setProject(self.E)
d = self.dmenu.getcurselection()
self.plotframe.plotCurrent(d)
#plt.close(1)
return
def getCSV(self):
"""Import a csv file"""
self.E = EkinProject()
from PEATDB.Ekin.IO import Importer
importer = Importer(self,parent_win=self.mainwin)
newdata = importer.import_multiple()
if newdata == None: return
for n in newdata.keys():
self.E.insertDataset(newdata[n], n, update=None)
print 'imported %s datasets' %len(self.E.datasets)
self.showDatasetSelector()
self.showPreview()
return
def loadEkin(self):
"""Load the ekin prj"""
filename=tkFileDialog.askopenfilename(defaultextension='.ekinprj',
initialdir=os.getcwd(),
filetypes=[("ekinprj","*.ekinprj"),
("All files","*.*")],
parent=self.mainwin)
if not os.path.isfile(filename):
return
self.E = EkinProject()
self.E.openProject(filename)
self.showDatasetSelector()
self.showPreview()
return
def saveEkin(self):
"""save proj"""
if self.E != None:
if self.E.filename == None:
self.E.filename = tkFileDialog.asksaveasfilename(defaultextension='.ekinprj',
initialdir=os.getcwd(),
filetypes=[("ekinprj","*.ekinprj"),
("All files","*.*")],
parent=self.mainwin)
self.E.saveProject()
print 'saved ekin proj'
return
def doAnalysis(self):
"""Execute from GUI"""
if self.E == None:
return
methods = self.methods.getcurselection()
if 'Process All' in self.doall.getcurselection():
self.doAll(methods=methods)
else:
if 'method 1' in methods:
self.fitVantHoff(E=self.E,d=self.dmenu.getcurselection(),
transwidth=int(self.tw.getvalue()))
if 'method 2' in methods:
self.fitElwellSchellman(E=self.E,d=self.dmenu.getcurselection(),
transwidth=int(self.tw.getvalue()))
if 'method 3' in methods:
self.fitDifferentialCurve(E=self.E,d=self.dmenu.getcurselection(),
smooth=int(self.sm.getvalue()))
if 'method 4' in methods:
self.breslauerMethod(E=self.E,d=self.dmenu.getcurselection())#,invert=opts.invert)
return
def guessMidpoint(self,x,y):
"""guess midpoint for unfolding model"""
midy=min(y)+(max(y)-min(y))/2.0
midx=0
closest=1e4
for i in range(len(x)):
c=abs(y[i]-midy)
if c<closest:
midx=x[i]
closest=c
return midx
def transformCD(self,x,y,transwidth=None,ax=None):
"""Transform raw data into fraction unfolded per temp value, by fitting to
a general unfolding equation that extracts baseline/slopes"""
#fit baseline slopes and get intercepts
d50 = self.guessMidpoint(x,y)
print 'fitting to get baseline slopes and intercepts..'
print 'midpoint is %s' %d50
A,X=Fitting.doFit(expdata=zip(x,y),model='Unfolding',noiter=50,silent=True,
guess=False,startvalues=[1,1,1,1,1,d50])
#print X.getResult()
fity = X.getFitLine(x)
fd=X.getFitDict()
if ax!=None:
p=ax.plot(x,fity,'r',lw=2)
self.drawParams(ax,fd)
#we then use slopes and intercepts get frac unfolded at each temp
mn = fd['bn']; mu = fd['bd'] #slopes
#if mu>0.01: mu = 0.01
yn = fd['an']; yu = fd['ad'] #intercepts
d50 = fd['d50']; m = fd['m']
t=[]; f=[]
#print mu, mn
for T,yo in zip(x,y):
fu = (yo-(yn+mn*T)) / ((yu+mu*T)-(yn+mn*T))
#print fu, (yo-(yn+mn*T)), (m), mu, mn
#if f>0:
f.append(fu)
t.append(T)
#try to take useful transition region of data
at,af=t,f
diff=1e5
if transwidth != None:
for i in t:
d=abs(i-d50)
if d<diff:
mid = t.index(i)
diff=d
L=int(mid-transwidth); U=int(mid+transwidth)
t,f = t[L:U], f[L:U]
return at,af,t,f
def fitVantHoff(self, E=None, d=None, xy=None, transwidth=80, invert=False,
show=True, figname=None):
"""Derive fraction unfolded, get K and fit to Van't Hoff.
see http://www.jbc.org/content/277/43/40717.full
or http://www.ncbi.nlm.nih.gov/pmc/articles/PMC2144003/
"""
if E != None:
if not d in E.datasets:
print 'no such dataset, %s' %d
print 'available datasets:', E.datasets
return
ek = E.getDataset(d)
x,y = ek.getxySorted()
elif xy!=None:
x,y = xy
if 'Convert Celsius-Kelvin' in self.conversions.getcurselection():
x = [i+273 for i in x]
if invert == True:
y = [max(y)-i for i in y[:]]
f=plt.figure(figsize=(18,6))
ax=f.add_subplot(131)
p=ax.plot(x,y,'o',alpha=0.6)
ax.set_xlabel('T(K)'); ax.set_ylabel('mdeg')
ax.set_title('raw data')
x1,y1,x,y = self.transformCD(x,y,transwidth,ax)
cw=csv.writer(open('frac_unfolded_'+d+'.csv','w'))
cw.writerow(['temp','frac'])
for i in zip(x1,y1):
cw.writerow(i)
#derive lnK vs 1/T
t=[]; k=[]
for T,fu in zip(x,y):
if fu>=1 or fu<=0:
continue
K = fu/(1-fu)
klog = math.log(K)
k.append(klog)
t.append(1/T)
if len(t)<2: return None, None, None
ax=f.add_subplot(132)
p=ax.plot(x1,y1,'o',color='g',alpha=0.6)
ax.set_xlabel('T(K)'); ax.set_ylabel('fu')
ax.set_title('fraction unfolded')
ax=f.add_subplot(133)
p=ax.plot(t,k,'x',mew=2,color='black')
ax.set_xlabel('1/T')#(r'$1/T ($K^-1)$')
ax.set_ylabel('ln K')
formatter = matplotlib.ticker.ScalarFormatter()
formatter.set_scientific(True)
formatter.set_powerlimits((0,0))
ax.xaxis.set_major_formatter(formatter)
for l in ax.get_xticklabels():
l.set_rotation(30)
#fit this van't hoff plot
A,X=Fitting.doFit(expdata=zip(t,k),model='Linear')
fitk = X.getFitLine(t)
p=ax.plot(t,fitk,'r',lw=2)
fd=X.getFitDict()
#self.drawParams(ax,fd)
#slope is deltaH/R/1000 in kJ
deltaH = -fd['a']*self.R/1000
deltaS = fd['b']*self.R/1000
f.suptitle("Method 1 - deltaH: %2.2f deltaS: %2.2f" %(deltaH,deltaS),size=18)
f.subplots_adjust(bottom=0.15,top=0.85)
if show==True:
self.showTkFigure(f)
if figname == None: figname = d
figname = figname.replace('.','_')
fname = figname+'m1'+'.png'
f.savefig(fname,dpi=300)
print 'plot saved to %s' %os.path.abspath(fname)
#plt.close()
if E!=None:
fdata = Fitting.makeFitData(X.name,vrs=X.variables)
E.insertDataset(xydata=[t,k], newname=d+'_vanthoff',replace=True,fit=fdata)
#E.saveProject()
return deltaH, deltaS, ax
def fitElwellSchellman(self,E=None, d=None, xy=None,transwidth=50,
invert=False,show=True,figname=None):
"""Fit entire raw data simultaneously to the three main thermodynamic
parameters using Elwell/Schellman method"""
if E !=None:
ek = E.getDataset(d)
x,y,a, xerr,yerr = ek.getAll()
elif xy!=None:
x,y = xy
else:
return
if invert == True:
y = [max(y)-i for i in y[:]]
f=plt.figure(figsize=(10,5))
ax=f.add_subplot(121)
p=ax.plot(x,y,'o',alpha=0.5)
ax.set_xlabel('T');ax.set_xlabel('mdeg')
ax.set_title('raw data')
x1,y1,x,y = self.transformCD(x,y,transwidth,ax)
t=[];dg=[]
R=8.3144e-3
for T,fu in zip(x,y):
if fu>=1 or fu<=0:
continue
K = fu/(1-fu)
deltaGt = -R * T * math.log(K)
dg.append(deltaGt)
t.append(T)
ax1=f.add_subplot(122)
p=ax1.plot(t,dg,'x',mew=2,color='black')
ax1.set_xlabel('T'); ax1.set_ylabel('dG(T)')
ax.set_title('stability curve')
A,X=Fitting.doFit(expdata=zip(t,dg),model='schellman',grad=1e-9,conv=1e-9)
fity = X.getFitLine(t)
p=ax1.plot(t,fity,'r',lw=2)
fd=X.getFitDict()
self.drawParams(ax1,fd)
deltaH=fd['deltaH']; deltacp=fd['deltacp']; Tm=fd['Tm']
f.suptitle("Method 2 - deltaH: %2.2f deltaCp: %2.2e Tm: %2.2f" %(deltaH,deltacp,Tm),size=18)
if show == True:
self.showTkFigure(f)
if figname == None: figname = d
figname = figname.replace('.','_')
fname = figname+'m1'+'.png'
f.savefig(fname,dpi=300)
print 'plot saved to %s' %os.path.abspath(fname)
if E!=None:
fdata = Fitting.makeFitData(X.name,vrs=X.variables)
E.insertDataset(xydata=[t,dg], newname=d+'_vanthoff2',replace=True,fit=fdata)
#E.saveProject()
return deltaH, Tm, deltacp
def breslauerMethod(self,E=None, d=None, xy=None,invert=False,
show=True,figname=None):
"""Finds slope of trans region and plugs this in to equation
http://www.springerlink.com/content/r34n0201g30563u7/ """
if E !=None:
ek = E.getDataset(d)
x,y,a, xerr,yerr = ek.getAll()
elif xy!=None:
x,y = xy
else:
return
f=plt.figure(figsize=(10,6))
ax=f.add_subplot(111)
ax.set_xlabel('T')
p=ax.plot(x,y,'o',alpha=0.5)
d50 = self.guessMidpoint(x,y)
A,X=Fitting.doFit(expdata=zip(x,y),model='Unfolding',conv=1e-7,noiter=60,
guess=False,startvalues=[1,1,1,1,1,d50])
fity = X.getFitLine(x)
p=ax.plot(x,fity,'r',lw=2)
fd=X.getFitDict()
self.drawParams(ax,fd)
Tm = fd['d50']; m = fd['m']
R = 8.3144e-3
deltaH = R * math.pow(Tm,2) * m
f.suptitle("Method 4 - deltaH: %2.2f Tm: %2.2f" %(deltaH,Tm),size=18)
if show == True:
self.showTkFigure(f)
if figname != None:
figname = figname.replace('.','_')
f.savefig(figname)
plt.close()
return deltaH, Tm
def fitDifferentialCurve(self, E=None, d=None, xy=None,smooth=0,
invert=False,show=True,figname=None):
"""Derive differential denaturation curve and fit to get deltaH
We smooth the unfolding curve and then differentiate and finally
fit to a 3 parameter equation.
See http://www.ncbi.nlm.nih.gov/pubmed/10933511"""
if E !=None:
ek = E.getDataset(d)
x,y,a, xerr,yerr = ek.getAll()
elif xy!=None:
x,y = xy
else:
return
if invert == True:
y = [max(y)-i for i in y[:]]
leg=[]; lines=[]
f=plt.figure(figsize=(10,5))
ax=f.add_subplot(121)
p=ax.plot(x,y,'x',color='black',mew=3,alpha=0.5)
leg.append(p); lines.append('original')
#smooth
if smooth == 0:
smooth=int(len(x)/15.0)
s=self.smoothListGaussian(y,smooth)
p=ax.plot(x[:len(s)-1],s[:-1],lw=3)
leg.append(p); lines.append('smoothed')
ax.set_title("original data")
ax.set_xlabel('T')
ax1=f.add_subplot(122)
#differentiate
dx,ds = self.differentiate(x[:len(s)],s)
#ds = [i/max(ds) for i in ds]
ds = [i*10 for i in ds]
cw=csv.writer(open('diffcd.csv','w'))
for row in zip(dx,ds):
cw.writerow(row)
p=ax1.plot(dx,ds,'-',lw=1.5,alpha=0.7,color='black')
leg.append(p); lines.append('differential')
ax1.set_title("differential denaturation")
ax1.set_xlabel('T'); ax1.set_ylabel('dsignal/dT')
A,X=Fitting.doFit(expdata=zip(dx,ds),model='diffDenaturation',grad=1e-9,conv=1e-10)
fity = X.getFitLine(dx)
p=ax1.plot(dx,fity,'r',lw=2)
leg.append(p); lines.append('fit')
t=X.getFitDict()
self.drawParams(ax1,t)
dHkcal=t['deltaH']/4.184
f.suptitle('Method 3 - deltaH: %2.2f kJ/mol (%2.2f kcal) Tm: %2.2f' %(t['deltaH'],dHkcal,t['Tm']),size=18)
ax.legend(leg,lines,loc='best',prop=FontProperties(size="smaller"))
#f.subplots_adjust(hspace=0.8)
if show == True:
self.showTkFigure(f)
if figname != None:
figname = figname.replace('.','_')
f.savefig(figname+'m3',dpi=300)
plt.close()
if E!=None:
fdata = Fitting.makeFitData(X.name,vrs=X.variables)
E.insertDataset(xydata=[dx,ds], newname=d+'_diff',replace=True,fit=fdata)
#E.saveProject()
return t['deltaH'],t['Tm']
def differentiate(self, x,y):
dy = numpy.diff(y,1)
dx = x[:len(dy)]
return dx,dy
def smoothListGaussian(self,data,degree=5):
"""Gaussian data smoothing function"""
#buffer data to avoid offset result
data=list(data)
data = [data[0]]*(degree-1) + data + [data[-1]]*degree
window=degree*2-1
weight=numpy.array([1.0]*window)
weightGauss=[]
for i in range(window):
i=i-degree+1
frac=i/float(window)
gauss=1/(numpy.exp((4*(frac))**2))
weightGauss.append(gauss)
weight=numpy.array(weightGauss)*weight
smoothed=[0.0]*(len(data)-window)
for i in range(len(smoothed)):
smoothed[i]=sum(numpy.array(data[i:i+window])*weight)/sum(weight)
return smoothed
def invert(self,data):
inv=[i for i in data]
return inv
def simulateCD(self,noise=1.0):
"""Simulate some CD spec data"""
x=list(numpy.arange(290,380,0.2)); y=[]
X=Fitting.getFitter(model='Unfolding',
vrs=[-16, 0.01, -11.6, 0.01, 2.7, 324])
fity = X.getFitLine(x)
for i in fity:
noise=numpy.random.normal(i, 1.0/2)
y.append(i+noise)
cw=csv.writer(open('cd.csv','w'))
for row in zip(x,y):
cw.writerow(row)
return x,y
def drawParams(self,ax,d):
ymin, ymax = ax.get_ylim()
xmin, xmax = ax.get_xlim()
inc=(ymax-ymin)/20
xinc=(xmax-xmin)/20
y=ymax-inc
for k in d:
s = k+'='+str(round(d[k],3))
ax.text(xmin+xinc,y,s,fontsize=10)
y-=inc
return
def pltConfig(self):
#plt.rc('text', usetex=True)
plt.rc('figure.subplot', hspace=0.3,wspace=0.3)
#plt.rc('axes',titlesize=22)
plt.rc('font',family='monospace')
return
def doAll(self, methods=['method 1']):
"""Process all datasets in ekinprj"""
E=self.E
vals={}
from Dialogs import PEATDialog
pb=PEATDialog(self.mainwin, option='progressbar',
message='Analysing Data..')
pb.update_progress(0)
total = len(E.datasets); count=0
for d in E.datasets:
if '_diff' in d or '_vanthoff' in d:
continue
vals[d]={}
name = d
if 'method 1' in methods:
vals[d]['dH1'], vals[d]['dS1'], ax = self.fitVantHoff(E,d,
transwidth=int(self.tw.getvalue()),
show=False,figname=name)
if 'method 2' in methods:
vals[d]['dH2'], vals[d]['dTm2'], vals[d]['dCp2'] = self.fitElwellSchellman(E,d,show=False,figname=name)
if 'method 3' in methods:
vals[d]['dH3'], vals[d]['dTm3'] = self.fitDifferentialCurve(E,d,show=False,figname=name)
count += 1
pb.update_progress(float(count)/total*100.0)
pb.close()
self.showTable(vals)
return
def showTable(self, data):
"""Show results in table"""
from PEATDB.DictEdit import DictEditor
D=DictEditor(self.mainwin)
D.loadTable(data)
return
def benchmark(self,E=None,d=None, method=1):
"""Test methods with varying paramaters, smoothing etc"""
if E==None and self.E != None:
E = self.E; d=self.dmenu.getcurselection()
path='vh_benchmark'
if not os.path.exists(path):
os.mkdir(path)
dHvals=[]
if method == 1:
xlabel = 'width (K)'
title = 'method 1: deltaH variation with trans region width fit'
vals=range(5,140,5)
for w in vals:
dH, dS, ax = self.fitVantHoff(E,d,transwidth=w,show=False,
figname=os.path.join(path,'%s_%s.png' %(d,w)))
if dH == None: dH=0
dHvals.append(dH)
#take best values from middle
#dHvals= dHvals[5:16]
elif method == 2:
xlabel = 'width (K)'
title = 'method 2: deltaH variation with width fit'
vals=range(5,140,5)
for w in vals:
dH, dcp, dTm = self.fitElwellSchellman(E,d,transwidth=w,show=False,
figname=os.path.join(path,'%s_%s.png' %(d,w)))
dHvals.append(dH)
elif method == 3:
xlabel = 'smoothing degree'
title = 'method 3: deltaH variation with degree of smoothing'
vals=range(1,30,3)
for s in vals:
dH, dTm = self.fitDifferentialCurve(E,d,smooth=s,show=False,
figname=os.path.join(path,'%s_%s.png' %(d,s)))
dHvals.append(dH)
mean = numpy.mean(dHvals)
stdev = numpy.std(dHvals)
f=plt.figure()
ax=f.add_subplot(111)
ax.plot(vals, dHvals,lw=2)
ax.set_xlabel(xlabel)
ax.set_ylabel('deltaH (kJ)')
ax.set_title('mean: %2.2f stdev: %2.2f'%(mean, stdev))
f.suptitle(title)
f.savefig('benchmark_%s.png' %method)
cw=csv.writer(open('benchmark_%s.csv' %method,'w'))
for row in zip(vals,dHvals):
cw.writerow(row)
return
def benchmarkLimitedData(self, E=None,d=None, method=1):
"""test any method with varying limited data"""
if E==None and self.E != None:
E = self.E; d=self.dmenu.getcurselection()
path='vh_benchmark'
if not os.path.exists(path):
os.mkdir(path)
dHvals=[]
vals=[]
if method == 1:
L=range(5,140,5)
for w in vals:
dH, dS, ax = self.fitVantHoff(E,d,transwidth=w,show=False,
figname=os.path.join(path,'%s_%s.png' %(d,w)))
return
@classmethod
def plotCorrelation(self,x=None,y=None,xlabel='method1',ylabel='method2'):
if x==None:
data=open('compared.csv','r')
cr=csv.reader(data)
x=[float(r[0]) for r in cr]; data.seek(0)
y=[float(r[1]) for r in cr]
f=plt.figure()
ax=f.add_subplot(111)
line = ax.scatter(x, y, marker='o',alpha=0.8)
cl = numpy.arange(0,max(x)+50)
ax.plot(cl, cl, 'g', alpha=0.5,lw=2)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
ax.set_xlim(150,600); ax.set_ylim(150,600)
ax.set_title('Correlation')
from scipy.stats import stats
cc = str(round(pow(stats.pearsonr(x,y)[0],2),2))
ax.text(400,180, r'$r^2= %s$' %cc, fontsize=16)
self.showTkFigure(f)
return
def showTkFigure(self, fig):
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
fr = Toplevel()
canvas = FigureCanvasTkAgg(fig, master=fr)
#self.canvas.show()
canvas.get_tk_widget().pack(side=TOP, fill=X, expand=1)
mtoolbar = NavigationToolbar2TkAgg(canvas, fr)
mtoolbar.update()
canvas._tkcanvas.pack(side=BOTTOM, fill=BOTH, expand=1)
return
def main():
"""Run some analysis"""
from optparse import OptionParser
parser = OptionParser()
app = VantHoff()
parser.add_option("-f", "--file", dest="file",
help="Open a local db")
parser.add_option("-e", "--ekinprj", dest="ekinprj",
help="Open an ekin project")
parser.add_option("-d", "--dataset", dest="dataset",
help="Dataset name")
parser.add_option("-m", "--method", dest="method", default=1, type='int',
help="Choose method - 1: Van't Hoff plot, 2: Schellman, 3: Differential fit, 4: Breslauer")
parser.add_option("-b", "--benchmark", dest="benchmark", action='store_true',
help="Test", default=False)
parser.add_option("-a", "--all", dest="all", action='store_true',
help="Do all datasets in ekinprj", default=False)
parser.add_option("-w", "--width", dest="width", default=50, type='int',
help="Width of transition region to fit for method 1")
parser.add_option("-s", "--smoothing", dest="smoothing", default=5, type='int',
help="Degree of smoothing to apply in method 2 (default 5)")
parser.add_option("-i", "--invert", dest="invert", action='store_true',
help="Invert raw data", default=False)
opts, remainder = parser.parse_args()
if opts.file != None and os.path.exists(opts.file):
app.loadDB(opts.file)
if opts.ekinprj != None and os.path.exists(opts.ekinprj):
E = EkinProject()
E.openProject(opts.ekinprj)
d = opts.dataset
else:
x,y = app.simulateCD()
E = EkinProject()
d='cdtest'
E.insertDataset(xydata=[x,y], newname=d)
if opts.all == True:
self.doAll(E, methods)
if opts.benchmark == True:
app.benchmark(E,d,method=opts.method)
#app.plotCorrelation()
else:
if opts.method == 1:
app.fitVantHoff(E,d,transwidth=opts.width,invert=opts.invert,figname=d)
elif opts.method == 2:
app.fitElwellSchellman(E,d,transwidth=opts.width,invert=opts.invert,figname=d)
elif opts.method == 3:
app.fitDifferentialCurve(E,d,smooth=opts.smoothing,invert=opts.invert,figname=d)
elif opts.method == 4:
app.breslauerMethod(E,d,invert=opts.invert)
if __name__ == '__main__':
main()
|
{
"content_hash": "96fa5d4b93003f833f1a7004d3f9c176",
"timestamp": "",
"source": "github",
"line_count": 765,
"max_line_length": 119,
"avg_line_length": 36.712418300653596,
"alnum_prop": 0.5243724408047,
"repo_name": "dmnfarrell/peat",
"id": "8ed21ed04955dd21f4c3754430435b5092cb9811",
"size": "29021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PEATDB/plugins/VantHoffAnalysis.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "243"
},
{
"name": "C",
"bytes": "744763"
},
{
"name": "C++",
"bytes": "999138"
},
{
"name": "CSS",
"bytes": "10879"
},
{
"name": "Gnuplot",
"bytes": "311"
},
{
"name": "JavaScript",
"bytes": "60380"
},
{
"name": "Makefile",
"bytes": "12428"
},
{
"name": "Mathematica",
"bytes": "964"
},
{
"name": "Matlab",
"bytes": "820"
},
{
"name": "Mercury",
"bytes": "26238794"
},
{
"name": "PHP",
"bytes": "92905"
},
{
"name": "Python",
"bytes": "5466696"
},
{
"name": "Shell",
"bytes": "2984"
}
],
"symlink_target": ""
}
|
from test.test_support import have_unicode, run_unittest
import unittest
class base_set:
def __init__(self, el):
self.el = el
class set(base_set):
def __contains__(self, el):
return self.el == el
class seq(base_set):
def __getitem__(self, n):
return [self.el][n]
class TestContains(unittest.TestCase):
def test_common_tests(self):
a = base_set(1)
b = set(1)
c = seq(1)
self.assertIn(1, b)
self.assertNotIn(0, b)
self.assertIn(1, c)
self.assertNotIn(0, c)
self.assertRaises(TypeError, lambda: 1 in a)
self.assertRaises(TypeError, lambda: 1 not in a)
# test char in string
self.assertIn('c', 'abc')
self.assertNotIn('d', 'abc')
self.assertIn('', '')
self.assertIn('', 'abc')
self.assertRaises(TypeError, lambda: None in 'abc')
if have_unicode:
def test_char_in_unicode(self):
self.assertIn('c', unicode('abc'))
self.assertNotIn('d', unicode('abc'))
self.assertIn('', unicode(''))
self.assertIn(unicode(''), '')
self.assertIn(unicode(''), unicode(''))
self.assertIn('', unicode('abc'))
self.assertIn(unicode(''), 'abc')
self.assertIn(unicode(''), unicode('abc'))
self.assertRaises(TypeError, lambda: None in unicode('abc'))
# test Unicode char in Unicode
self.assertIn(unicode('c'), unicode('abc'))
self.assertNotIn(unicode('d'), unicode('abc'))
# test Unicode char in string
self.assertIn(unicode('c'), 'abc')
self.assertNotIn(unicode('d'), 'abc')
def test_builtin_sequence_types(self):
# a collection of tests on builtin sequence types
a = range(10)
for i in a:
self.assertIn(i, a)
self.assertNotIn(16, a)
self.assertNotIn(a, a)
a = tuple(a)
for i in a:
self.assertIn(i, a)
self.assertNotIn(16, a)
self.assertNotIn(a, a)
class Deviant1:
"""Behaves strangely when compared
This class is designed to make sure that the contains code
works when the list is modified during the check.
"""
aList = range(15)
def __cmp__(self, other):
if other == 12:
self.aList.remove(12)
self.aList.remove(13)
self.aList.remove(14)
return 1
self.assertNotIn(Deviant1(), Deviant1.aList)
class Deviant2:
"""Behaves strangely when compared
This class raises an exception during comparison. That in
turn causes the comparison to fail with a TypeError.
"""
def __cmp__(self, other):
if other == 4:
raise RuntimeError, "gotcha"
try:
self.assertNotIn(Deviant2(), a)
except TypeError:
pass
def test_main():
run_unittest(TestContains)
if __name__ == '__main__':
test_main()
|
{
"content_hash": "cae59ebe40194de4867914334d370dd0",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 72,
"avg_line_length": 29.405405405405407,
"alnum_prop": 0.5122549019607843,
"repo_name": "ktan2020/legacy-automation",
"id": "960b6c26820b2724d636416de984233cbe86b4e4",
"size": "3264",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "win/Lib/test/test_contains.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "913"
},
{
"name": "Ada",
"bytes": "289"
},
{
"name": "Assembly",
"bytes": "687"
},
{
"name": "Boo",
"bytes": "540"
},
{
"name": "C",
"bytes": "40116"
},
{
"name": "C#",
"bytes": "474"
},
{
"name": "C++",
"bytes": "393"
},
{
"name": "CSS",
"bytes": "70883"
},
{
"name": "ColdFusion",
"bytes": "1012"
},
{
"name": "Common Lisp",
"bytes": "1034"
},
{
"name": "D",
"bytes": "1858"
},
{
"name": "Eiffel",
"bytes": "426"
},
{
"name": "Erlang",
"bytes": "9243"
},
{
"name": "FORTRAN",
"bytes": "1810"
},
{
"name": "Forth",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "2366"
},
{
"name": "Haskell",
"bytes": "816"
},
{
"name": "Haxe",
"bytes": "455"
},
{
"name": "Java",
"bytes": "1155"
},
{
"name": "JavaScript",
"bytes": "69444"
},
{
"name": "Lua",
"bytes": "795"
},
{
"name": "Matlab",
"bytes": "1278"
},
{
"name": "OCaml",
"bytes": "350"
},
{
"name": "Objective-C++",
"bytes": "885"
},
{
"name": "PHP",
"bytes": "1411"
},
{
"name": "Pascal",
"bytes": "388"
},
{
"name": "Perl",
"bytes": "252651"
},
{
"name": "Pike",
"bytes": "589"
},
{
"name": "Python",
"bytes": "42085780"
},
{
"name": "R",
"bytes": "1156"
},
{
"name": "Ruby",
"bytes": "480"
},
{
"name": "Scheme",
"bytes": "282"
},
{
"name": "Shell",
"bytes": "30518"
},
{
"name": "Smalltalk",
"bytes": "926"
},
{
"name": "Squirrel",
"bytes": "697"
},
{
"name": "Stata",
"bytes": "302"
},
{
"name": "SystemVerilog",
"bytes": "3145"
},
{
"name": "Tcl",
"bytes": "1039"
},
{
"name": "TeX",
"bytes": "1746"
},
{
"name": "VHDL",
"bytes": "985"
},
{
"name": "Vala",
"bytes": "664"
},
{
"name": "Verilog",
"bytes": "439"
},
{
"name": "Visual Basic",
"bytes": "2142"
},
{
"name": "XSLT",
"bytes": "152770"
},
{
"name": "ooc",
"bytes": "890"
},
{
"name": "xBase",
"bytes": "769"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.