| text (string, lengths 4 to 1.02M) | meta (dict) |
|---|---|
from __future__ import print_function
import bitstring
import errno
import glob
import io
import os
import sys
from signal import signal, SIG_DFL, SIGINT
import time
import usb
signal(SIGINT, SIG_DFL)  # restore default SIGINT handling so Ctrl-C exits immediately
# make sure we're using >=pyusb-1
if usb.version_info[0] < 1:
print("pyusb-1 or newer is required")
sys.exit(1)
# look for m1k
m1k = usb.core.find(idVendor=0x064b, idProduct=0x784c)
if m1k is not None:
print("m1k device found, forcing into command mode")
try:
m1k.ctrl_transfer(0x40, 0xBB)
except usb.core.USBError as e:
if e.errno != errno.EIO:
raise
# wait for the device to be re-enumerated
time.sleep(1)
# look for m1k in programming mode
dev = usb.core.find(idVendor=0x03eb, idProduct=0x6124)
if dev is None:
print(
"no device found, make sure an m1k is plugged in and "
"if necessary force it into command mode")
sys.exit(1)
try:
dev.detach_kernel_driver(0)
dev.detach_kernel_driver(1)
except (usb.core.USBError, NotImplementedError):
    # no kernel driver attached, or detaching is unsupported on this platform
    pass
try:
dev.set_configuration(1)
except usb.core.USBError:
    print('error configuring device, try unplugging it and plugging it back in')
sys.exit(1)
regBase = 0x400e0800
flashBase = 0x80000
offset = 0
def getStr():
return ''.join(map(chr, dev.read(0x82, 512, 1)))
def putStr(x):
return dev.write(0x01, x, 1)
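# The exchanges below use the SAM-BA ROM monitor's ASCII protocol (summarised
# from the commands this script sends; see the SAM3U datasheet for details):
#   "W<addr>,<value>#"  write a 32-bit word
#   "w<addr>,<bytes>#"  read memory back
#   "G<addr>#"          jump to <addr>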
print("please wait, flashing device firmware")
# erase flash
putStr("W400E0804,5A000005#")
time.sleep(0.1)
getStr()
# read the EEFC status register to confirm the erase completed
putStr("w400E0808,4#")
time.sleep(0.01)
getStr()
getStr()
getStr()
try:
firmware_file = sys.argv[1]
except IndexError:
# fallback to current dir
firmware_file = './m1000.bin'
if not os.path.exists(firmware_file):
print("firmware file doesn't exist: {}".format(firmware_file))
sys.exit(1)
# read in firmware file
raw = io.open(firmware_file, mode='rb').read()
raw += b'\x00' * ((-len(raw)) % 256)  # pad to a 256-byte page boundary
fw = bitstring.ConstBitStream(bytes=raw)
# write each word
page = 0
for pos in range(0, int(fw.length/8), 4):
fw.bytepos = pos
    addr = format(flashBase + pos, '08x')
    data = format(fw.peek("<L"), '08x')
    cmd = ("W" + addr + "," + data + "#").upper()
try:
putStr(cmd)
getStr()
getStr()
    except usb.core.USBError:
        print('error at ' + cmd)
        sys.exit(1)
# if at end of page
if pos & 0xFC == 0xFC:
# write page
cmd = "W400E0804,5A00"+hex(page).lstrip("0x").zfill(2)+"03#"
putStr(cmd)
time.sleep(0.01)
getStr()
getStr()
# check that page is written
putStr("w400E0808,4#")
time.sleep(0.01)
getStr()
getStr()
page_status = getStr().strip()
if not page_status or int(page_status, 16) != 1:
print('error writing page {}'.format(page))
sys.exit(1)
page += 1
# disable SAM-BA boot: set GPNVM bit 1 (boot from flash) via the EEFC SGPB command
putStr("W400E0804,5A00010B#")
getStr()
getStr()
# jump to flash
putStr("G00000000#")
getStr()
print("successfully updated firmware, please unplug and replug the device to finish the process")
|
{
"content_hash": "141834e4e3fc842cbbb88fa08ad28e0f",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 97,
"avg_line_length": 23.435114503816795,
"alnum_prop": 0.6342019543973941,
"repo_name": "analogdevicesinc/m1k-fw",
"id": "118b8f6affbc21c9ac4d496386f62ad8af1a6e05",
"size": "3323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/sam-ba.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1841721"
},
{
"name": "C++",
"bytes": "109669"
},
{
"name": "Makefile",
"bytes": "5726"
},
{
"name": "Python",
"bytes": "7830"
},
{
"name": "Shell",
"bytes": "395"
}
],
"symlink_target": ""
}
|
from jax._src.distributed import (initialize, shutdown)
|
{
"content_hash": "d3ebc5d1f75cc904ad3488295b67aac7",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 55,
"avg_line_length": 56,
"alnum_prop": 0.8035714285714286,
"repo_name": "google/jax",
"id": "284ae6f95f48fb828a6001a3d532b32bfb4a0db9",
"size": "638",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "jax/distributed.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "25710"
},
{
"name": "C++",
"bytes": "233622"
},
{
"name": "Dockerfile",
"bytes": "1514"
},
{
"name": "Jupyter Notebook",
"bytes": "98807"
},
{
"name": "Python",
"bytes": "7395044"
},
{
"name": "Shell",
"bytes": "17273"
},
{
"name": "Starlark",
"bytes": "88279"
}
],
"symlink_target": ""
}
|
from django.test import SimpleTestCase
from .utils import render, setup
class CommentSyntaxTests(SimpleTestCase):
@setup({'comment-syntax01': '{# this is hidden #}hello'})
def test_comment_syntax01(self):
output = render('comment-syntax01')
self.assertEqual(output, 'hello')
@setup({'comment-syntax02': '{# this is hidden #}hello{# foo #}'})
def test_comment_syntax02(self):
output = render('comment-syntax02')
self.assertEqual(output, 'hello')
@setup({'comment-syntax03': 'foo{# {% if %} #}'})
def test_comment_syntax03(self):
output = render('comment-syntax03')
self.assertEqual(output, 'foo')
@setup({'comment-syntax04': 'foo{# {% endblock %} #}'})
def test_comment_syntax04(self):
output = render('comment-syntax04')
self.assertEqual(output, 'foo')
@setup({'comment-syntax05': 'foo{# {% somerandomtag %} #}'})
def test_comment_syntax05(self):
output = render('comment-syntax05')
self.assertEqual(output, 'foo')
@setup({'comment-syntax06': 'foo{# {% #}'})
def test_comment_syntax06(self):
output = render('comment-syntax06')
self.assertEqual(output, 'foo')
@setup({'comment-syntax07': 'foo{# %} #}'})
def test_comment_syntax07(self):
output = render('comment-syntax07')
self.assertEqual(output, 'foo')
@setup({'comment-syntax08': 'foo{# %} #}bar'})
def test_comment_syntax08(self):
output = render('comment-syntax08')
self.assertEqual(output, 'foobar')
@setup({'comment-syntax09': 'foo{# {{ #}'})
def test_comment_syntax09(self):
output = render('comment-syntax09')
self.assertEqual(output, 'foo')
@setup({'comment-syntax10': 'foo{# }} #}'})
def test_comment_syntax10(self):
output = render('comment-syntax10')
self.assertEqual(output, 'foo')
@setup({'comment-syntax11': 'foo{# { #}'})
def test_comment_syntax11(self):
output = render('comment-syntax11')
self.assertEqual(output, 'foo')
@setup({'comment-syntax12': 'foo{# } #}'})
def test_comment_syntax12(self):
output = render('comment-syntax12')
self.assertEqual(output, 'foo')
@setup({'comment-tag01': '{% comment %}this is hidden{% endcomment %}hello'})
def test_comment_tag01(self):
output = render('comment-tag01')
self.assertEqual(output, 'hello')
@setup({'comment-tag02': '{% comment %}this is hidden{% endcomment %}'
'hello{% comment %}foo{% endcomment %}'})
def test_comment_tag02(self):
output = render('comment-tag02')
self.assertEqual(output, 'hello')
@setup({'comment-tag03': 'foo{% comment %} {% if %} {% endcomment %}'})
def test_comment_tag03(self):
output = render('comment-tag03')
self.assertEqual(output, 'foo')
@setup({'comment-tag04': 'foo{% comment %} {% endblock %} {% endcomment %}'})
def test_comment_tag04(self):
output = render('comment-tag04')
self.assertEqual(output, 'foo')
@setup({'comment-tag05': 'foo{% comment %} {% somerandomtag %} {% endcomment %}'})
def test_comment_tag05(self):
output = render('comment-tag05')
self.assertEqual(output, 'foo')
|
{
"content_hash": "09c059f56d6c0ad240897bd16721f41e",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 86,
"avg_line_length": 35.869565217391305,
"alnum_prop": 0.5987878787878788,
"repo_name": "andyzsf/django",
"id": "35c720efd7906d3acbaa57f6b91c455fb6099afd",
"size": "3300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/template_tests/syntax_tests/test_comment.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "53429"
},
{
"name": "JavaScript",
"bytes": "103687"
},
{
"name": "Makefile",
"bytes": "5553"
},
{
"name": "Python",
"bytes": "10278616"
},
{
"name": "Shell",
"bytes": "10452"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from chaco.default_colormaps import *
|
{
"content_hash": "1487d46a82fc240307f4c293fb39ff77",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 38,
"avg_line_length": 38.5,
"alnum_prop": 0.7922077922077922,
"repo_name": "enthought/etsproxy",
"id": "c041b32faf4383229b020f92d27a9ede13a092ff",
"size": "92",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/chaco/default_colormaps.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "363714"
}
],
"symlink_target": ""
}
|
from queue import Empty, Queue
from threading import Lock
from time import time
import pkg_resources
cb_version = pkg_resources.get_distribution("couchbase").version
if cb_version[0] == '3':
from datetime import timedelta
from couchbase.cluster import (
Cluster,
ClusterOptions,
ClusterTimeoutOptions,
)
from couchbase_core.cluster import PasswordAuthenticator
else:
from couchbase.bucket import Bucket
class ClientUnavailableError(Exception):
pass
class BucketWrapper:
TIMEOUT = 120
def __init__(self, host, bucket, password, quiet=True, port=8091):
connection_string = 'couchbase://{}:{}/{}?password={}'\
.format(host, port, bucket, password)
self.client = Bucket(connection_string=connection_string, quiet=quiet)
self.client.timeout = self.TIMEOUT
self.use_count = 0
self.use_time = 0
self.last_use_time = 0
def start_using(self):
self.last_use_time = time()
def stop_using(self):
self.use_time += time() - self.last_use_time
self.use_count += 1
def query(self, ddoc, view, key):
return self.client.query(ddoc, view, key=key)
def set(self, key, doc):
self.client.set(key, doc)
def delete(self, key):
self.client.delete(key)
def upsert(self, key, doc):
self.client.upsert(key, doc)
def endure(self, key, persist_to, replicate_to, interval, timeout=120):
self.client.endure(key,
persist_to=persist_to,
replicate_to=replicate_to,
interval=interval,
timeout=timeout)
class CollectionsWrapper:
TIMEOUT = 120
def __init__(self, host, bucket, username, password, quiet=True, port=8091):
connection_string = 'couchbase://{}?password={}'.format(host, password)
pass_auth = PasswordAuthenticator(username, password)
timeout = ClusterTimeoutOptions(kv_timeout=timedelta(seconds=self.TIMEOUT))
options = ClusterOptions(authenticator=pass_auth, timeout_options=timeout)
self.cluster = Cluster(connection_string=connection_string, options=options)
self.bucket = self.cluster.bucket(bucket)
self.client = self.bucket.default_collection()
self.use_count = 0
self.use_time = 0
self.last_use_time = 0
def start_using(self):
self.last_use_time = time()
def stop_using(self):
self.use_time += time() - self.last_use_time
self.use_count += 1
def query(self, ddoc, view, key):
return self.cluster.view_query(ddoc, view, key=key)
def set(self, key, doc):
self.client.insert(key, doc)
def delete(self, key):
self.client.remove(key)
class Pool:
def __init__(self, bucket, host, username, password, collections=None,
initial=10, max_clients=20, quiet=True, port=8091):
self.host = host
self.port = port
self.bucket = bucket
self.collections = collections
self.username = username
self.password = password
self.quiet = quiet
self._q = Queue()
self._l = []
self._cur_clients = 0
self._max_clients = max_clients
self._lock = Lock()
for x in range(initial):
self._q.put(self._make_client())
self._cur_clients += 1
def _make_client(self):
if self.collections or cb_version[0] == '3':
client = CollectionsWrapper(
self.host, self.bucket, self.username,
self.password, self.quiet, self.port
)
else:
client = BucketWrapper(
self.host, self.bucket, self.password,
self.quiet, self.port
)
self._l.append(client)
return client
def get_client(self, initial_timeout=0.05, next_timeout=200):
try:
return self._q.get(True, initial_timeout)
except Empty:
try:
self._lock.acquire()
if self._cur_clients == self._max_clients:
raise ClientUnavailableError("Too many clients in use")
cb = self._make_client()
self._cur_clients += 1
cb.start_using()
return cb
except ClientUnavailableError as ex:
try:
return self._q.get(True, next_timeout)
except Empty:
raise ex
finally:
self._lock.release()
def release_client(self, cb):
cb.stop_using()
self._q.put(cb, True)
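# A minimal usage sketch (host, bucket and credentials are placeholders):
#   pool = Pool(bucket='default', host='127.0.0.1',
#               username='Administrator', password='password')
#   client = pool.get_client()
#   try:
#       client.set('key', {'value': 1})
#   finally:
#       pool.release_client(client)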
|
{
"content_hash": "6c2d23853027f9b0297d33bb22574ed7",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 84,
"avg_line_length": 30.393548387096775,
"alnum_prop": 0.5758862237316917,
"repo_name": "couchbase/perfrunner",
"id": "12adca8bc6f966dbdda0368cf30ae764b0efc7fd",
"size": "4711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cbagent/collectors/libstats/pool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1853"
},
{
"name": "Dockerfile",
"bytes": "2761"
},
{
"name": "Go",
"bytes": "37531"
},
{
"name": "Groovy",
"bytes": "46365"
},
{
"name": "HCL",
"bytes": "40219"
},
{
"name": "Inno Setup",
"bytes": "25281"
},
{
"name": "JavaScript",
"bytes": "14317"
},
{
"name": "Makefile",
"bytes": "2405"
},
{
"name": "Python",
"bytes": "2416900"
},
{
"name": "Ruby",
"bytes": "154"
},
{
"name": "Shell",
"bytes": "5016"
}
],
"symlink_target": ""
}
|
from bson import ObjectId
from bson.errors import InvalidId
from pymongo import MongoClient
def get_id_filter(item_id):
"""
Function that returns a dict with an ObjectId stored as value for the key '_id'
:param item_id: the id used when creating the ObjectId
:return: the resulting dict.
"""
try:
return {
'_id': ObjectId(item_id)
}
except InvalidId as _:
return None
def get_db(app):
"""
Function that creates a client for Mongo and retrieves the database based on the config vars.
:param app: the Flask app.
:return: the db object.
"""
client = MongoClient()
return client[app.config['api.vars'].db_name]
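# Example (hypothetical Flask app and collection names):
#   db = get_db(app)
#   item = db.tasks.find_one(get_id_filter('5f1d7a2e9c3b4a0006e1b2c3'))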
|
{
"content_hash": "1520c93607cfde758021a7baa8bdfa46",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 97,
"avg_line_length": 24.310344827586206,
"alnum_prop": 0.6425531914893617,
"repo_name": "xyder/kivy-tasks-server",
"id": "a1c4e788f914ea5f143ef83c294f77802e6a691e",
"size": "705",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "web_api/db_functions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20112"
}
],
"symlink_target": ""
}
|
"""
Use this module to get and run all tk tests.
tkinter tests should live in a package inside the directory where this file
lives, like test_tkinter.
Extensions also should live in packages following the same rule as above.
"""
import os
import sys
import unittest
import importlib
import test.support
this_dir_path = os.path.abspath(os.path.dirname(__file__))
def is_package(path):
for name in os.listdir(path):
if name in ('__init__.py', '__init__.pyc'):
return True
return False
def get_tests_modules(basepath=this_dir_path, gui=True, packages=None):
"""This will import and yield modules whose names start with test_
and are inside packages found in the path starting at basepath.
    If packages is specified, it should contain the names of the packages
    whose tests should be collected.
"""
py_ext = '.py'
for dirpath, dirnames, filenames in os.walk(basepath):
for dirname in list(dirnames):
if dirname[0] == '.':
dirnames.remove(dirname)
if is_package(dirpath) and filenames:
pkg_name = dirpath[len(basepath) + len(os.sep):].replace('/', '.')
if packages and pkg_name not in packages:
continue
filenames = filter(
lambda x: x.startswith('test_') and x.endswith(py_ext),
filenames)
for name in filenames:
try:
yield importlib.import_module(
".%s.%s" % (pkg_name, name[:-len(py_ext)]),
"tkinter.test")
except test.support.ResourceDenied:
if gui:
raise
def get_tests(text=True, gui=True, packages=None):
"""Yield all the tests in the modules found by get_tests_modules.
    If gui is False, only tests that do not require a GUI will be
    returned."""
attrs = []
if text:
attrs.append('tests_nogui')
if gui:
attrs.append('tests_gui')
for module in get_tests_modules(gui=gui, packages=packages):
for attr in attrs:
for test in getattr(module, attr, ()):
yield test
if __name__ == "__main__":
test.support.run_unittest(*get_tests())
|
{
"content_hash": "0b1afcd2c35bbb201dd6ea446e38de4c",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 78,
"avg_line_length": 31.788732394366196,
"alnum_prop": 0.590607000443066,
"repo_name": "ms-iot/python",
"id": "dbe5e88c1430e9630c285f6395dc884d60035759",
"size": "2257",
"binary": false,
"copies": "9",
"ref": "refs/heads/develop",
"path": "cpython/Lib/tkinter/test/runtktests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "481852"
},
{
"name": "Batchfile",
"bytes": "35616"
},
{
"name": "C",
"bytes": "15555469"
},
{
"name": "C#",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "726292"
},
{
"name": "CSS",
"bytes": "2839"
},
{
"name": "Common Lisp",
"bytes": "24481"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "HTML",
"bytes": "130698"
},
{
"name": "JavaScript",
"bytes": "10616"
},
{
"name": "M4",
"bytes": "223087"
},
{
"name": "Makefile",
"bytes": "197108"
},
{
"name": "Objective-C",
"bytes": "2098686"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PostScript",
"bytes": "13803"
},
{
"name": "PowerShell",
"bytes": "1372"
},
{
"name": "Python",
"bytes": "24948876"
},
{
"name": "Roff",
"bytes": "254942"
},
{
"name": "Shell",
"bytes": "437386"
},
{
"name": "TeX",
"bytes": "323102"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
'''Run a perceptual diff test locally.
To run:
source ./common.sh
./dpxdt/tools/local_pdiff.py test dpxdt/tools/local_pdiff_demo
This will run the tests described in dpxdt/tools/local_pdiff_demo/*.yaml.
See those files for details.
'''
import copy
import fnmatch
import glob
import json
import logging
import os
import requests
import signal
import subprocess
import sys
import tempfile
import threading
import time
import traceback
# Local Libraries
import gflags
FLAGS = gflags.FLAGS
import pyimgur
import yaml
# Local modules
from dpxdt.client import capture_worker
from dpxdt.client import fetch_worker
from dpxdt.client import pdiff_worker
from dpxdt.client import process_worker
from dpxdt.client import timer_worker
from dpxdt.client import utils
from dpxdt.client import workers
FLAGS.SetDefault('phantomjs_binary', 'phantomjs')
FLAGS.SetDefault('phantomjs_timeout', 20)
gflags.DEFINE_boolean(
'list_tests', False,
'Set this to list the names of all tests instead of running them.')
gflags.DEFINE_string(
'test_filter', '',
'Run a subset of tests. Pass a test name to run just that test, or '
'use a * to match a set of tests. See '
'https://code.google.com/p/googletest/wiki/AdvancedGuide'
'#Running_a_Subset_of_the_Tests for full syntax.')
gflags.DEFINE_string(
'imgur_client_id', '',
'When this is set, dpxdt will upload all screenshots from failing '
'tests to Imgur using their API. This is helpful when running tests '
'on a Travis-CI worker, for instance. You must register an app with '
'Imgur to use this.')
MODES = ['test', 'update']
# global tracker
FAILED_TESTS = 0
def should_run_test(name, pattern):
'''Given a test_filter pattern and a test name, should the test be run?'''
if pattern == '': return True
def matches_any(name, parts):
for part in parts:
if fnmatch.fnmatch(name, part):
return True
return False
positive_negative = pattern.split('-')
positive = positive_negative[0]
if positive:
# There's something here -- have to match it!
parts = positive.split(':')
if not matches_any(name, parts): return False
if len(positive_negative) > 1:
negative = positive_negative[1]
parts = negative.split(':')
if matches_any(name, parts): return False
return True
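# For example, with --test_filter='nav*:form*-form_slow', a test named
# 'nav_home' runs, while 'form_slow' is excluded by the negative pattern.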
class OneTestWorkflowItem(workers.WorkflowItem):
'''Runs an individual capture & pdiff (or update) based on a config.'''
def run(self, test_config, ref_dir, tmp_dir, mode, heartbeat=None, num_attempts=0):
'''Build a CaptureAndDiffWorkflowItem for a test.
Args:
test_config: See test.yaml for structure of test_config.
Returns: A CaptureAndDiffWorkflowItem
'''
assert 'name' in test_config
name = test_config['name']
if 'ref' in test_config:
# This test has different ref/run arms.
assert 'run' in test_config
arm_config = { 'name': name }
if mode == 'test':
arm_config.update(test_config['run'])
elif mode == 'update':
arm_config.update(test_config['ref'])
test_config = arm_config
assert 'url' in test_config
test_dir = tempfile.mkdtemp(dir=tmp_dir)
log_file = os.path.join(test_dir, 'log.txt')
output_path = os.path.join(test_dir, 'screenshot.png')
logging.info('Test config:\n%s', json.dumps(test_config, indent=2))
capture_config = copy.deepcopy(test_config.get('config', {}))
capture_config['targetUrl'] = test_config['url']
config_file = os.path.join(test_dir, 'config.json')
json.dump(capture_config, open(config_file, 'w'), indent=2)
ref_path = os.path.join(ref_dir, '%s.png' % name)
if mode == 'test':
assert os.path.exists(ref_path), (
'Reference image %s does not exist. '
'Try running in update mode.' % ref_path)
elif mode == 'update':
output_path = ref_path
ref_path = None
else:
raise ValueError('Invalid mode %s' % mode)
class NamedHeartbeat(workers.WorkflowItem):
def run(self, message):
yield heartbeat('%s: %s' % (name, message))
try:
yield CaptureAndDiffWorkflowItem(
name, log_file, config_file, output_path, ref_path,
heartbeat=NamedHeartbeat)
except capture_worker.CaptureFailedError, e:
if num_attempts >= e.max_attempts:
yield heartbeat('Unable to capture screenshot after %d tries.' % num_attempts)
raise e
else:
num_attempts += 1
yield heartbeat('Capture failed, retrying (%d)' % num_attempts)
yield OneTestWorkflowItem(test_config, ref_dir, tmp_dir, mode,
heartbeat=heartbeat, num_attempts=num_attempts)
class CaptureAndDiffWorkflowItem(workers.WorkflowItem):
def run(self, name, log_file, config_file, output_path, ref_path, heartbeat=None):
yield heartbeat('Running webpage capture process')
yield heartbeat(' Logging to %s' % log_file)
capture_failed = True
failure_reason = None
try:
returncode = yield capture_worker.CaptureWorkflow(log_file, config_file, output_path)
except (process_worker.TimeoutError, OSError), e:
failure_reason = str(e)
else:
capture_failed = returncode != 0
failure_reason = 'returncode=%s' % returncode
if capture_failed:
raise capture_worker.CaptureFailedError(
FLAGS.capture_task_max_attempts,
failure_reason)
if ref_path is None:
yield heartbeat('Updated %s' % output_path)
return # update mode
# TODO: consolidate this code w/ DoPdiffQueueWorkflow.run
ref_resized_path = os.path.join(os.path.dirname(output_path), 'ref_resized')
diff_path = os.path.join(os.path.dirname(output_path), 'diff.png')
max_attempts = FLAGS.pdiff_task_max_attempts
yield heartbeat('Resizing reference image')
returncode = yield pdiff_worker.ResizeWorkflow(
log_file, ref_path, output_path, ref_resized_path)
if returncode != 0:
raise pdiff_worker.PdiffFailedError(
max_attempts,
'Could not resize reference image to size of new image')
yield heartbeat('Running perceptual diff process')
returncode = yield pdiff_worker.PdiffWorkflow(
log_file, ref_resized_path, output_path, diff_path)
# ImageMagick returns 1 if the images are different and 0 if
# they are the same, so the return code is a bad judge of
# successfully running the diff command. Instead we need to check
# the output text.
diff_failed = True
# Check for a successful run or a known failure.
distortion = None
if os.path.isfile(log_file):
log_data = open(log_file).read()
if 'all: 0 (0)' in log_data:
diff_path = None
diff_failed = False
elif 'image widths or heights differ' in log_data:
# Give up immediately
max_attempts = 1
else:
                # Try to find the ImageMagick normalized root mean square
                # error and grab the first match.
r = pdiff_worker.DIFF_REGEX.findall(log_data)
if len(r) > 0:
diff_failed = False
distortion = r[0]
if diff_failed:
raise pdiff_worker.PdiffFailedError(
max_attempts,
'Comparison failed. returncode=%r' % returncode)
else:
if distortion:
print '%s failed' % name
print ' %s distortion' % distortion
print ' Ref: %s' % self.maybe_imgur(ref_resized_path)
print ' Run: %s' % self.maybe_imgur(output_path)
print ' Diff: %s' % self.maybe_imgur(diff_path)
# convenience line for copy/pasting
print ' (all): %s/{%s}' % (
os.path.dirname(output_path),
','.join(map(os.path.basename,
[ref_resized_path, output_path, diff_path])))
global FAILED_TESTS
FAILED_TESTS += 1
else:
print '%s passed (no diff)' % name
# TODO: delete temp files
def maybe_imgur(self, path):
'''Uploads a file to imgur if requested via command line flags.
Returns either "path" or "path url" depending on the course of action.
'''
if not FLAGS.imgur_client_id:
return path
im = pyimgur.Imgur(FLAGS.imgur_client_id)
uploaded_image = im.upload_image(path)
return '%s %s' % (path, uploaded_image.link)
class SetupStep(object):
'''Logic for running and finishing the setup step of a pdiff test.'''
def __init__(self, config, tmp_dir):
'''Config is the top-level test config YAML object.'''
self._config = config
self._setup = config.get('setup')
self._tmp_dir = tmp_dir
self._setup_proc = None
def run(self):
if not self._setup: return
# Note: we cannot use ProcessWorkflow here because the setup script
# is not expected to terminate (it typically spawns a long-lived server).
setup_file = os.path.join(self._tmp_dir, 'setup.sh')
log_file = os.path.join(self._tmp_dir, 'setup.log')
logging.info('Executing setup step: %s', setup_file)
open(setup_file, 'w').write(self._setup)
# If the shell script launches its own subprocesses (e.g. servers),
# then these will become orphans if we send SIGTERM to setup_proc. In
# order to avoid this, we make the shell script its own process group.
# See http://stackoverflow.com/a/4791612/388951
with open(log_file, 'a') as output_file:
self._setup_proc = subprocess.Popen(['bash', setup_file],
stderr=subprocess.STDOUT,
stdout=output_file,
close_fds=True,
preexec_fn=os.setsid)
return {'script': setup_file, 'log': log_file}
def terminate(self):
if not self._setup_proc: return
if self._setup_proc.pid > 0:
# TODO: send SIGKILL after 5 seconds?
os.killpg(self._setup_proc.pid, signal.SIGTERM)
self._setup_proc.wait()
class WrappedProcessWorkflowItem(process_worker.ProcessWorkflow):
'''A ProcessWorkflow which can be yielded inline.'''
def __init__(self, log_path, args, timeout_seconds=30):
process_worker.ProcessWorkflow.__init__(
self, log_path, timeout_seconds=timeout_seconds)
self._args = args
def get_args(self):
return self._args
class WaitForUrlWorkflowItem(workers.WorkflowItem):
    '''Waits for a URL to resolve, with a timeout.'''
def run(self, tmp_dir, waitfor, heartbeat=None, start_time=None):
assert 'url' in waitfor
timeout = waitfor.get('timeout_secs', 10)
if not start_time:
start_time = time.time()
class NotReadyError(Exception):
pass
try:
url = waitfor['url']
r = requests.head(url, allow_redirects=True)
if r.status_code != 200:
yield heartbeat('Request for %s failed (%d)' % (url, r.status_code))
raise NotReadyError()
yield heartbeat('Request for %s succeeded, continuing with tests...' % url)
return
except (requests.ConnectionError, NotReadyError):
now = time.time()
if now - start_time >= timeout:
raise process_worker.TimeoutError()
yield timer_worker.TimerItem(0.5) # wait 500ms between checks
yield WaitForUrlWorkflowItem(tmp_dir, waitfor, heartbeat, start_time)
class WaitForWorkflowItem(workers.WorkflowItem):
'''This performs the "waitFor" step specified in a test config.'''
def run(self, config, tmp_dir, heartbeat):
waitfor = config.get('waitFor')
if isinstance(waitfor, basestring):
            waitfor_file = os.path.join(tmp_dir, 'waitfor.sh')
            open(waitfor_file, 'w').write(waitfor)  # materialize the script before running it
            log_file = os.path.join(tmp_dir, 'waitfor.log')
            logging.info('Executing waitfor step: %s', waitfor_file)
try:
yield WrappedProcessWorkflowItem(log_file, ['bash', waitfor_file])
except subprocess.CalledProcessError:
yield heartbeat('waitFor returned error code\nSee %s' % log_file)
raise
elif 'url' in waitfor:
yield WaitForUrlWorkflowItem(tmp_dir, waitfor, heartbeat)
class RunAllTestSuitesWorkflowItem(workers.WorkflowItem):
'''Load test YAML files and add them to the work queue.'''
def run(self, config_dir, mode):
configs = glob.glob(os.path.join(config_dir, '*.yaml'))
if not configs:
raise ValueError('No yaml files found in %s' % config_dir)
        heartbeat = workers.PrintWorkflow
for config_file in configs:
config = yaml.load(open(config_file))
assert 'tests' in config
if FLAGS.list_tests:
print '%s:' % config_file
for test in config['tests']:
assert 'name' in test
print ' %s' % test['name']
else:
yield RunTestSuiteWorkflowItem(config_dir, config, mode, heartbeat)
class RunTestSuiteWorkflowItem(workers.WorkflowItem):
'''Run a single YAML file's worth of tests.'''
def run(self, config_dir, config, mode, heartbeat):
tmp_dir = tempfile.mkdtemp()
yield heartbeat('Running setup step')
setup = SetupStep(config, tmp_dir)
setup_files = setup.run()
yield heartbeat(' logging to %s' % setup_files['log'])
try:
if config.get('waitFor'):
try:
yield WaitForWorkflowItem(config, tmp_dir, heartbeat)
except process_worker.TimeoutError:
# The raw exception has an excessively long stack trace.
# This at least adds some helpful context to the end.
sys.stderr.write('Timed out on waitFor step.\n')
return
for test in config['tests']:
assert 'name' in test
name = test['name']
if should_run_test(name, FLAGS.test_filter):
yield OneTestWorkflowItem(test, config_dir, tmp_dir, mode,
heartbeat=heartbeat)
else:
logging.info('Skipping %s due to --test_filter=%s',
name, FLAGS.test_filter)
finally:
setup.terminate() # kill server from the setup step.
class RepetitiveLogFilterer(object):
'''Suppress repeated log entries from the same line in the same file.'''
def __init__(self):
self.last_source = None
def filter(self, record):
if FLAGS.verbose:
return True
source = '%s:%s' % (record.filename, record.lineno)
if source == self.last_source:
return False
self.last_source = source
return True
class CompactExceptionLogger(logging.Formatter):
def formatException(self, ei):
# Like logging.Formatter.formatException, but without the stack trace.
if FLAGS.verbose:
return super(CompactExceptionLogger, self).formatException(ei)
else:
return '\n'.join(traceback.format_exception_only(ei[0], ei[1]))
def usage(short=False):
sys.stderr.write('Usage: %s [update|test] <testdir>\n' % sys.argv[0])
if not short:
sys.stderr.write('%s\n' % FLAGS)
def main(argv):
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
sys.stderr.write('%s\n' % e)
usage()
sys.exit(1)
if len(argv) < 3:
sys.stderr.write('Too few arguments\n')
usage(short=True)
sys.exit(1)
mode = argv[1]
assert mode in MODES, 'Invalid mode: %s (expected %r)' % (mode, MODES)
config_dir = argv[2]
assert os.path.isdir(config_dir), 'Expected directory, got %s' % config_dir
utils.verify_binary('phantomjs_binary', ['--version'])
utils.verify_binary('pdiff_compare_binary', ['--version'])
utils.verify_binary('pdiff_composite_binary', ['--version'])
assert os.path.exists(FLAGS.phantomjs_script)
logging.basicConfig()
logging.getLogger().addFilter(RepetitiveLogFilterer())
logging.getLogger().handlers[0].setFormatter(CompactExceptionLogger())
if FLAGS.verbose:
logging.getLogger().setLevel(logging.DEBUG)
coordinator = workers.get_coordinator()
timer_worker.register(coordinator)
global FAILED_TESTS
FAILED_TESTS = 0
item = RunAllTestSuitesWorkflowItem(config_dir, mode)
item.root = True
coordinator.input_queue.put(item, mode)
coordinator.start()
coordinator.wait_one()
coordinator.stop()
coordinator.join()
if mode == 'test':
if FAILED_TESTS > 0:
sys.stderr.write('%d test(s) failed.\n' % FAILED_TESTS)
sys.exit(1)
else:
sys.stderr.write('All tests passed!\n')
sys.exit(0)
def run():
# (intended to be run from package)
FLAGS.phantomjs_script = os.path.join(
os.path.dirname(__file__), '..', 'client', 'capture.js')
main(sys.argv)
if __name__ == '__main__':
run()
|
{
"content_hash": "c0f98133cb0f34c0de1c5daf0a5c53bb",
"timestamp": "",
"source": "github",
"line_count": 517,
"max_line_length": 97,
"avg_line_length": 34.72147001934236,
"alnum_prop": 0.5939501977605705,
"repo_name": "weeksghost/dpxdt",
"id": "bd906254c8a94603f847df9b02571c8f2483e3e0",
"size": "17973",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dpxdt/tools/local_pdiff.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4901"
},
{
"name": "HTML",
"bytes": "66831"
},
{
"name": "JavaScript",
"bytes": "12602"
},
{
"name": "Makefile",
"bytes": "267"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "296972"
},
{
"name": "Shell",
"bytes": "1585"
}
],
"symlink_target": ""
}
|
from plenum.common.constants import DOMAIN_LEDGER_ID
from plenum.common.messages.fields import BlsMultiSignatureValueField, TimestampField
from plenum.common.util import get_utc_epoch
from plenum.test.input_validation.utils import b58_by_len
validator = BlsMultiSignatureValueField()
state_root_hash = b58_by_len(32)
pool_state_root_hash = b58_by_len(32)
txn_root_hash = b58_by_len(32)
ledger_id = DOMAIN_LEDGER_ID
timestamp = get_utc_epoch()
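# validate() is assumed to follow plenum's field-validator convention of
# returning None for a valid value and an error string otherwise, which is
# why the tests below assert on plain truthiness.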
def test_valid():
assert not validator.validate((ledger_id,
state_root_hash,
pool_state_root_hash,
txn_root_hash,
timestamp))
def test_invalid_ledger_id():
assert validator.validate((100,
state_root_hash,
pool_state_root_hash,
txn_root_hash,
timestamp))
def test_invalid_state_root_hash():
assert validator.validate((ledger_id,
b58_by_len(31),
pool_state_root_hash,
txn_root_hash,
timestamp))
def test_invalid_pool_state_root_hash():
assert validator.validate((ledger_id,
state_root_hash,
b58_by_len(31),
txn_root_hash,
timestamp))
def test_invalid_txn_root_hash():
assert validator.validate((ledger_id,
state_root_hash,
pool_state_root_hash,
b58_by_len(31),
timestamp))
def test_invalid_timestamp():
assert validator.validate((ledger_id,
state_root_hash,
pool_state_root_hash,
txn_root_hash,
TimestampField._oldest_time - 1))
|
{
"content_hash": "23568ff12be46d543735f35e4fcb0023",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 85,
"avg_line_length": 35.03389830508475,
"alnum_prop": 0.4765360425737784,
"repo_name": "evernym/plenum",
"id": "731155a58f531a2e23fb4ead35d42b19220e744c",
"size": "2067",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plenum/test/input_validation/fields_validation/test_bls_multisig_value_field.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1537915"
}
],
"symlink_target": ""
}
|
import os
from oslo_concurrency import processutils
from oslo_config import cfg as openstack_cfg
from trove.cmd.common import with_initialize
opts = [
openstack_cfg.BoolOpt('fork', short='f', default=False, dest='fork'),
openstack_cfg.StrOpt('pid-file', default='.pid'),
openstack_cfg.StrOpt('override-logfile', default=None),
]
def setup_logging(conf):
if conf.override_logfile:
conf.use_stderr = False
conf.log_file = conf.override_logfile
@with_initialize(extra_opts=opts, pre_logging=setup_logging)
def main(conf):
if conf.fork:
pid = os.fork()
if pid == 0:
start_server(conf)
else:
print("Starting server:%s" % pid)
            pid_file = conf.pid_file
with open(pid_file, 'w') as f:
f.write(str(pid))
else:
start_server(conf)
def start_fake_taskmanager(conf):
topic = conf.taskmanager_queue
from trove.common.rpc import service as rpc_service
from trove.common.rpc import version as rpc_version
taskman_service = rpc_service.RpcService(
topic=topic, rpc_api_version=rpc_version.RPC_API_VERSION,
manager='trove.taskmanager.manager.Manager')
taskman_service.start()
def start_server(conf):
from trove.common import wsgi
conf_file = conf.find_file(conf.api_paste_config)
workers = conf.trove_api_workers or processutils.get_worker_count()
launcher = wsgi.launch('trove', conf.bind_port or 8779, conf_file,
workers=workers)
start_fake_taskmanager(conf)
launcher.wait()
|
{
"content_hash": "68ada6967e5b4c14e163db9ecd831519",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 73,
"avg_line_length": 30.442307692307693,
"alnum_prop": 0.6538218572331017,
"repo_name": "fabian4/trove",
"id": "30ffadaefefd2c64628bd70304a8653b64094bec",
"size": "2218",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "trove/cmd/fakemode.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "88"
},
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60526"
},
{
"name": "Python",
"bytes": "3297002"
},
{
"name": "Shell",
"bytes": "15239"
},
{
"name": "XSLT",
"bytes": "50542"
}
],
"symlink_target": ""
}
|
""" Configuration utilities.
"""
# Copyright © 2015 Jürgen Hermann <jh@web.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals, print_function
import os
import re
import sys
from rudiments.reamed.click import Configuration # noqa pylint: disable=unused-import
from ._compat import iteritems
# Determine path this command is located in (installed to)
try:
CLI_PATH = sys.modules['__main__'].__file__
except (KeyError, AttributeError):
CLI_PATH = __file__
CLI_PATH = os.path.dirname(CLI_PATH)
if CLI_PATH.endswith('/bin'):
CLI_PATH = CLI_PATH[:-4]
CLI_PATH = re.sub('^' + os.path.expanduser('~'), '~', CLI_PATH)
# Extended version info for use by `click.version_option`
VERSION_INFO = '%(prog)s %(version)s from {} [Python {}]'.format(CLI_PATH, sys.version.split()[0])
# These will be filled by `__main__`
APP_NAME = None
cli = None # pylint: disable=invalid-name
def version_info(ctx=None):
"""Return version information just like --version does."""
from . import __version__
prog = ctx.find_root().info_name if ctx else APP_NAME
version = __version__
try:
import pkg_resources
except ImportError:
pass
else:
for dist in iter(pkg_resources.working_set):
scripts = dist.get_entry_map().get('console_scripts') or {}
for _, entry_point in iteritems(scripts):
if entry_point.module_name == (__package__ + '.__main__'):
version = dist.version
break
return VERSION_INFO % dict(prog=prog, version=version)
def envvar(name, default=None):
"""Return an environment variable specific for this application (using a prefix)."""
varname = (APP_NAME + '-' + name).upper().replace('-', '_')
return os.environ.get(varname, default)
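# For example, assuming __main__ sets APP_NAME to 'gh', envvar('login-name')
# would read the GH_LOGIN_NAME environment variable (illustrative names).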
|
{
"content_hash": "6138ae02fa5387369d3c0a98aec13ca8",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 110,
"avg_line_length": 34.661764705882355,
"alnum_prop": 0.6682223165040305,
"repo_name": "jhermann/gh-commander",
"id": "2096c1115106b344d561ef66958518eb79826058",
"size": "2418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/gh_commander/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "62686"
}
],
"symlink_target": ""
}
|
"""Tokenization help for Python programs.
This module exports a function called 'tokenize()' that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF) and a "token-eater"
function which is called once for each token found. The latter function is
passed the token type, a string containing the token, the starting and
ending (row, column) coordinates of the token, and the original line. It is
designed to match the working of the Python tokenizer exactly, except that
it produces COMMENT tokens for comments and gives type OP for all operators."""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
import string, re
from token import *
import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return apply(group, choices) + '*'
def maybe(*choices): return apply(group, choices) + '?'
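# For instance, group('a', 'b') yields '(a|b)', any('a', 'b') yields '(a|b)*'
# and maybe('a') yields '(a)?'.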
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'[1-9]\d*' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'0[jJ]', r'[1-9]\d*[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
tokenprog, pseudoprog, single3prog, double3prog = map(
re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
"'''": single3prog, '"""': double3prog,
"r'''": single3prog, 'r"""': double3prog,
"u'''": single3prog, 'u"""': double3prog,
"ur'''": single3prog, 'ur"""': double3prog,
"R'''": single3prog, 'R"""': double3prog,
"U'''": single3prog, 'U"""': double3prog,
"uR'''": single3prog, 'uR"""': double3prog,
"Ur'''": single3prog, 'Ur"""': double3prog,
"UR'''": single3prog, 'UR"""': double3prog,
'r': None, 'R': None, 'u': None, 'U': None}
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
print "%d,%d-%d,%d:\t%s\t%s" % \
(srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
try:
tokenize_loop(readline, tokeneater)
except StopTokenizing:
pass
def tokenize_loop(readline, tokeneater):
lnum = parenlev = continued = 0
namechars, numchars = string.letters + '_', string.digits
contstr, needcont = '', 0
contline = None
indents = [0]
while 1: # loop over lines in stream
line = readline()
lnum = lnum + 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError, ("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
tokeneater(STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
tokeneater(ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ': column = column + 1
elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
elif line[pos] == '\f': column = 0
else: break
pos = pos + 1
if pos == max: break
if line[pos] in '#\r\n': # skip comments or blank lines
tokeneater((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
tokeneater(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
indents = indents[:-1]
tokeneater(DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError, ("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = pseudoprog.match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
token, initial = line[start:end], line[start]
if initial in numchars or \
(initial == '.' and token != '.'): # ordinary number
tokeneater(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
tokeneater(parenlev > 0 and NL or NEWLINE,
token, spos, epos, line)
elif initial == '#':
tokeneater(COMMENT, token, spos, epos, line)
elif token in ("'''", '"""', # triple-quoted
"r'''", 'r"""', "R'''", 'R"""',
"u'''", 'u"""', "U'''", 'U"""',
"ur'''", 'ur"""', "Ur'''", 'Ur"""',
"uR'''", 'uR"""', "UR'''", 'UR"""'):
endprog = endprogs[token]
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
tokeneater(STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in ("'", '"') or \
token[:2] in ("r'", 'r"', "R'", 'R"',
"u'", 'u"', "U'", 'U"') or \
token[:3] in ("ur'", 'ur"', "Ur'", 'Ur"',
"uR'", 'uR"', "UR'", 'UR"' ):
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = (endprogs[initial] or endprogs[token[1]] or
endprogs[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
tokeneater(STRING, token, spos, epos, line)
elif initial in namechars: # ordinary name
tokeneater(NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{': parenlev = parenlev + 1
elif initial in ')]}': parenlev = parenlev - 1
tokeneater(OP, token, spos, epos, line)
else:
tokeneater(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos = pos + 1
for indent in indents[1:]: # pop remaining indent levels
tokeneater(DEDENT, '', (lnum, 0), (lnum, 0), '')
tokeneater(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__': # testing
import sys
if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
else: tokenize(sys.stdin.readline)
|
{
"content_hash": "ebd2f69b9a623eba2e0be2b5222ae0f0",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 80,
"avg_line_length": 42.525,
"alnum_prop": 0.47070350774054476,
"repo_name": "Integral-Technology-Solutions/ConfigNOW-4.3",
"id": "2af595daea44425dc0a46b9717d836d8ebfd9197",
"size": "10206",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Lib/tokenize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1499"
},
{
"name": "HTML",
"bytes": "2243"
},
{
"name": "Java",
"bytes": "594"
},
{
"name": "Python",
"bytes": "2973691"
},
{
"name": "Shell",
"bytes": "5797"
}
],
"symlink_target": ""
}
|
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class MatrixJob(ScrollphatJob):
    # 'matrix' is a list of strings, each 55 characters of '1' or '0',
    # one per LED of the 5x11 Scroll pHAT grid
def run(self):
for matrix_string in self.options['matrix']:
matrix = self.convert_to_matrix(list(matrix_string))
self.update_matrix(matrix)
self.sleep_interval()
def update_matrix(self, matrix):
scrollphat.set_pixels(lambda x, y: matrix[y][x], True)
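        # set_pixels takes a callable mapping (x, y) -> on/off; passing True
        # makes the scrollphat library push the frame to the LEDs immediately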
def convert_to_matrix(self, arr):
"""
Convert a 1 dimensional array into a 2 dimensional array.
"""
matrix_array = self.limit_array(arr)
counter = 0
matrix = []
for i in range(self.MATRIX_ROWS):
row = []
for j in range(self.MATRIX_COLS):
row.append(int(matrix_array[counter]))
counter += 1
matrix.append(row)
return matrix
def limit_array(self, arr):
"""
Ensure arr is exactly self.MATRIX_LEDS elements long.
Longer array is cut off, shorter is filled with zeroes.
"""
if len(arr) > self.MATRIX_LEDS:
return arr[:self.MATRIX_LEDS]
elif len(arr) < self.MATRIX_LEDS:
return arr + [0] * (self.MATRIX_LEDS - len(arr))
else:
return arr
|
{
"content_hash": "4a6e7aff275a9ed1bf14b3ccad60d5f4",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 65,
"avg_line_length": 33.125,
"alnum_prop": 0.5728301886792453,
"repo_name": "ollej/piapi",
"id": "a8e8edd52f042ca7ac8c47712d8e42e1380f52a5",
"size": "1325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pijobs/matrixjob.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27673"
},
{
"name": "Shell",
"bytes": "296"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2021 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import subprocess
import cocotb_test.simulator
import pytest
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.axi import AxiStreamBus, AxiStreamFrame, AxiStreamSource, AxiStreamSink
class TB(object):
def __init__(self, dut):
self.dut = dut
ports = len(dut.axis_mux_inst.s_axis_tvalid)
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.fork(Clock(dut.clk, 10, units="ns").start())
self.source = [AxiStreamSource(AxiStreamBus.from_prefix(dut, f"s{k:02d}_axis"), dut.clk, dut.rst) for k in range(ports)]
self.sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "m_axis"), dut.clk, dut.rst)
dut.enable.setimmediatevalue(0)
dut.select.setimmediatevalue(0)
def set_idle_generator(self, generator=None):
if generator:
for source in self.source:
source.set_pause_generator(generator())
def set_backpressure_generator(self, generator=None):
if generator:
self.sink.set_pause_generator(generator())
async def reset(self):
self.dut.rst.setimmediatevalue(0)
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst <= 1
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
self.dut.rst <= 0
await RisingEdge(self.dut.clk)
await RisingEdge(self.dut.clk)
async def run_test(dut, payload_lengths=None, payload_data=None, idle_inserter=None, backpressure_inserter=None, port=0):
tb = TB(dut)
id_count = 2**len(tb.source[port].bus.tid)
cur_id = 1
await tb.reset()
tb.set_idle_generator(idle_inserter)
tb.set_backpressure_generator(backpressure_inserter)
test_frames = []
dut.enable.setimmediatevalue(1)
dut.select.setimmediatevalue(port)
for test_data in [payload_data(x) for x in payload_lengths()]:
test_frame = AxiStreamFrame(test_data)
test_frame.tid = cur_id
test_frame.tdest = cur_id
test_frames.append(test_frame)
await tb.source[port].send(test_frame)
cur_id = (cur_id + 1) % id_count
for test_frame in test_frames:
rx_frame = await tb.sink.recv()
assert rx_frame.tdata == test_frame.tdata
assert rx_frame.tid == test_frame.tid
assert rx_frame.tdest == test_frame.tdest
assert not rx_frame.tuser
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
async def run_test_tuser_assert(dut, port=0):
tb = TB(dut)
await tb.reset()
dut.enable.setimmediatevalue(1)
dut.select.setimmediatevalue(port)
test_data = bytearray(itertools.islice(itertools.cycle(range(256)), 32))
test_frame = AxiStreamFrame(test_data, tuser=1)
await tb.source[port].send(test_frame)
rx_frame = await tb.sink.recv()
assert rx_frame.tdata == test_data
assert rx_frame.tuser
assert tb.sink.empty()
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
def cycle_pause():
return itertools.cycle([1, 1, 1, 0])
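# A truthy value from a pause generator stalls the stream for that cycle, so
# cycle_pause applies idle/backpressure three cycles out of every four.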
def size_list():
data_width = len(cocotb.top.m_axis_tdata)
byte_width = data_width // 8
return list(range(1, byte_width*4+1))+[512]+[1]*64
def incrementing_payload(length):
return bytearray(itertools.islice(itertools.cycle(range(256)), length))
if cocotb.SIM_NAME:
ports = len(cocotb.top.axis_mux_inst.s_axis_tvalid)
factory = TestFactory(run_test)
factory.add_option("payload_lengths", [size_list])
factory.add_option("payload_data", [incrementing_payload])
factory.add_option("idle_inserter", [None, cycle_pause])
factory.add_option("backpressure_inserter", [None, cycle_pause])
factory.add_option("port", list(range(ports)))
factory.generate_tests()
for test in [run_test_tuser_assert]:
factory = TestFactory(test)
factory.add_option("port", list(range(ports)))
factory.generate_tests()
# cocotb-test
tests_dir = os.path.dirname(__file__)
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
@pytest.mark.parametrize("data_width", [8, 16, 32])
@pytest.mark.parametrize("ports", [1, 4])
def test_axis_mux(request, ports, data_width):
dut = "axis_mux"
wrapper = f"{dut}_wrap_{ports}"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = wrapper
# generate wrapper
wrapper_file = os.path.join(tests_dir, f"{wrapper}.v")
if not os.path.exists(wrapper_file):
subprocess.Popen(
[os.path.join(rtl_dir, f"{dut}_wrap.py"), "-p", f"{ports}"],
cwd=tests_dir
).wait()
verilog_sources = [
wrapper_file,
os.path.join(rtl_dir, f"{dut}.v"),
]
parameters = {}
parameters['DATA_WIDTH'] = data_width
parameters['KEEP_ENABLE'] = int(parameters['DATA_WIDTH'] > 8)
parameters['KEEP_WIDTH'] = parameters['DATA_WIDTH'] // 8
parameters['ID_ENABLE'] = 1
parameters['ID_WIDTH'] = 8
parameters['DEST_ENABLE'] = 1
parameters['DEST_WIDTH'] = 8
parameters['USER_ENABLE'] = 1
parameters['USER_WIDTH'] = 1
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
extra_env['PORTS'] = str(ports)
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
|
{
"content_hash": "9f5386766d95d52160ad8c37bc51f5c9",
"timestamp": "",
"source": "github",
"line_count": 231,
"max_line_length": 128,
"avg_line_length": 29.493506493506494,
"alnum_prop": 0.667107001321004,
"repo_name": "alexforencich/xfcp",
"id": "12a06efa0a5efb08951b982a17eae5bee33fc288",
"size": "6835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/eth/lib/axis/tb/axis_mux/test_axis_mux.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "259479"
},
{
"name": "Python",
"bytes": "3200270"
},
{
"name": "Shell",
"bytes": "14435"
},
{
"name": "Tcl",
"bytes": "29878"
},
{
"name": "Verilog",
"bytes": "4179456"
}
],
"symlink_target": ""
}
|
import wallaby.backends.couchdb as couch
import wallaby.backends.elasticsearch as es
from wallaby.frontends.qt.baseWindow import *
from wallaby.pf.peer.searchDocument import *
import wallaby.frontends.qt.resource_rc as resource_rc
from UI_mainWindow import *
import app_rc
class MainWindow(BaseWindow, Ui_MainWindow):
def __init__(self, quitCB, options, embedded=False):
        if options.db is not None:
db = options.db
else:
db = "bootstrap"
BaseWindow.__init__(self, "wallaby", "inspector", options, quitCB, dbName=db, embedded=embedded)
# set up User Interface (widgets, layout...)
self.setupUi(self)
self.scrollArea.setWidgetResizable(True)
def setConnectionSettings(self, options):
if options and options.fx:
options.server = "https://relax.freshx.de"
options.couchPort = "443"
options.esPort = "443/es"
couch.Database.setURLForDatabase(self.dbName(), options.server + ":" + options.couchPort)
es.Connection.setURLForIndex(None, options.server + ':' + options.esPort)
        if options and options.username is not None and options.password is not None:
es.Connection.setLoginForIndex(None, options.username, options.password)
def _credentialsArrived(self, pillow, feathers):
pass
|
{
"content_hash": "97615faaa54edec8039279784cbd891d",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 104,
"avg_line_length": 33.6,
"alnum_prop": 0.6711309523809523,
"repo_name": "FreshXOpenSource/wallaby-app-inspector",
"id": "807c478a2394b3ea64acc1768ab6204ad76bef73",
"size": "1425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wallaby/apps/inspector/mainWindow.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "164"
},
{
"name": "Python",
"bytes": "12479"
}
],
"symlink_target": ""
}
|
class Solution(object):
def wordPattern(self, pattern, str):
"""
:type pattern: str
:type str: str
:rtype: bool
"""
words = str.split()
        # setdefault returns the value already stored for a key; if the key
        # is absent, it inserts and returns the given default
f = lambda keys: map({}.setdefault, keys, xrange(len(keys)))
return f(pattern) == f(words)
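# Hedged usage sketch (assumes Python 2, as implied by xrange above); the
# index-pattern trick maps each element to the index of its first
# occurrence, so f('abba') == [0, 1, 1, 0] matches 'dog cat cat dog'.
if __name__ == '__main__':
    s = Solution()
    assert s.wordPattern('abba', 'dog cat cat dog')
    assert not s.wordPattern('abba', 'dog cat cat fish')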
|
{
"content_hash": "63cf601e3b3bdd5e0cc858e503e7fc33",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 77,
"avg_line_length": 33.90909090909091,
"alnum_prop": 0.5576407506702413,
"repo_name": "young-geng/leet_code",
"id": "c6b02f8cb6781db3d0571f10978337999dcdc535",
"size": "966",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problems/290_word-pattern/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "599"
},
{
"name": "Python",
"bytes": "111519"
}
],
"symlink_target": ""
}
|
from nova import block_device
from nova.cells import opts as cells_opts
from nova.cells import rpcapi as cells_rpcapi
from nova import db
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova.openstack.common import log as logging
from nova import utils
LOG = logging.getLogger(__name__)
_BLOCK_DEVICE_OPTIONAL_JOINED_FIELD = ['instance']
BLOCK_DEVICE_OPTIONAL_ATTRS = _BLOCK_DEVICE_OPTIONAL_JOINED_FIELD
def _expected_cols(expected_attrs):
return [attr for attr in expected_attrs
if attr in _BLOCK_DEVICE_OPTIONAL_JOINED_FIELD]
class BlockDeviceMapping(base.NovaPersistentObject, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Add instance_uuid to get_by_volume_id method
# Version 1.2: Instance version 1.14
VERSION = '1.2'
fields = {
'id': fields.IntegerField(),
'instance_uuid': fields.UUIDField(),
'instance': fields.ObjectField('Instance', nullable=True),
'source_type': fields.StringField(nullable=True),
'destination_type': fields.StringField(nullable=True),
'guest_format': fields.StringField(nullable=True),
'device_type': fields.StringField(nullable=True),
'disk_bus': fields.StringField(nullable=True),
'boot_index': fields.IntegerField(nullable=True),
'device_name': fields.StringField(nullable=True),
'delete_on_termination': fields.BooleanField(default=False),
'snapshot_id': fields.StringField(nullable=True),
'volume_id': fields.StringField(nullable=True),
'volume_size': fields.IntegerField(nullable=True),
'image_id': fields.StringField(nullable=True),
'no_device': fields.BooleanField(default=False),
'connection_info': fields.StringField(nullable=True),
}
    def obj_make_compatible(self, primitive, target_version):
        target_version = utils.convert_version_to_tuple(target_version)
        if target_version < (1, 2) and 'instance' in primitive:
            # obj_make_compatible mutates the primitive in place (it does
            # not return it), so downgrade the nested Instance payload and
            # stamp the downgraded version on the primitive
            objects.Instance().obj_make_compatible(
                primitive['instance']['nova_object.data'], '1.13')
            primitive['instance']['nova_object.version'] = '1.13'
@staticmethod
def _from_db_object(context, block_device_obj,
db_block_device, expected_attrs=None):
if expected_attrs is None:
expected_attrs = []
for key in block_device_obj.fields:
if key in BLOCK_DEVICE_OPTIONAL_ATTRS:
continue
block_device_obj[key] = db_block_device[key]
if 'instance' in expected_attrs:
my_inst = objects.Instance(context)
my_inst._from_db_object(context, my_inst,
db_block_device['instance'])
block_device_obj.instance = my_inst
block_device_obj._context = context
block_device_obj.obj_reset_changes()
return block_device_obj
@base.remotable
def create(self, context):
cell_type = cells_opts.get_cell_type()
if cell_type == 'api':
raise exception.ObjectActionError(
action='create',
reason='BlockDeviceMapping cannot be '
'created in the API cell.')
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
if 'instance' in updates:
raise exception.ObjectActionError(action='create',
reason='instance assigned')
db_bdm = db.block_device_mapping_create(context, updates, legacy=False)
self._from_db_object(context, self, db_bdm)
if cell_type == 'compute':
cells_api = cells_rpcapi.CellsAPI()
cells_api.bdm_update_or_create_at_top(context, self, create=True)
@base.remotable
def destroy(self, context):
if not self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='destroy',
reason='already destroyed')
db.block_device_mapping_destroy(context, self.id)
delattr(self, base.get_attrname('id'))
cell_type = cells_opts.get_cell_type()
if cell_type == 'compute':
cells_api = cells_rpcapi.CellsAPI()
cells_api.bdm_destroy_at_top(context, self.instance_uuid,
device_name=self.device_name,
volume_id=self.volume_id)
@base.remotable
def save(self, context):
updates = self.obj_get_changes()
if 'instance' in updates:
raise exception.ObjectActionError(action='save',
reason='instance changed')
updates.pop('id', None)
updated = db.block_device_mapping_update(self._context, self.id,
updates, legacy=False)
self._from_db_object(context, self, updated)
cell_type = cells_opts.get_cell_type()
if cell_type == 'compute':
cells_api = cells_rpcapi.CellsAPI()
cells_api.bdm_update_or_create_at_top(context, self)
@base.remotable_classmethod
def get_by_volume_id(cls, context, volume_id,
instance_uuid=None, expected_attrs=None):
if expected_attrs is None:
expected_attrs = []
db_bdm = db.block_device_mapping_get_by_volume_id(
context, volume_id, _expected_cols(expected_attrs))
if not db_bdm:
raise exception.VolumeBDMNotFound(volume_id=volume_id)
# NOTE (ndipanov): Move this to the db layer into a
# get_by_instance_and_volume_id method
if instance_uuid and instance_uuid != db_bdm['instance_uuid']:
raise exception.InvalidVolume(
reason=_("Volume does not belong to the "
"requested instance."))
return cls._from_db_object(context, cls(), db_bdm,
expected_attrs=expected_attrs)
@property
def is_root(self):
return self.boot_index == 0
@property
def is_volume(self):
return self.destination_type == 'volume'
@property
def is_image(self):
return self.source_type == 'image'
def get_image_mapping(self):
return block_device.BlockDeviceDict(self).get_image_mapping()
def obj_load_attr(self, attrname):
if attrname not in BLOCK_DEVICE_OPTIONAL_ATTRS:
raise exception.ObjectActionError(
action='obj_load_attr',
reason='attribute %s not lazy-loadable' % attrname)
if not self._context:
raise exception.OrphanedObjectError(method='obj_load_attr',
objtype=self.obj_name())
LOG.debug("Lazy-loading `%(attr)s' on %(name)s uuid %(uuid)s",
{'attr': attrname,
'name': self.obj_name(),
                   'uuid': self.instance_uuid,
})
self.instance = objects.Instance.get_by_uuid(self._context,
self.instance_uuid)
self.obj_reset_changes(fields=['instance'])
class BlockDeviceMappingList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: BlockDeviceMapping <= version 1.1
# Version 1.2: Added use_slave to get_by_instance_uuid
# Version 1.3: BlockDeviceMapping <= version 1.2
VERSION = '1.3'
fields = {
'objects': fields.ListOfObjectsField('BlockDeviceMapping'),
}
child_versions = {
'1.0': '1.0',
'1.1': '1.1',
'1.2': '1.1',
'1.3': '1.2',
}
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False):
db_bdms = db.block_device_mapping_get_all_by_instance(
context, instance_uuid, use_slave=use_slave)
return base.obj_make_list(
context, cls(), objects.BlockDeviceMapping, db_bdms or [])
def root_bdm(self):
try:
return (bdm_obj for bdm_obj in self if bdm_obj.is_root).next()
except StopIteration:
return
def root_metadata(self, context, image_api, volume_api):
root_bdm = self.root_bdm()
if not root_bdm:
return {}
if root_bdm.is_volume:
try:
volume = volume_api.get(context, root_bdm.volume_id)
return volume.get('volume_image_metadata', {})
except Exception:
raise exception.InvalidBDMVolume(id=root_bdm.id)
elif root_bdm.is_image:
try:
image_meta = image_api.show(context, root_bdm.image_id)
return image_meta.get('properties', {})
except Exception:
raise exception.InvalidBDMImage(id=root_bdm.id)
else:
return {}
def block_device_make_list(context, db_list, **extra_args):
return base.obj_make_list(context,
objects.BlockDeviceMappingList(context),
objects.BlockDeviceMapping, db_list,
**extra_args)
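# Illustrative usage sketch (the context and DB rows here are assumptions,
# not part of this module's API surface):
#
#     db_bdms = db.block_device_mapping_get_all_by_instance(ctxt, uuid)
#     bdms = block_device_make_list(ctxt, db_bdms)
#     root = bdms.root_bdm()  # first mapping with boot_index == 0, if any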
|
{
"content_hash": "0a681b71a5cdd4168de2297005d1e0c1",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 79,
"avg_line_length": 39.78151260504202,
"alnum_prop": 0.5799535276721588,
"repo_name": "jumpstarter-io/nova",
"id": "218fea742c561a5b20e9f5a575a11e50bd107cd3",
"size": "10076",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/objects/block_device.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import json
import os
from pants.base.exceptions import TaskError
from pants.task.repl_task_mixin import ReplTaskMixin
from pants.util.contextutil import pushd, temporary_dir
from pants.contrib.node.tasks.node_paths import NodePaths
from pants.contrib.node.tasks.node_task import NodeTask
class NodeRepl(ReplTaskMixin, NodeTask):
"""Launches a Node.js REPL session."""
SYNTHETIC_NODE_TARGET_NAME = 'synthetic-node-repl-module'
@classmethod
def prepare(cls, options, round_manager):
super(NodeRepl, cls).prepare(options, round_manager)
round_manager.require_data(NodePaths)
@classmethod
def select_targets(cls, target):
return cls.is_node_package(target)
@classmethod
def supports_passthru_args(cls):
return True
def setup_repl_session(self, targets):
# Let MutexTaskMixin (base of ReplTaskMixin) do its normal filtering/validation logic on all
# targets, but for NodeRepl we only want the subset in target_roots, since we don't need to
# construct a classpath with transitive deps. NPM will install all the transitive deps
# under the synthetic target we create below in launch_repl - we just need to put the
# target_roots in the synthetic target's package.json dependencies.
return [target for target in targets if target in self.context.target_roots]
def launch_repl(self, targets):
with temporary_dir() as temp_dir:
node_paths = self.context.products.get_data(NodePaths)
package_json_path = os.path.join(temp_dir, 'package.json')
package = {
'name': self.SYNTHETIC_NODE_TARGET_NAME,
'version': '0.0.0',
'dependencies': {
target.package_name: node_paths.node_path(target) if self.is_node_module(target)
else target.version for target in targets
}
}
with open(package_json_path, 'wb') as fp:
json.dump(package, fp, indent=2)
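      # For illustration only (target names/paths are hypothetical), the
      # synthetic package.json written above might look like:
      #     {"name": "synthetic-node-repl-module",
      #      "version": "0.0.0",
      #      "dependencies": {"mylib": "/.pants.d/node/mylib"}}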
args = self.get_passthru_args()
node_repl = self.node_distribution.node_command(
args=args, node_paths=node_paths.all_node_paths if node_paths else None)
with pushd(temp_dir):
# TODO: Expose npm command options via node subsystems.
result, npm_install = self.execute_npm(['install', '--no-optional'],
workunit_name=self.SYNTHETIC_NODE_TARGET_NAME)
if result != 0:
raise TaskError('npm install of synthetic REPL module failed:\n'
'\t{} failed with exit code {}'.format(npm_install, result))
repl_session = node_repl.run()
repl_session.wait()
# TODO(qsong): Issue #4278 Find a good way to preserve the flexibility of Node REPL
# Repl task is hard to take over Node.js native REPL for the following reasons:
# 1. Node.js can simply start from the package source root because node package is
# self-contained.
# 2. There's no simple entry point (binary) for Node.js packages. A package may start from
# node, babel-node, babel-polyfill, webpack, etc.
    # In addition, since the repl task is modifying the package.json and there is no lockdown,
# it is impossible to use yarnpkg to start repl unless the dependency resolver is removed.
|
{
"content_hash": "1768e1ac3ed9355438565c893bf3cc6d",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 96,
"avg_line_length": 43.08974358974359,
"alnum_prop": 0.6878905087771496,
"repo_name": "fkorotkov/pants",
"id": "6e5516548d53f318a0dfb2fbb5dc11bc4023b82f",
"size": "3508",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contrib/node/src/python/pants/contrib/node/tasks/node_repl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "1805"
},
{
"name": "HTML",
"bytes": "79866"
},
{
"name": "Java",
"bytes": "481460"
},
{
"name": "JavaScript",
"bytes": "35417"
},
{
"name": "Python",
"bytes": "5931594"
},
{
"name": "Rust",
"bytes": "271643"
},
{
"name": "Scala",
"bytes": "76239"
},
{
"name": "Shell",
"bytes": "74734"
},
{
"name": "Thrift",
"bytes": "2795"
}
],
"symlink_target": ""
}
|
import hashlib
import os
import shutil
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
sys.path.insert(0, src_build_dir)
import vs_toolchain
def _HexDigest(file_name):
hasher = hashlib.sha256()
afile = open(file_name, 'rb')
blocksize = 65536
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
afile.close()
return hasher.hexdigest()
def _CopyImpl(file_name, target_dir, source_dir, verbose=False):
"""Copy |source| to |target| if it doesn't already exist or if it
needs to be updated.
"""
target = os.path.join(target_dir, file_name)
source = os.path.join(source_dir, file_name)
if (os.path.isdir(os.path.dirname(target)) and
((not os.path.isfile(target)) or
_HexDigest(source) != _HexDigest(target))):
if verbose:
print 'Copying %s to %s...' % (source, target)
if os.path.exists(target):
os.unlink(target)
shutil.copy(source, target)
def _ConditionalMkdir(output_dir):
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
def _CopyCDBToOutput(output_dir, target_arch):
"""Copies the Windows debugging executable cdb.exe to the output
directory, which is created if it does not exist. The output
directory, and target architecture that should be copied, are
passed. Supported values for the target architecture are the GYP
values "ia32" and "x64" and the GN values "x86" and "x64".
"""
_ConditionalMkdir(output_dir)
vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
# If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
# when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
win_sdk_dir = os.path.normpath(
os.environ.get('WINDOWSSDKDIR',
'C:\\Program Files (x86)\\Windows Kits\\10'))
if target_arch == 'ia32' or target_arch == 'x86':
src_arch = 'x86'
elif target_arch == 'x64':
src_arch = 'x64'
else:
print 'copy_cdb_to_output.py: unknown target_arch %s' % target_arch
sys.exit(1)
# We need to copy multiple files, so cache the computed source directory.
src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch)
# We need to copy some helper DLLs to get access to the !uniqstack
# command to dump all threads' stacks.
src_winext_dir = os.path.join(src_dir, 'winext')
dst_winext_dir = os.path.join(output_dir, 'winext')
src_winxp_dir = os.path.join(src_dir, 'winxp')
dst_winxp_dir = os.path.join(output_dir, 'winxp')
_ConditionalMkdir(dst_winext_dir)
_ConditionalMkdir(dst_winxp_dir)
# Note that the outputs from the "copy_cdb_to_output" target need to
# be kept in sync with this list.
_CopyImpl('cdb.exe', output_dir, src_dir)
_CopyImpl('dbgeng.dll', output_dir, src_dir)
_CopyImpl('dbghelp.dll', output_dir, src_dir)
_CopyImpl('dbgmodel.dll', output_dir, src_dir)
_CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
_CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
_CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
_CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
return 0
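# Invocation sketch (the output path is illustrative):
#
#     python copy_cdb_to_output.py out\Default\cdb x64
#
# This copies cdb.exe plus its engine and extension DLLs from the Windows
# SDK Debuggers directory into out\Default\cdb.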
def main():
if len(sys.argv) < 2:
print >>sys.stderr, 'Usage: copy_cdb_to_output.py <output_dir> ' + \
'<target_arch>'
return 1
return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
if __name__ == '__main__':
sys.exit(main())
|
{
"content_hash": "bf1332dce2f0f26b4f8161747c12705b",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 79,
"avg_line_length": 34.535353535353536,
"alnum_prop": 0.6809008482012284,
"repo_name": "danakj/chromium",
"id": "ea6ab3f4b304c1cf5fa0f28dfe6156362d5c038d",
"size": "3604",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "build/win/copy_cdb_to_output.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('sprints', '0001_initial'),
('tasks', '0002_auto_20140830_2253'),
]
operations = [
migrations.AddField(
model_name='task',
name='sprint',
field=models.ForeignKey(blank=True, to='sprints.Sprint', null=True),
preserve_default=True,
),
]
|
{
"content_hash": "82d649a786d763665a28157699b5c5fc",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 23.7,
"alnum_prop": 0.5822784810126582,
"repo_name": "mc706/task-burndown",
"id": "74d2e914f21f7559d2af7c3616690ca9fefa39d5",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks/migrations/0003_task_sprint.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7411"
},
{
"name": "HTML",
"bytes": "69877"
},
{
"name": "JavaScript",
"bytes": "44199"
},
{
"name": "Python",
"bytes": "24211"
}
],
"symlink_target": ""
}
|
"""flavors: an implementation of the flavor management storage
controller for sqlalchemy.
"""
import oslo_db.exception
import sqlalchemy as sa
from zaqar.storage import base
from zaqar.storage import errors
from zaqar.storage.sqlalchemy import tables
from zaqar.storage.sqlalchemy import utils
class FlavorsController(base.FlavorsBase):
def __init__(self, *args, **kwargs):
super(FlavorsController, self).__init__(*args, **kwargs)
self._pools_ctrl = self.driver.pools_controller
@utils.raises_conn_error
def list(self, project=None, marker=None, limit=10, detailed=False):
marker = marker or ''
# TODO(cpp-cabrera): optimization - limit the columns returned
# when detailed=False by specifying them in the select()
# clause
stmt = sa.sql.select([tables.Flavors]).where(
sa.and_(tables.Flavors.c.name > marker,
tables.Flavors.c.project == project)
)
if limit > 0:
stmt = stmt.limit(limit)
cursor = self.driver.run(stmt)
marker_name = {}
def it():
for cur in cursor:
marker_name['next'] = cur[0]
yield _normalize(cur, detailed=detailed)
yield it()
yield marker_name and marker_name['next']
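    # Consumption sketch (illustrative): callers unpack this two-item
    # generator as the record iterator followed by the next-page marker.
    #
    #     cursor = controller.list(project='demo', limit=10)
    #     flavors = list(next(cursor))
    #     next_marker = next(cursor)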
@utils.raises_conn_error
def get(self, name, project=None, detailed=False):
stmt = sa.sql.select([tables.Flavors]).where(
sa.and_(tables.Flavors.c.name == name,
tables.Flavors.c.project == project)
)
flavor = self.driver.run(stmt).fetchone()
if flavor is None:
raise errors.FlavorDoesNotExist(name)
return _normalize(flavor, detailed)
@utils.raises_conn_error
def create(self, name, project=None, capabilities=None):
cap = None if capabilities is None else utils.json_encode(capabilities)
try:
stmt = sa.sql.expression.insert(tables.Flavors).values(
name=name, project=project,
capabilities=cap
)
self.driver.run(stmt)
except oslo_db.exception.DBDuplicateEntry:
# TODO(flaper87): merge update/create into a single
# method with introduction of upsert
self.update(name,
project=project,
capabilities=capabilities)
@utils.raises_conn_error
def exists(self, name, project=None):
stmt = sa.sql.select([tables.Flavors.c.name]).where(
sa.and_(tables.Flavors.c.name == name,
tables.Flavors.c.project == project)
).limit(1)
return self.driver.run(stmt).fetchone() is not None
@utils.raises_conn_error
def update(self, name, project=None, capabilities=None):
fields = {}
if capabilities is not None:
fields['capabilities'] = capabilities
assert fields, '`capabilities` not found in kwargs'
if 'capabilities' in fields:
fields['capabilities'] = utils.json_encode(fields['capabilities'])
stmt = sa.sql.update(tables.Flavors).where(
sa.and_(tables.Flavors.c.name == name,
tables.Flavors.c.project == project)).values(**fields)
res = self.driver.run(stmt)
if res.rowcount == 0:
raise errors.FlavorDoesNotExist(name)
@utils.raises_conn_error
def delete(self, name, project=None):
stmt = sa.sql.expression.delete(tables.Flavors).where(
sa.and_(tables.Flavors.c.name == name,
tables.Flavors.c.project == project)
)
self.driver.run(stmt)
@utils.raises_conn_error
def drop_all(self):
stmt = sa.sql.expression.delete(tables.Flavors)
self.driver.run(stmt)
def _normalize(flavor, detailed=False):
ret = {
'name': flavor[0],
}
if detailed:
capabilities = flavor[2]
ret['capabilities'] = (utils.json_decode(capabilities)
if capabilities else {})
return ret
|
{
"content_hash": "5f24365da13a3d204c9e0371eede83b2",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 79,
"avg_line_length": 31.984375,
"alnum_prop": 0.5967269174401564,
"repo_name": "openstack/zaqar",
"id": "819bee37a876e3b2072d4fed574d98e6e89b6770",
"size": "4684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zaqar/storage/sqlalchemy/flavors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "5002"
},
{
"name": "HTML",
"bytes": "22106"
},
{
"name": "Lua",
"bytes": "4555"
},
{
"name": "Mako",
"bytes": "952"
},
{
"name": "NASL",
"bytes": "15981"
},
{
"name": "Python",
"bytes": "1912931"
},
{
"name": "Shell",
"bytes": "20061"
}
],
"symlink_target": ""
}
|
import os.path
from pex.build_system.pep_517 import build_sdist
from pex.build_system.testing import assert_build_sdist
from pex.common import touch
from pex.pip.version import PipVersion
from pex.resolve.configured_resolver import ConfiguredResolver
from pex.result import Error
from pex.testing import make_project
from pex.typing import TYPE_CHECKING
from pex.version import __version__
if TYPE_CHECKING:
from typing import Any
def test_build_sdist_project_directory_dne(tmpdir):
# type: (Any) -> None
project_dir = os.path.join(str(tmpdir), "project_dir")
dist_dir = os.path.join(str(tmpdir), "dists")
result = build_sdist(project_dir, dist_dir, PipVersion.VENDORED, ConfiguredResolver.default())
assert isinstance(result, Error)
assert str(result).startswith(
"Project directory {project_dir} does not exist.".format(project_dir=project_dir)
)
def test_build_sdist_project_directory_is_file(tmpdir):
# type: (Any) -> None
project_dir = os.path.join(str(tmpdir), "project_dir")
touch(project_dir)
dist_dir = os.path.join(str(tmpdir), "dists")
result = build_sdist(project_dir, dist_dir, PipVersion.VENDORED, ConfiguredResolver.default())
assert isinstance(result, Error)
assert str(result).startswith(
"Project directory {project_dir} is not a directory.".format(project_dir=project_dir)
)
def test_build_sdist_setup_py(tmpdir):
# type: (Any) -> None
with make_project(name="foo", version="42") as project_dir:
assert_build_sdist(project_dir, "foo", "42", tmpdir)
def test_build_sdist_pyproject_toml(
tmpdir, # type: Any
pex_project_dir, # type: str
):
# type: (...) -> None
assert_build_sdist(pex_project_dir, "pex", __version__, tmpdir)
|
{
"content_hash": "77adb0fa61aff9aa452ac038780a64b2",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 98,
"avg_line_length": 32.2,
"alnum_prop": 0.7035573122529645,
"repo_name": "pantsbuild/pex",
"id": "391de30be354a10acbcb8fcdea3130bf06eaadc0",
"size": "1903",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/build_system/test_pep_517.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1379"
},
{
"name": "Python",
"bytes": "2190044"
},
{
"name": "Shell",
"bytes": "1472"
}
],
"symlink_target": ""
}
|
"""Run a server displaying the administrative UI for the application."""
import logging
import os.path
import google
import jinja2
import webapp2
from google.appengine.tools.devappserver2.admin import admin_request_handler
from google.appengine.tools.devappserver2.admin import blobstore_viewer
from google.appengine.tools.devappserver2.admin import console
from google.appengine.tools.devappserver2.admin import cron_handler
from google.appengine.tools.devappserver2.admin import datastore_indexes_viewer
from google.appengine.tools.devappserver2.admin import datastore_stats_handler
from google.appengine.tools.devappserver2.admin import datastore_viewer
from google.appengine.tools.devappserver2.admin import mail_request_handler
from google.appengine.tools.devappserver2.admin import memcache_viewer
from google.appengine.tools.devappserver2.admin import quit_handler
from google.appengine.tools.devappserver2.admin import search_handler
from google.appengine.tools.devappserver2.admin import servers_handler
from google.appengine.tools.devappserver2.admin import static_file_handler
from google.appengine.tools.devappserver2.admin import taskqueue_queues_handler
from google.appengine.tools.devappserver2.admin import taskqueue_tasks_handler
from google.appengine.tools.devappserver2.admin import xmpp_request_handler
from google.appengine.tools.devappserver2 import wsgi_server
class AdminApplication(webapp2.WSGIApplication):
"""A WSGI application that serves an administrative UI for the application."""
def __init__(self, dispatch, configuration):
"""Initializer for AdminApplication.
Args:
dispatch: A dispatcher.Dispatcher instance used to route requests and
provide state about running servers.
configuration: An application_configuration.ApplicationConfiguration
instance containing the configuration for the application.
"""
super(AdminApplication, self).__init__(
[('/datastore', datastore_viewer.DatastoreRequestHandler),
('/datastore/edit/(.*)', datastore_viewer.DatastoreEditRequestHandler),
('/datastore/edit', datastore_viewer.DatastoreEditRequestHandler),
('/datastore-indexes',
datastore_indexes_viewer.DatastoreIndexesViewer),
('/datastore-stats', datastore_stats_handler.DatastoreStatsHandler),
('/console', console.ConsoleRequestHandler),
('/console/restart/(.+)', console.ConsoleRequestHandler.restart),
('/memcache', memcache_viewer.MemcacheViewerRequestHandler),
('/blobstore', blobstore_viewer.BlobstoreRequestHandler),
('/blobstore/blob/(.+)', blobstore_viewer.BlobRequestHandler),
('/taskqueue', taskqueue_queues_handler.TaskQueueQueuesHandler),
('/taskqueue/queue/(.+)',
taskqueue_tasks_handler.TaskQueueTasksHandler),
('/cron', cron_handler.CronHandler),
('/xmpp', xmpp_request_handler.XmppRequestHandler),
('/mail', mail_request_handler.MailRequestHandler),
('/quit', quit_handler.QuitHandler),
('/search', search_handler.SearchIndexesListHandler),
('/search/document', search_handler.SearchDocumentHandler),
('/search/index', search_handler.SearchIndexHandler),
('/assets/(.+)', static_file_handler.StaticFileHandler),
('/instances', servers_handler.ServersHandler),
webapp2.Route('/',
webapp2.RedirectHandler,
defaults={'_uri': '/instances'})],
debug=True)
self.dispatcher = dispatch
self.configuration = configuration
class AdminServer(wsgi_server.WsgiServer):
"""Serves an administrative UI for the application over HTTP."""
def __init__(self, host, port, dispatch, configuration, xsrf_token_path):
"""Initializer for AdminServer.
Args:
host: A string containing the name of the host that the server should bind
to e.g. "localhost".
port: An int containing the port that the server should bind to e.g. 80.
dispatch: A dispatcher.Dispatcher instance used to route requests and
provide state about running servers.
configuration: An application_configuration.ApplicationConfiguration
instance containing the configuration for the application.
xsrf_token_path: A string containing the path to a file that contains the
XSRF configuration for the admin UI.
"""
self._host = host
self._xsrf_token_path = xsrf_token_path
super(AdminServer, self).__init__((host, port),
AdminApplication(dispatch, configuration))
def start(self):
"""Start the AdminServer."""
admin_request_handler.AdminRequestHandler.init_xsrf(self._xsrf_token_path)
super(AdminServer, self).start()
logging.info('Starting admin server at: http://%s:%d', self._host,
self.port)
def quit(self):
"""Quits the AdminServer."""
super(AdminServer, self).quit()
console.ConsoleRequestHandler.quit()
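# Hypothetical wiring sketch (the dispatcher, configuration and token path
# below are placeholders, not provided by this module):
#
#     server = AdminServer('localhost', 8000, dispatcher, configuration,
#                          '/tmp/xsrf_token')
#     server.start()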
|
{
"content_hash": "ee7b5811086462688e4c51d9fac1cb0c",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 80,
"avg_line_length": 47.34905660377358,
"alnum_prop": 0.722056186491333,
"repo_name": "elsigh/browserscope",
"id": "96d82e12f33d3c10e2a6fe26dab1b9c0641d3cad",
"size": "5620",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/appengine_tools/devappserver2/admin/admin_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "42911"
},
{
"name": "Groff",
"bytes": "674"
},
{
"name": "HTML",
"bytes": "2895472"
},
{
"name": "JavaScript",
"bytes": "2274900"
},
{
"name": "Python",
"bytes": "4264474"
},
{
"name": "Shell",
"bytes": "642"
}
],
"symlink_target": ""
}
|
from http import HTTPStatus
from unittest import mock
import ddt
import webob
from cinder.api.contrib import qos_specs_manage
from cinder import context
from cinder import db
from cinder import exception
from cinder import objects
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import test
def stub_qos_specs(id):
res = dict(name='qos_specs_' + str(id))
res.update(dict(consumer='back-end'))
res.update(dict(id=str(id)))
specs = {"key1": "value1",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5": "value5"}
res.update(dict(specs=specs))
res.update(dict(created_at='2017-12-13T02:37:54Z'))
res.update(dict(updated_at='2017-12-13T02:38:58Z'))
return objects.QualityOfServiceSpecs(**res)
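# For reference (illustrative), stub_qos_specs('X') yields an object
# equivalent to QualityOfServiceSpecs(id='X', name='qos_specs_X',
# consumer='back-end', specs={'key1': 'value1', ..., 'key5': 'value5'}).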
def stub_qos_associates(id):
return [{
'association_type': 'volume_type',
'name': 'FakeVolTypeName',
'id': fake.VOLUME_TYPE_ID}]
def return_qos_specs_get_all(context, filters=None, marker=None, limit=None,
offset=None, sort_keys=None, sort_dirs=None):
return [
stub_qos_specs(fake.QOS_SPEC_ID),
stub_qos_specs(fake.QOS_SPEC2_ID),
stub_qos_specs(fake.QOS_SPEC3_ID),
]
def return_qos_specs_get_qos_specs(context, id):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
return stub_qos_specs(id)
def return_qos_specs_delete(context, id, force):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
elif id == fake.IN_USE_ID:
raise exception.QoSSpecsInUse(specs_id=id)
pass
def return_qos_specs_delete_keys(context, id, keys):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
if 'foo' in keys:
raise exception.QoSSpecsKeyNotFound(specs_id=id,
specs_key='foo')
def return_qos_specs_update(context, id, specs):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
elif id == fake.INVALID_ID:
raise exception.InvalidQoSSpecs(reason=id)
elif id == fake.UPDATE_FAILED_ID:
raise exception.QoSSpecsUpdateFailed(specs_id=id,
qos_specs=specs)
pass
def return_qos_specs_create(context, name, specs):
if name == 'qos_spec_%s' % fake.ALREADY_EXISTS_ID:
raise exception.QoSSpecsExists(specs_id=name)
elif name == 'qos_spec_%s' % fake.ACTION_FAILED_ID:
        raise exception.QoSSpecsCreateFailed(name=name, qos_specs=specs)
elif name == 'qos_spec_%s' % fake.INVALID_ID:
raise exception.InvalidQoSSpecs(reason=name)
return objects.QualityOfServiceSpecs(name=name,
specs=specs,
created_at='2017-12-13T02:37:54Z',
updated_at='2017-12-13T02:38:58Z',
consumer='back-end',
id=fake.QOS_SPEC_ID)
def return_get_qos_associations(context, id):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
elif id == fake.RAISE_ID:
raise exception.CinderException()
return stub_qos_associates(id)
def return_associate_qos_specs(context, id, type_id):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
elif id == fake.ACTION_FAILED_ID:
raise exception.QoSSpecsAssociateFailed(specs_id=id,
type_id=type_id)
elif id == fake.ACTION2_FAILED_ID:
raise exception.QoSSpecsDisassociateFailed(specs_id=id,
type_id=type_id)
if type_id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.VolumeTypeNotFound(
volume_type_id=type_id)
pass
def return_disassociate_all(context, id):
if id == fake.WILL_NOT_BE_FOUND_ID:
raise exception.QoSSpecsNotFound(specs_id=id)
elif id == fake.ACTION2_FAILED_ID:
raise exception.QoSSpecsDisassociateFailed(specs_id=id,
type_id=None)
@ddt.ddt
class QoSSpecManageApiTest(test.TestCase):
def _create_qos_specs(self, name, values=None):
"""Create a transfer object."""
if values:
specs = dict(name=name, qos_specs=values)
else:
specs = {'name': name,
'consumer': 'back-end',
'specs': {
'key1': 'value1',
'key2': 'value2'}}
return db.qos_specs_create(self.ctxt, specs)['id']
def setUp(self):
super(QoSSpecManageApiTest, self).setUp()
self.flags(host='fake')
self.controller = qos_specs_manage.QoSSpecsController()
self.ctxt = context.RequestContext(user_id=fake.USER_ID,
project_id=fake.PROJECT_ID,
is_admin=True)
self.user_ctxt = context.RequestContext(
fake.USER_ID, fake.PROJECT_ID, auth_token=True)
self.qos_id1 = self._create_qos_specs("Qos_test_1")
self.qos_id2 = self._create_qos_specs("Qos_test_2")
self.qos_id3 = self._create_qos_specs("Qos_test_3")
self.qos_id4 = self._create_qos_specs("Qos_test_4")
@mock.patch('cinder.volume.qos_specs.get_all_specs',
side_effect=return_qos_specs_get_all)
def test_index(self, mock_get_all_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(3, len(res['qos_specs']))
names = set()
for item in res['qos_specs']:
self.assertEqual('value1', item['specs']['key1'])
names.add(item['name'])
expected_names = ['qos_specs_%s' % fake.QOS_SPEC_ID,
'qos_specs_%s' % fake.QOS_SPEC2_ID,
'qos_specs_%s' % fake.QOS_SPEC3_ID]
self.assertEqual(set(expected_names), names)
def test_index_with_limit(self):
url = '/v3/%s/qos-specs?limit=2' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(2, len(res['qos_specs']))
self.assertEqual(self.qos_id4, res['qos_specs'][0]['id'])
self.assertEqual(self.qos_id3, res['qos_specs'][1]['id'])
expect_next_link = ('http://localhost/v3/%s/qos-specs?limit'
'=2&marker=%s') % (
fake.PROJECT_ID, res['qos_specs'][1]['id'])
self.assertEqual(expect_next_link, res['qos_specs_links'][0]['href'])
def test_index_with_offset(self):
url = '/v3/%s/qos-specs?offset=1' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(3, len(res['qos_specs']))
def test_index_with_offset_out_of_range(self):
url = '/v3/%s/qos-specs?offset=356576877698707' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index,
req)
def test_index_with_limit_and_offset(self):
url = '/v3/%s/qos-specs?limit=2&offset=1' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(2, len(res['qos_specs']))
self.assertEqual(self.qos_id3, res['qos_specs'][0]['id'])
self.assertEqual(self.qos_id2, res['qos_specs'][1]['id'])
def test_index_with_marker(self):
url = '/v3/%s/qos-specs?marker=%s' % (fake.PROJECT_ID, self.qos_id4)
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(3, len(res['qos_specs']))
def test_index_with_filter(self):
url = '/v3/%s/qos-specs?id=%s' % (fake.PROJECT_ID, self.qos_id4)
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(1, len(res['qos_specs']))
self.assertEqual(self.qos_id4, res['qos_specs'][0]['id'])
def test_index_with_sort_keys(self):
url = '/v3/%s/qos-specs?sort=id' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(4, len(res['qos_specs']))
expect_result = [self.qos_id1, self.qos_id2,
self.qos_id3, self.qos_id4]
expect_result.sort(reverse=True)
self.assertEqual(expect_result[0], res['qos_specs'][0]['id'])
self.assertEqual(expect_result[1], res['qos_specs'][1]['id'])
self.assertEqual(expect_result[2], res['qos_specs'][2]['id'])
self.assertEqual(expect_result[3], res['qos_specs'][3]['id'])
def test_index_with_sort_keys_and_sort_dirs(self):
url = '/v3/%s/qos-specs?sort=id:asc' % fake.PROJECT_ID
req = fakes.HTTPRequest.blank(url, use_admin_context=True)
res = self.controller.index(req)
self.assertEqual(4, len(res['qos_specs']))
expect_result = [self.qos_id1, self.qos_id2,
self.qos_id3, self.qos_id4]
expect_result.sort()
self.assertEqual(expect_result[0], res['qos_specs'][0]['id'])
self.assertEqual(expect_result[1], res['qos_specs'][1]['id'])
self.assertEqual(expect_result[2], res['qos_specs'][2]['id'])
self.assertEqual(expect_result[3], res['qos_specs'][3]['id'])
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete',
side_effect=return_qos_specs_delete)
def test_qos_specs_delete(self, mock_qos_delete, mock_qos_get_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=True)
self.controller.delete(req, fake.QOS_SPEC_ID)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete',
side_effect=return_qos_specs_delete)
def test_qos_specs_delete_not_found(self, mock_qos_delete,
mock_qos_get_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID,
fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.delete, req,
fake.WILL_NOT_BE_FOUND_ID)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete',
side_effect=return_qos_specs_delete)
def test_qos_specs_delete_inuse(self, mock_qos_delete,
mock_qos_get_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' % (
fake.PROJECT_ID, fake.IN_USE_ID), use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, fake.IN_USE_ID)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete',
side_effect=return_qos_specs_delete)
def test_qos_specs_delete_inuse_force(self, mock_qos_delete,
mock_qos_get_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s?force=True' %
(fake.PROJECT_ID, fake.IN_USE_ID),
use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.delete,
req, fake.IN_USE_ID)
self.assertEqual(1, self.notifier.get_notification_count())
def test_qos_specs_delete_with_invalid_force(self):
invalid_force = "invalid_bool"
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/delete_keys?force=%s' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID, invalid_force),
use_admin_context=True)
self.assertRaises(exception.InvalidParameterValue,
self.controller.delete,
req, fake.QOS_SPEC_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete_keys',
side_effect=return_qos_specs_delete_keys)
def test_qos_specs_delete_keys(self, mock_qos_delete_keys,
mock_get_qos):
body = {"keys": ['bar', 'zoo']}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/delete_keys' %
(fake.PROJECT_ID, fake.IN_USE_ID),
use_admin_context=True)
self.controller.delete_keys(req, fake.IN_USE_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.delete_keys',
side_effect=return_qos_specs_delete_keys)
def test_qos_specs_delete_keys_qos_notfound(self, mock_qos_specs_delete):
body = {"keys": ['bar', 'zoo']}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/delete_keys' %
(fake.PROJECT_ID,
fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.delete_keys,
req, fake.WILL_NOT_BE_FOUND_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
def test_qos_specs_delete_keys_invalid_key(self):
body = {"keys": ['', None]}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/delete_keys' %
(fake.PROJECT_ID,
fake.IN_USE_ID),
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.delete_keys,
req, fake.IN_USE_ID, body=body)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.delete_keys',
side_effect=return_qos_specs_delete_keys)
def test_qos_specs_delete_keys_badkey(self, mock_qos_specs_delete,
mock_get_qos):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/delete_keys' %
(fake.PROJECT_ID, fake.IN_USE_ID),
use_admin_context=True)
body = {"keys": ['foo', 'zoo']}
self.assertRaises(exception.QoSSpecsKeyNotFound,
self.controller.delete_keys,
req, fake.IN_USE_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.delete_keys',
side_effect=return_qos_specs_delete_keys)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
def test_qos_specs_delete_keys_get_notifier(self, mock_get_qos_specs,
mock_qos_delete_keys):
body = {"keys": ['bar', 'zoo']}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/delete_keys' %
(fake.PROJECT_ID, fake.IN_USE_ID),
use_admin_context=True)
self.controller.delete_keys(req, fake.IN_USE_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.create',
side_effect=return_qos_specs_create)
def test_create(self, mock_qos_spec_create):
body = {"qos_specs": {"name": "qos_specs_%s" % fake.QOS_SPEC_ID,
"key1": "value1"}}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' %
fake.PROJECT_ID,
use_admin_context=True)
res_dict = self.controller.create(req, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
self.assertEqual('qos_specs_%s' % fake.QOS_SPEC_ID,
res_dict['qos_specs']['name'])
@mock.patch('cinder.volume.qos_specs.create',
side_effect=return_qos_specs_create)
def test_create_invalid_input(self, mock_qos_get_specs):
body = {"qos_specs": {"name": 'qos_spec_%s' % fake.INVALID_ID,
"consumer": "invalid_consumer"}}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.create',
side_effect=return_qos_specs_create)
def test_create_conflict(self, mock_qos_spec_create):
body = {"qos_specs": {"name": 'qos_spec_%s' % fake.ALREADY_EXISTS_ID,
"key1": "value1"}}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
self.assertRaises(webob.exc.HTTPConflict,
self.controller.create, req, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.create',
side_effect=return_qos_specs_create)
def test_create_failed(self, mock_qos_spec_create):
body = {"qos_specs": {"name": 'qos_spec_%s' % fake.ACTION_FAILED_ID,
"key1": "value1"}}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.create, req, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@ddt.data({'foo': {'a': 'b'}},
{'qos_specs': {'a': 'b'}},
{'qos_specs': 'string'},
None)
def test_create_invalid_body_bad_request(self, body):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
req.method = 'POST'
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
@ddt.data({'name': 'fake_name', 'a' * 256: 'a'},
{'name': 'fake_name', 'a': 'a' * 256},
{'name': 'fake_name', '': 'a'})
def test_create_qos_with_invalid_specs(self, value):
body = {'qos_specs': value}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
req.method = 'POST'
self.assertRaises(exception.InvalidInput,
self.controller.create, req, body=body)
@ddt.data(({'name': None}, exception.ValidationError),
({'name': ''}, exception.ValidationError),
({'name': ' '}, exception.ValidationError),
({'name': 'n' * 256}, exception.ValidationError))
@ddt.unpack
def test_create_qos_with_invalid_spec_name(self, value, exception_class):
body = {'qos_specs': value}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' % fake.PROJECT_ID,
use_admin_context=True)
req.method = 'POST'
self.assertRaises(exception_class,
self.controller.create, req, body=body)
@mock.patch('cinder.volume.qos_specs.update',
side_effect=return_qos_specs_update)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
def test_update(self, mock_get_qos, mock_qos_update):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID),
use_admin_context=True)
body = {'qos_specs': {'key1': 'value1',
'key2': 'value2'}}
res = self.controller.update(req, fake.QOS_SPEC_ID, body=body)
self.assertDictEqual(body, res)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.update',
side_effect=return_qos_specs_update)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
def test_update_not_found(self, mock_get_qos_specs, mock_qos_update):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID,
fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
body = {'qos_specs': {'key1': 'value1',
'key2': 'value2'}}
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.update,
req, fake.WILL_NOT_BE_FOUND_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.update',
side_effect=return_qos_specs_update)
def test_update_invalid_input(self, mock_qos_update, mock_get_qos):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID, fake.INVALID_ID),
use_admin_context=True)
body = {'qos_specs': {'key1': 'value1',
'key2': 'value2'}}
self.assertRaises(exception.InvalidQoSSpecs,
self.controller.update,
req, fake.INVALID_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@ddt.data({'qos_specs': {'key1': ['value1']}},
{'qos_specs': {1: 'value1'}}
)
def test_update_non_string_key_or_value(self, body, mock_get_qos):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID, fake.UUID1),
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.update,
req, fake.UUID1, body=body)
self.assertEqual(0, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.update',
side_effect=return_qos_specs_update)
def test_update_failed(self, mock_qos_update, mock_get_qos):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID,
fake.UPDATE_FAILED_ID),
use_admin_context=True)
body = {'qos_specs': {'key1': 'value1',
'key2': 'value2'}}
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.update,
req, fake.UPDATE_FAILED_ID, body=body)
self.assertEqual(1, self.notifier.get_notification_count())
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
def test_show(self, mock_get_qos_specs):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=True)
res_dict = self.controller.show(req, fake.QOS_SPEC_ID)
self.assertEqual(fake.QOS_SPEC_ID, res_dict['qos_specs']['id'])
self.assertEqual('qos_specs_%s' % fake.QOS_SPEC_ID,
res_dict['qos_specs']['name'])
@mock.patch('cinder.volume.qos_specs.get_associations',
side_effect=return_get_qos_associations)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
    def test_get_associations(self, mock_get_qos, mock_get_associations):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associations' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=True)
res = self.controller.associations(req, fake.QOS_SPEC_ID)
self.assertEqual('FakeVolTypeName',
res['qos_associations'][0]['name'])
self.assertEqual(fake.VOLUME_TYPE_ID,
res['qos_associations'][0]['id'])
@mock.patch('cinder.volume.qos_specs.get_associations',
side_effect=return_get_qos_associations)
    def test_get_associations_not_found(self, mock_get_associations):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associations' %
(fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.associations,
req, fake.WILL_NOT_BE_FOUND_ID)
@mock.patch('cinder.volume.qos_specs.get_associations',
side_effect=return_get_qos_associations)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
def test_get_associations_failed(self, mock_get_qos,
mock_get_associations):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associations' % (
fake.PROJECT_ID, fake.RAISE_ID), use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.associations,
req, fake.RAISE_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
side_effect=return_associate_qos_specs)
def test_associate(self, mock_associate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associate?vol_type_id=%s' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.VOLUME_TYPE_ID),
use_admin_context=True)
res = self.controller.associate(req, fake.QOS_SPEC_ID)
self.assertEqual(HTTPStatus.ACCEPTED, res.status_int)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
side_effect=return_associate_qos_specs)
def test_associate_no_type(self, mock_associate, mock_get_qos):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s/associate' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID),
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.associate, req, fake.QOS_SPEC_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
side_effect=return_associate_qos_specs)
def test_associate_not_found(self, mock_associate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associate?vol_type_id=%s' % (
fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID,
fake.VOLUME_TYPE_ID), use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.associate, req,
fake.WILL_NOT_BE_FOUND_ID)
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associate?vol_type_id=%s' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.VolumeTypeNotFound,
self.controller.associate, req, fake.QOS_SPEC_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.associate_qos_with_type',
side_effect=return_associate_qos_specs)
def test_associate_fail(self, mock_associate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/associate?vol_type_id=%s' %
(fake.PROJECT_ID, fake.ACTION_FAILED_ID, fake.VOLUME_TYPE_ID),
use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.associate, req,
fake.ACTION_FAILED_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
side_effect=return_associate_qos_specs)
def test_disassociate(self, mock_disassociate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID, fake.VOLUME_TYPE_ID),
use_admin_context=True)
res = self.controller.disassociate(req, fake.QOS_SPEC_ID)
self.assertEqual(HTTPStatus.ACCEPTED, res.status_int)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
side_effect=return_associate_qos_specs)
def test_disassociate_no_type(self, mock_disassociate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.disassociate, req, fake.QOS_SPEC_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
side_effect=return_associate_qos_specs)
def test_disassociate_not_found(self, mock_disassociate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID,
fake.VOLUME_TYPE_ID), use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.disassociate, req,
fake.WILL_NOT_BE_FOUND_ID)
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate?vol_type_id=%s' %
(fake.PROJECT_ID, fake.VOLUME_TYPE_ID, fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.VolumeTypeNotFound,
self.controller.disassociate, req,
fake.VOLUME_TYPE_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_qos_specs',
side_effect=return_associate_qos_specs)
def test_disassociate_failed(self, mock_disassociate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate?vol_type_id=%s' % (
fake.PROJECT_ID, fake.ACTION2_FAILED_ID, fake.VOLUME_TYPE_ID),
use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.disassociate, req,
fake.ACTION2_FAILED_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_all',
side_effect=return_disassociate_all)
def test_disassociate_all(self, mock_disassociate, mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate_all' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=True)
res = self.controller.disassociate_all(req, fake.QOS_SPEC_ID)
self.assertEqual(HTTPStatus.ACCEPTED, res.status_int)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_all',
side_effect=return_disassociate_all)
def test_disassociate_all_not_found(self, mock_disassociate,
mock_get_qos):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate_all' % (
fake.PROJECT_ID, fake.WILL_NOT_BE_FOUND_ID),
use_admin_context=True)
self.assertRaises(exception.QoSSpecsNotFound,
self.controller.disassociate_all, req,
fake.WILL_NOT_BE_FOUND_ID)
@mock.patch('cinder.volume.qos_specs.get_qos_specs',
side_effect=return_qos_specs_get_qos_specs)
@mock.patch('cinder.volume.qos_specs.disassociate_all',
side_effect=return_disassociate_all)
def test_disassociate_all_failed(self, mock_disassociate, mock_get):
req = fakes.HTTPRequest.blank(
'/v3/%s/qos-specs/%s/disassociate_all' % (
fake.PROJECT_ID, fake.ACTION2_FAILED_ID),
use_admin_context=True)
self.assertRaises(webob.exc.HTTPInternalServerError,
self.controller.disassociate_all, req,
fake.ACTION2_FAILED_ID)
def test_index_no_admin_user(self):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' %
fake.PROJECT_ID, use_admin_context=False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_create_no_admin_user(self):
body = {"qos_specs": {"name": "qos_specs_%s" % fake.QOS_SPEC_ID,
"key1": "value1"}}
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs' %
fake.PROJECT_ID, use_admin_context=False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create, req, body=body)
def test_update_no_admin_user(self):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' %
(fake.PROJECT_ID, fake.QOS_SPEC_ID),
use_admin_context=False)
body = {'qos_specs': {'key1': 'value1',
'key2': 'value2'}}
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.update, req, fake.QOS_SPEC_ID,
body=body)
def test_qos_specs_delete_no_admin_user(self):
req = fakes.HTTPRequest.blank('/v3/%s/qos-specs/%s' % (
fake.PROJECT_ID, fake.QOS_SPEC_ID), use_admin_context=False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.delete, req, fake.QOS_SPEC_ID)
|
{
"content_hash": "663a011fabf96b34d4333c0439396108",
"timestamp": "",
"source": "github",
"line_count": 785,
"max_line_length": 79,
"avg_line_length": 47.38980891719745,
"alnum_prop": 0.5682911749684149,
"repo_name": "mahak/cinder",
"id": "1590ddff60db19651a42b1c01d95abec12f7dabe",
"size": "37864",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/api/contrib/test_qos_specs_manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "259"
},
{
"name": "Mako",
"bytes": "976"
},
{
"name": "Python",
"bytes": "25078356"
},
{
"name": "Shell",
"bytes": "6456"
},
{
"name": "Smarty",
"bytes": "67595"
}
],
"symlink_target": ""
}
|
'''
Created on 6 janv. 2014
@author: didia
'''
from shop.handlers.base_handler import RpcBaseHandler
from market.handlers.main import RpcMainGetEvents
from market.lib import event_review
from market.lib.attendance import AttendanceManager
from market.lib.event import EventManager
from market.market_exceptions import RpcMarketException
from market.handlers.base import student_required
from google.appengine.ext import ndb
from .base import BaseHandler
def participant_required(handler):
    def check_participation(self, *args, **kargs):
        event_key = self.request.get('event_key')
        if AttendanceManager.is_user_attending_event(event_key, self.user_info['user_id']):
            return handler(self, *args, **kargs)
        else:
            self.abort(404)
    return check_participation
class ShowReviewingHandler(BaseHandler):
@student_required
def get(self):
context = self.get_template_context()
context['events'] = context['events'][:15]
self.render_template('show_many_events.html', context)
def get_template_context(self):
context = dict()
context['left_sidebar'] = 'reviewing'
context['events'] = EventManager.get_events_reviewing(self.user_info['user_id'])
return context
class RpcReviewingGetEvents(ShowReviewingHandler, RpcMainGetEvents):
@student_required
def get(self, *args, **kargs):
filter_key = self.request.get('filter_key')
sort_order = self.request.get('sort_order')
tab = self.request.route_kwargs.get('page')
events = EventManager.get_events_reviewing(self.user_info['user_id'],
category=tab,
filtered_by=filter_key,
ordered_by=sort_order)
self.prepare_and_serve_events(events)
class RpcReviewingHandler(RpcBaseHandler):
@student_required
def post(self):
try:
event_key_urlsafe = self.request.get('event_key')
score = int(self.request.get('score'))
if score < 1 or score > 5:
                raise RpcMarketException(msg="Unable to save the review: score must be between 1 and 5")
self.review_event(event_key_urlsafe, score)
self.send_success_response()
except RpcMarketException as e:
self.send_failed_response(e)
#@participant_required
def review_event(self, event_key, score):
return event_review.review_event(event_key, self.user_info['user_id'], score)
|
{
"content_hash": "104a3bf42943c7d8f21495623246b8db",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 91,
"avg_line_length": 36.513888888888886,
"alnum_prop": 0.6146823887409661,
"repo_name": "EventBuck/EventBuck",
"id": "1fce365349897e57418dc29457d3dd3207775612",
"size": "2675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "market/handlers/event/reviewing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "72386"
},
{
"name": "JavaScript",
"bytes": "178307"
},
{
"name": "Python",
"bytes": "302393"
}
],
"symlink_target": ""
}
|
from ipdb import launch_ipdb_on_exception
from packagesample import start
with launch_ipdb_on_exception():
start.main()
|
{
"content_hash": "501fceefcc60c667d4957efe393dea66",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 41,
"avg_line_length": 25,
"alnum_prop": 0.776,
"repo_name": "lovato/machete",
"id": "c269bee975fcb74fd396911b0b0f2aa322b17665",
"size": "187",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "machete/templates/app/run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "29"
},
{
"name": "HTML",
"bytes": "1532"
},
{
"name": "JavaScript",
"bytes": "22"
},
{
"name": "Python",
"bytes": "19456"
}
],
"symlink_target": ""
}
|
import inspect
import inflection
from marshmallow import Schema, fields
from pillowtalk.exceptions import PillowtalkError
from pillowtalk.relationship import Relationship
from pillowtalk.utils import validate_init
# TODO: Ability to add relationships without relationship string interpretation
# TODO: Wrap model collections in a class such that __getitem__ will fullfill the relationship...
# TODO: Inherit fields and relationships from super class
# TODO: Automatically load class when relationship is fullfilled so you don't have to code in cls.load(r) in the Base class you use
# TODO: partially unmarshalled lists, when calling [i] it will update the object on the fly...
# TODO: needs to know when object is deserialized completely.
class APIInterface(object):
@classmethod
def find(cls, *args, **kwargs):
raise NotImplementedError("method \"{0}\" is not yet implemented for {1}.".format("find", cls.__name__))
@classmethod
def where(cls, *args, **kwargs):
raise NotImplementedError("method \"{0}\" is not yet implemented for {1}.".format("where", cls.__name__))
@classmethod
def all(cls):
raise NotImplementedError("method \"{0}\" is not yet implemented for {1}.".format("all", cls.__name__))
@classmethod
def find_by_name(cls, *args, **kwargs):
raise NotImplementedError("method \"{0}\" is not yet implemented for {1}.".format("find_by_name", cls.__name__))
def update(cls, *args, **kwargs):
# self.__dict__.update(self.__class__.find(self.id).__dict__)
raise NotImplementedError("method \"{0}\" is not yet implemented for {1}.".format("update", cls.__name__))
# TODO: Force unmarshalling of all or some of the relationships...
def force(self):
raise NotImplementedError("Force is not yet implemented")
class PillowtalkBase(APIInterface, object):
""" Basic model for api items """
Schema = None
models = {}
UNMARSHALL = "_unmarshall"
@validate_init
def __init__(self, *args, **kwargs):
vars(self).update(kwargs)
self.raw = None
self.__class__.check_for_schema()
self._unmarshall = False
@classmethod
def check_for_schema(cls):
""" Checks to see if class has a Schema """
if not hasattr(cls, "Schema") or cls.Schema is None:
raise PillowtalkError("Schema not found. @add_schema may not have been added to class definition.")
def __getattribute__(self, name):
""" Override for attributes. If attribute is found and attribute is a relationship, an attempt to fullfill
the relationship will be made. """
x = object.__getattribute__(self, name)
if name.startswith("_"):
return x
schema_cls = object.__getattribute__(self, Schema.__name__)
if name in schema_cls.relationships:
if object.__getattribute__(self, PillowtalkBase.UNMARSHALL): # locking marshalling prevents recursion
# Decide to use original value or fullfilled value...
r = schema_cls.relationships[name]
if type(x) is r.mod2: # if relationship is already fullfilled
return x
else:
new_x = self.fullfill_relationship(name)
if new_x is not None and new_x != [None] and new_x != []:
return new_x
        if issubclass(x.__class__, Relationship):
            raise TypeError("Relationship \"{0}\" was not correctly resolved.".format(name))
return x
def __getattr__(self, name, saveattr=False):
""" If attribute is not found, attempts to fullfill the relationship """
schema_cls = object.__getattribute__(self, Schema.__name__)
if name in schema_cls.relationships:
v = self.fullfill_relationship(name)
if saveattr:
setattr(self, name, v)
return v
# TODO: if attribute doesn't exist, attempt to update from database
v = object.__getattribute__(self, name)
return v
def _get_relationship(self, name):
return self.Schema.relationships[name]
# def _has_relationship(self, name):
# schema_cls = object.__getattribute__(self, Schema.__name__)
# return name in schema_cls.relationships
@classmethod
def model_fields(cls):
""" Returns the models fields """
members = inspect.getmembers(cls, lambda a: not (inspect.isroutine(a)))
return [m for m in members if issubclass(m[1].__class__, fields.Field)]
def fullfill_relationship(self, relationship_name):
"""
Fullfills a relationship using "using", "ref", "fxn."
Sample
Promise("sample_type", <SampleType>, "sample_type_id", "find")
"""
relationship = self._get_relationship(relationship_name)
self._lock_unmarshalling()
x = relationship.fullfill(self)
self._unlock_unmarshalling()
return x
@classmethod
def to_model(cls, result, additional_opts=None):
""" Loads model from result using Marshmallow schema located in cls.Schema """
opts = {}
opts.update(cls.get_schema_opts())
if additional_opts is not None:
opts.update(additional_opts)
schema = cls.Schema(**opts)
model, errors = schema.load(result)
return model
@classmethod
def get_model_by_name(cls, name):
""" Converts a snake_case model name to the model object """
model_name = inflection.camelize(name) # class name of the model to use
model = cls.models[model_name]
return model
@classmethod
def get_schema_opts(cls):
if hasattr(cls, "SCHEMA_OPTS"):
return cls.SCHEMA_OPTS
else:
return {}
@classmethod
def json_to_model(cls, data):
""" Converts a json to a model using a Schema """
m = cls.to_model(data)
m.raw = data
cls._unlock_unmarshalling(m)
cls.set_additional_fields(m, data)
return m
@classmethod
def json_to_models(cls, data):
""" Converts a list of json to a list of models using a Schema """
models = cls.to_model(data, {"many": True})
for model_data, model in zip(data, models):
model.raw = model_data
cls._unlock_unmarshalling(model)
cls.set_additional_fields(model, model_data)
return models
def _lock_unmarshalling(self):
""" locks model so relationships cannot be fullfilled. Prevents recursion. """
object.__setattr__(self, PillowtalkBase.UNMARSHALL, False)
def _unlock_unmarshalling(self):
""" unlocks model so relationships can be fullfilled. """
object.__setattr__(self, PillowtalkBase.UNMARSHALL, True)
# def _add_relationships(self):
# """ Copies relationship found in the Schema to this instance """
# for name, relationship in self.__class__.Schema.relationships.items():
# setattr(self, name, self._get_relationship(name))
@classmethod
def set_additional_fields(cls, model, data):
""" Set attributes for additional fields not found in the Schema or model definition """
for k, v in data.items():
if not hasattr(model, k):
setattr(model, k, v)
    # TODO: forward propagate properties if there is a relationship...
@classmethod
def load(cls, data):
""" Special load that will unmarshall dict objects or a list of dict objects """
cls.check_for_schema()
models = None
if type(data) is list:
models = cls.json_to_models(data)
# if len(models) > 0 and issubclass(models[0].__class__, PillowtalkBase):
# # [m._add_relationships() for m in models]
elif type(data) is dict:
models = cls.json_to_model(data)
else:
raise PillowtalkError("Data not recognized. Supply a dict or list: \"{0}\"".format(data))
return models
def dump(self):
s = self.__class__.Schema()
return s.dump(self).data
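# --- Illustrative usage (a hedged sketch, not part of the library) ---
# A concrete model pairs this base with a marshmallow Schema, typically via
# the @add_schema decorator referenced in check_for_schema(). Given such a
# model, round-tripping might look like (the "Sample" class and its fields
# are assumptions for illustration only):
#
#     sample = Sample.load({"id": 1, "name": "my_sample"})    # dict -> model
#     samples = Sample.load([{"id": 1}, {"id": 2}])           # list -> models
#     data = sample.dump()                                    # model -> dict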
|
{
"content_hash": "d2282644b29428a431a9996bceaa061f",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 131,
"avg_line_length": 39.71219512195122,
"alnum_prop": 0.6197027392212259,
"repo_name": "jvrana/Pillowtalk",
"id": "fcc704b12c082adcc02b5950dccb2f9e5b638de7",
"size": "8141",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pillowtalk/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52925"
}
],
"symlink_target": ""
}
|
from cms.admin.dialog.forms import get_copy_dialog_form
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.admin.views.decorators import staff_member_required
from django.http import Http404, HttpResponse
from django.conf import settings
from cms.models import Page
@staff_member_required
def get_copy_dialog(request, page_id):
if not settings.CMS_PERMISSION or not settings.CMS_MODERATOR:
return HttpResponse('')
page = get_object_or_404(Page, pk=page_id)
target = get_object_or_404(Page, pk=request.REQUEST['target'])
if not page.has_change_permission(request) or \
not target.has_add_permission(request):
raise Http404
context = {
'dialog_id': 'dialog-copy',
'form': get_copy_dialog_form(request)(),
'callback': request.REQUEST['callback'],
}
return render_to_response("admin/cms/page/dialog/copy.html", context)
|
{
"content_hash": "9c24cc55d10ddf102eee42bd233f2159",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 73,
"avg_line_length": 37.84,
"alnum_prop": 0.7029598308668076,
"repo_name": "emiquelito/django-cms-2.0",
"id": "f7264d89247663cd466f161ae8db1fc4b69e2f6a",
"size": "946",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cms/admin/dialog/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "711305"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "1196111"
}
],
"symlink_target": ""
}
|
import roslib
import rospy
import smach
import smach_ros
import os
from gazebo_msgs.srv import (
SpawnModel,
DeleteModel,
)
from sensor_msgs.msg import JointState
import struct
from std_msgs.msg import Header
from baxter_core_msgs.srv import (
SolvePositionIK,
SolvePositionIKRequest,
)
import rospkg
from geometry_msgs.msg import (
PoseStamped,
Pose,
Point,
Quaternion,
)
import baxter_interface
from baxter_interface import CHECK_VERSION
def delete_gazebo_model(model):
try:
delete_model = rospy.ServiceProxy('/gazebo/delete_model', DeleteModel)
resp_delete = delete_model(model)
except rospy.ServiceException, e:
rospy.loginfo("Delete Model service call failed: {0}".format(e))
class LoadGazeboModelState(smach.State):
def __init__(self, name, model_path, input_keys = ['pose', 'reference_frame'], pose_cb = None):
smach.State.__init__(self, input_keys=input_keys, outcomes=['succeeded'])
self._name = name
self._model_path = model_path
# Set up a poses callback
self._pose_cb = pose_cb
def execute(self, userdata):
# If a pose callback has been defined, use it to format
# pose specified by the input keys in the userdata
if self._pose_cb:
try:
pose = self._pose_cb(userdata)
except Exception as e:
rospy.logerr('Error when using poses callback to format poses: ' + repr(e))
raise
else:
if 'pose' in userdata:
pose = userdata.pose
else:
raise ValueError('Pose should be specified in userdata!')
# Parse pose
try:
if isinstance(pose, PoseStamped):
pose = pose.pose
elif isinstance(pose, Pose):
pose = pose
elif isinstance(pose, list):
position = Point(x=pose[0][0], y=pose[0][1], z=pose[0][2])
orientation = Quaternion(x=pose[1][0], y=pose[1][1], z=pose[1][2], w=pose[1][3])
pose = Pose(position=position, orientation=orientation)
else:
raise ValueError('Pose should be specified as a list, Pose or PoseStamped!')
except Exception as e:
rospy.logerr('Error when parsing Gazebo model pose: ' + repr(e))
raise
# Parse reference_frame
try:
if 'reference_frame' in userdata:
reference_frame = userdata.reference_frame
if isinstance(reference_frame, str):
pass
elif isinstance(reference_frame, list):
if isinstance(reference_frame[0], str):
reference_frame = reference_frame[0]
else:
raise ValueError('The reference frame should be specified as a string!')
else:
raise ValueError('The reference frame should be specified as a string!')
else:
raise ValueError('The reference frame should be specified in userdata!')
except Exception as e:
rospy.logerr('Error when parsing Gazebo model reference frame: ' + repr(e))
raise
# Load model SDF/URDF XML
try:
model_xml = ''
with open(self._model_path, 'r') as model_file:
model_xml = model_file.read().replace('\n', '')
except Exception as e:
rospy.logerr('Error when loading Gazebo model XML file: ' + repr(e))
raise
        # Spawn model SDF/URDF
        try:
            if os.path.splitext(self._model_path)[1][1:].lower() == 'sdf':
                spawn_service_type = 'sdf'
            elif os.path.splitext(self._model_path)[1][1:].lower() == 'urdf':
                spawn_service_type = 'urdf'
            else:
                raise ValueError('Model file should have an .sdf or .urdf extension!')
        except Exception as e:
            rospy.logerr('Error when determining whether Gazebo model is SDF or URDF: ' + repr(e))
            raise
        try:
            # Use the spawn service that matches the model type
            # (/gazebo/spawn_sdf_model or /gazebo/spawn_urdf_model).
            spawn_service_proxy = rospy.ServiceProxy(
                '/gazebo/spawn_' + spawn_service_type + '_model', SpawnModel)
            spawn_response = spawn_service_proxy(self._name, model_xml, "/",
                                                 pose, reference_frame)
        except rospy.ServiceException, e:
            rospy.logerr('Spawn ' + spawn_service_type.upper() + ' service call failed: {0}'.format(e))
return 'succeeded'
class MoveToJointPositionsState(smach.State):
""" This state moves a given limb to the specified joint positions using the Baxter inteface.
Note that this state does not make use of the joint trajectory action server.
"""
def __init__(self, limb_interfaces, timeout=15.0, input_keys = ['limb', 'positions'], positions_cb = None):
smach.State.__init__(self, outcomes=['succeeded', 'aborted'], input_keys=input_keys)
self._limb_interfaces = limb_interfaces
self._timeout = timeout
# Set up a points callback
self._positions_cb = positions_cb
def execute(self, userdata):
# Get limb from userdata
limb = userdata.limb
# If a positions callback has been defined, use it to format
# positions specified by the input keys in the userdata
if self._positions_cb:
try:
positions = self._positions_cb(userdata)
except Exception as e:
rospy.logerr('Error when using positions callback to format joint positions: ' + repr(e))
raise
else:
if 'positions' in userdata:
positions = userdata.positions
else:
raise ValueError('Joint positions should be specified in userdata!')
# Check whether or not positions is a singleton and convert if necessary
try:
if isinstance(positions, list):
if isinstance(positions[0], list) or isinstance(positions[0], JointState):
positions = positions[0]
except Exception as e:
rospy.logerr('Error when converting joint positions to singleton: ' + repr(e))
raise
# Parse positions
try:
if isinstance(positions, list):
limb_joint_names = [self._limb_interfaces[limb].name + joint_name for joint_name in ['_s0', '_s1', '_e0', '_e1', '_w0', '_w1', '_w2']]
positions = dict(zip(limb_joint_names, positions))
elif isinstance(positions, JointState):
positions = dict(zip(positions.name, positions.position))
else:
raise ValueError('Positions should be specified as a list or a JointState.')
except Exception as e:
rospy.logerr('Error when parsing joint positions: ' + repr(e))
raise
try:
self._limb_interfaces[limb].move_to_joint_positions(positions, timeout=self._timeout)
except Exception as e:
rospy.logerr('Error when using limb interface to move to joint positions: ' + repr(e))
raise
return 'succeeded'
class PoseToJointTrajServiceState(smach.State):
def __init__(self, ik_service_proxies, timeout=5.0,
input_keys=['limb', 'poses', 'offsets'], output_keys=['joints'], poses_cb = None, offsets_cb = None):
smach.State.__init__(self, outcomes=['succeeded', 'aborted'], input_keys=input_keys, output_keys=output_keys)
self._ik_service_proxies = ik_service_proxies
self._timeout = timeout
self._verbose = True
# Set up a poses callback
self._poses_cb = poses_cb
# Set up an offsets callback
self._offsets_cb = offsets_cb
def execute(self, userdata):
# Get limb from userdata
limb = userdata.limb
# If a poses callback has been defined, use it to format
# poses specified by the input keys in the userdata
if self._poses_cb:
try:
poses = self._poses_cb(userdata)
except Exception as e:
rospy.logerr('Error when using poses callback to format poses: ' + repr(e))
raise
else:
if 'poses' in userdata:
poses = userdata.poses
else:
raise ValueError('Joint positions should be specified in userdata!')
# If an offsets callback has been defined, use it to format
# offsets specified by the input keys in the userdata
if self._offsets_cb:
try:
offsets = self._offsets_cb(userdata)
except Exception as e:
rospy.logerr('Error when using offsets callback to format pose offsets: ' + repr(e))
raise
else:
if 'offsets' in userdata:
offsets = userdata.offsets
else:
offsets = None
# Check if poses is a list, singleton or otherwise, and convert if necessary
try:
            if not isinstance(poses, list):
                poses = [poses]
            elif (len(poses) == 2 and isinstance(poses[0], list)
                  and len(poses[0]) != len(poses[1])):
                # A single pose given as a [position, orientation] pair
                poses = [poses]
except Exception as e:
rospy.logerr('Error when converting poses to a list: ' + repr(e))
raise
# Check if offsets is a singleton and convert to list if necessary
if offsets:
try:
                if not isinstance(offsets, list):
                    offsets = [offsets]
                elif (len(offsets) == 2 and isinstance(offsets[0], list)
                      and len(offsets[0]) != len(offsets[1])):
                    # A single offset given as a [position, orientation] pair
                    offsets = [offsets]
except Exception as e:
rospy.logerr('Error when converting offsets to a list: ' + repr(e))
raise
# Set up a request object
ik_request = SolvePositionIKRequest()
# Parse poses from userdata, stamp them, add offsets if required,
# and append to inverse kinematics request.
try:
header = Header(stamp=rospy.Time.now(), frame_id='base')
for i_pose in range(len(poses)):
# Parse pose
pose = poses[i_pose]
if isinstance(pose, PoseStamped):
pose_stamped = pose
elif isinstance(pose, Pose):
pose_stamped = PoseStamped(header=header, pose=pose)
elif isinstance(pose, list):
position = Point(x=pose[0][0], y=pose[0][1], z=pose[0][2])
orientation = Quaternion(x=pose[1][0], y=pose[1][1], z=pose[1][2], w=pose[1][3])
pose_stamped = PoseStamped(header=header, pose = Pose(position=position, orientation=orientation))
else:
return 'aborted'
# Parse offset
if offsets:
offset = offsets[i_pose]
if isinstance(offset, PoseStamped):
offset = offset.pose
elif isinstance(offset, Pose):
pass
elif isinstance(offset, list):
offset = Pose(position=Point(x=offset[0][0], y=offset[0][1], z=offset[0][2]),
orientation=Quaternion(x=offset[1][0], y=offset[1][1], z=offset[1][2], w=offset[1][3]))
pose_stamped.pose.position.x = pose_stamped.pose.position.x + offset.position.x
pose_stamped.pose.position.y = pose_stamped.pose.position.y + offset.position.y
pose_stamped.pose.position.z = pose_stamped.pose.position.z + offset.position.z
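                    # NOTE: orientation components are summed element-wise
                    # below, which is only a crude offset; composing rotations
                    # properly would require quaternion multiplication.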
pose_stamped.pose.orientation.x = pose_stamped.pose.orientation.x + offset.orientation.x
pose_stamped.pose.orientation.y = pose_stamped.pose.orientation.y + offset.orientation.y
pose_stamped.pose.orientation.z = pose_stamped.pose.orientation.z + offset.orientation.z
pose_stamped.pose.orientation.w = pose_stamped.pose.orientation.w + offset.orientation.w
# Append pose to IK request
ik_request.pose_stamp.append(pose_stamped)
except Exception as e:
rospy.logerr('Error when parsing poses/offsets and building inverse kinematics request: ' + repr(e))
raise
# Wait for service (important!)
self._ik_service_proxies[limb].wait_for_service(self._timeout)
# Receive response
try:
ik_response = self._ik_service_proxies[limb](ik_request)
except (rospy.ServiceException, rospy.ROSException), e:
rospy.logerr("Inverse kinematics service call failed: %s" % (e,))
return 'aborted'
# Check response validity and return result appropriately
resp_seeds = struct.unpack('<%dB' % len(ik_response.result_type), ik_response.result_type)
limb_joints = {}
if (resp_seeds[0] != ik_response.RESULT_INVALID):
seed_str = {
ik_request.SEED_USER: 'User Provided Seed',
ik_request.SEED_CURRENT: 'Current Joint Angles',
ik_request.SEED_NS_MAP: 'Nullspace Setpoints',
}.get(resp_seeds[0], 'None')
if self._verbose:
print("IK Solution SUCCESS - Valid Joint Solution Found from Seed Type: {0}".format(
(seed_str)))
# Format solution into Limb API-compatible dictionary
limb_joints = dict(zip(ik_response.joints[0].name, ik_response.joints[0].position))
if self._verbose:
print("IK Joint Solution:\n{0}".format(limb_joints))
print("------------------")
else:
rospy.logerr("INVALID POSE - No Valid Joint Solution Found.")
return 'aborted'
if any(response == ik_response.RESULT_INVALID for response in resp_seeds):
print('resp_seeds: {}'.format(resp_seeds))
return 'aborted'
else:
userdata.joints = ik_response.joints
return 'succeeded'
class GripperInterfaceState(smach.State):
def __init__(self, gripper_interfaces):
smach.State.__init__(self, input_keys = ['limb', 'command'], outcomes=['succeeded', 'aborted'])
self._gripper_interfaces = gripper_interfaces
def execute(self, userdata):
# Get limb + command from userdata
if 'limb' in userdata:
limb = userdata.limb
else:
raise ValueError('Limb should be specified in userdata!')
if 'command' in userdata:
command = userdata.command
else:
raise ValueError('Command should be specified in userdata!')
# Parse command
try:
if isinstance(command, str):
pass
elif isinstance(command, list):
if isinstance(command[0], str):
command = command[0]
else:
raise ValueError('Command should be specified as a string!')
else:
raise ValueError('Command should be specified as a string!')
except Exception as e:
rospy.logerr('Error when parsing gripper interface command: ' + repr(e))
raise
try:
if command == 'open':
self._gripper_interfaces[limb].open()
rospy.sleep(1.0)
elif command == 'close':
self._gripper_interfaces[limb].close()
rospy.sleep(1.0)
else:
raise ValueError('Command should be either \'open\' or \'close\'!')
except Exception as e:
rospy.logerr('Error when running gripper interface command: ' + repr(e))
raise
return 'succeeded'
def main():
rospy.init_node('baxter_smach_pick_and_place_test')
print("Initializing interfaces for each limb... ")
limb_interfaces = dict()
limb_interfaces['left'] = baxter_interface.Limb('left')
limb_interfaces['right'] = baxter_interface.Limb('right')
print("Initializing inverse kinematics service proxies for each limb... ")
ik_service_proxies = dict()
ik_service_proxies['left'] = rospy.ServiceProxy('/ExternalTools/left/PositionKinematicsNode/IKService', SolvePositionIK)
ik_service_proxies['right'] = rospy.ServiceProxy('/ExternalTools/right/PositionKinematicsNode/IKService', SolvePositionIK)
print("Initializing interfaces for each gripper... ")
gripper_interfaces = dict()
gripper_interfaces['left'] = baxter_interface.Gripper('left')
gripper_interfaces['right'] = baxter_interface.Gripper('right')
rs = baxter_interface.RobotEnable(CHECK_VERSION)
print("Enabling robot... ")
rs.enable()
sm = smach.StateMachine(outcomes=['succeeded', 'aborted', 'preempted'])
sm.userdata.hover_offset = [[0.0, 0.0, 0.15], [0.0, 0.0, 0.0, 0.0]]
sm.userdata.limb = 'left'
sm.userdata.table_model_pose_world = Pose(position=Point(x=1.0, y=0.0, z=0.0))
sm.userdata.table_model_ref_frame = 'world'
sm.userdata.block_model_pick_pose = [[0.7, 0.15, -0.129], [-0.02496, 0.99965, 0.00738, 0.00486]]
sm.userdata.block_model_pick_pose_world = [[0.6725, 0.1265, 0.7825], [0.0, 0.0, 0.0, 0.0]]
sm.userdata.block_model_pick_ref_frame = 'world'
sm.userdata.block_model_place_pose = [[0.75, 0.0, -0.129], [-0.02496, 0.99965, 0.00738, 0.00486]]
sm.userdata.joint_start_positions = [-0.08, -0.99998, -1.18997, 1.94002, 0.67, 1.03001, -0.5]
with sm:
smach.StateMachine.add('LOAD_TABLE_MODEL',
LoadGazeboModelState('cafe_table',
rospkg.RosPack().get_path('baxter_sim_examples')+'/models/cafe_table/model.sdf'),
transitions={'succeeded':'LOAD_BLOCK_MODEL'},
remapping={'pose':'table_model_pose_world',
'reference_frame':'table_model_ref_frame'})
smach.StateMachine.add('LOAD_BLOCK_MODEL',
LoadGazeboModelState('block',
rospkg.RosPack().get_path('baxter_sim_examples')+'/models/block/model.urdf'),
transitions={'succeeded':'MOVE_TO_START_POSITION'},
remapping={'pose':'block_model_pick_pose_world',
'reference_frame':'block_model_pick_ref_frame'})
smach.StateMachine.add('MOVE_TO_START_POSITION',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'PICK_BLOCK'},
remapping={'limb':'limb',
'positions':'joint_start_positions'})
sm_pick_block = smach.StateMachine(outcomes=['succeeded', 'aborted', 'preempted'],
input_keys = ['limb', 'pick_pose', 'hover_offset'])
sm_pick_block.userdata.close_command = 'close'
sm_pick_block.userdata.open_command = 'open'
with sm_pick_block:
smach.StateMachine.add('IK_PICK_BLOCK_HOVER_POSE',
PoseToJointTrajServiceState(ik_service_proxies),
transitions={'succeeded':'MOVE_TO_PICK_BLOCK_HOVER_POSE'},
remapping={'joints':'ik_joint_response_block_pick_hover_pose',
'limb':'limb',
'offsets':'hover_offset',
'poses':'pick_pose'})
smach.StateMachine.add('MOVE_TO_PICK_BLOCK_HOVER_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'OPEN_GRIPPER'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_pick_hover_pose'})
smach.StateMachine.add('OPEN_GRIPPER',
GripperInterfaceState(gripper_interfaces),
transitions={'succeeded':'IK_PICK_BLOCK_GRIP_POSE'},
remapping={'command':'open_command',
'limb':'limb'})
smach.StateMachine.add('IK_PICK_BLOCK_GRIP_POSE',
PoseToJointTrajServiceState(ik_service_proxies),
transitions={'succeeded':'MOVE_TO_PICK_BLOCK_GRIP_POSE'},
remapping={'joints':'ik_joint_response_block_pick_pose',
'limb':'limb',
'poses':'pick_pose'})
smach.StateMachine.add('MOVE_TO_PICK_BLOCK_GRIP_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'CLOSE_GRIPPER'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_pick_pose'})
smach.StateMachine.add('CLOSE_GRIPPER',
GripperInterfaceState(gripper_interfaces),
transitions={'succeeded':'MOVE_TO_GRIPPED_BLOCK_HOVER_POSE'},
remapping={'command':'close_command',
'limb':'limb'})
smach.StateMachine.add('MOVE_TO_GRIPPED_BLOCK_HOVER_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'succeeded'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_pick_hover_pose'})
smach.StateMachine.add('PICK_BLOCK', sm_pick_block,
transitions={'succeeded':'PLACE_BLOCK'},
remapping={'hover_offset':'hover_offset',
'limb':'limb',
'pick_pose':'block_model_pick_pose'})
sm_place_block = smach.StateMachine(outcomes=['succeeded', 'aborted', 'preempted'],
input_keys = ['limb', 'place_pose', 'hover_offset'])
sm_place_block.userdata.open_command = 'open'
with sm_place_block:
smach.StateMachine.add('IK_PLACE_BLOCK_HOVER_POSE',
PoseToJointTrajServiceState(ik_service_proxies),
transitions={'succeeded':'MOVE_TO_PLACE_BLOCK_HOVER_POSE'},
remapping={'joints':'ik_joint_response_block_place_hover_pose',
'limb':'limb',
'offsets':'hover_offset',
'poses':'place_pose'})
smach.StateMachine.add('MOVE_TO_PLACE_BLOCK_HOVER_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'IK_PLACE_BLOCK_RELEASE_POSE'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_place_hover_pose'})
smach.StateMachine.add('IK_PLACE_BLOCK_RELEASE_POSE',
PoseToJointTrajServiceState(ik_service_proxies),
transitions={'succeeded':'MOVE_TO_PLACE_BLOCK_RELEASE_POSE'},
remapping={'joints':'ik_joint_response_block_place_pose',
'limb':'limb',
'poses':'place_pose'})
smach.StateMachine.add('MOVE_TO_PLACE_BLOCK_RELEASE_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'OPEN_GRIPPER'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_place_pose'})
smach.StateMachine.add('OPEN_GRIPPER',
GripperInterfaceState(gripper_interfaces),
transitions={'succeeded':'MOVE_TO_RELEASED_BLOCK_HOVER_POSE'},
remapping={'command':'open_command',
'limb':'limb'})
smach.StateMachine.add('MOVE_TO_RELEASED_BLOCK_HOVER_POSE',
MoveToJointPositionsState(limb_interfaces),
transitions={'succeeded':'succeeded'},
remapping={'limb':'limb',
'positions':'ik_joint_response_block_place_hover_pose'})
smach.StateMachine.add('PLACE_BLOCK', sm_place_block,
transitions={'succeeded':'succeeded'},
remapping={'hover_offset':'hover_offset',
'limb':'limb',
'place_pose':'block_model_place_pose'})
sis = smach_ros.IntrospectionServer('baxter_smach_pick_and_place_test', sm, '/sm')
sis.start()
rospy.on_shutdown(lambda: delete_gazebo_model('block'))
rospy.on_shutdown(lambda: delete_gazebo_model('cafe_table'))
# Disable robot
rospy.on_shutdown(rs.disable)
try:
outcome = sm.execute()
print("Baxter SMACH Pick and Place Test Complete. Ctrl-C to exit.")
rospy.spin()
except Exception as e:
rospy.logerr('Error when executing state machine: ' + repr(e))
rospy.signal_shutdown('Error when executing state machine: ' + repr(e))
if __name__ == '__main__':
main()
|
{
"content_hash": "cec8c3955151f0327e50462ff547ae38",
"timestamp": "",
"source": "github",
"line_count": 649,
"max_line_length": 150,
"avg_line_length": 41.59784283513097,
"alnum_prop": 0.5236507760121495,
"repo_name": "abr-ijs/baxter_smacha",
"id": "c9e28f1b1a374f93794bcf7b39e142018ff4d5fb",
"size": "27024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smacha_generated/pick_and_place_demo.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "6925"
},
{
"name": "Python",
"bytes": "172960"
},
{
"name": "Shell",
"bytes": "131"
},
{
"name": "Smarty",
"bytes": "25724"
}
],
"symlink_target": ""
}
|
"""Creates files required to feed into trybot_commit_size_checker"""
import argparse
import json
import logging
import os
import shutil
import subprocess
_SRC_ROOT = os.path.normpath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
_RESOURCE_SIZES_PATH = os.path.join(_SRC_ROOT, 'build', 'android',
'resource_sizes.py')
_BINARY_SIZE_DIR = os.path.join(_SRC_ROOT, 'tools', 'binary_size')
_CLANG_UPDATE_PATH = os.path.join(_SRC_ROOT, 'tools', 'clang', 'scripts',
'update.py')
def _copy_files_to_staging_dir(files_to_copy, make_staging_path):
"""Copies files from output directory to staging_dir"""
for filename in files_to_copy:
shutil.copy(filename, make_staging_path(filename))
def _generate_resource_sizes(to_resource_sizes_py, make_chromium_output_path,
make_staging_path):
"""Creates results-chart.json file in staging_dir"""
cmd = [
_RESOURCE_SIZES_PATH,
make_chromium_output_path(to_resource_sizes_py['apk_name']),
'--output-format=chartjson',
'--chromium-output-directory',
make_chromium_output_path(),
'--output-dir',
make_staging_path(),
]
FORWARDED_PARAMS = [
('--trichrome-library', make_chromium_output_path, 'trichrome_library'),
('--trichrome-chrome', make_chromium_output_path, 'trichrome_chrome'),
('--trichrome-webview', make_chromium_output_path, 'trichrome_webview'),
]
for switch, fun, key in FORWARDED_PARAMS:
if key in to_resource_sizes_py:
cmd += [switch, fun(to_resource_sizes_py[key])]
subprocess.run(cmd, check=True)
def _generate_supersize_archive(supersize_input_file, make_chromium_output_path,
make_staging_path):
"""Creates a .size file for the given .apk or .minimal.apks"""
subprocess.run([_CLANG_UPDATE_PATH, '--package=objdump'], check=True)
supersize_input_path = make_chromium_output_path(supersize_input_file)
size_path = make_staging_path(supersize_input_file) + '.size'
supersize_script_path = os.path.join(_BINARY_SIZE_DIR, 'supersize')
subprocess.run(
[
supersize_script_path,
'archive',
size_path,
'-f',
supersize_input_path,
'-v',
],
check=True,
)
def main():
parser = argparse.ArgumentParser()
# A size config JSON specifies files relative to --chromium-output-directory.
# Its fields are:
# * mapping_files: A list of .mapping files, to be copied to --staging-dir for
# SuperSize and trybot_commit_size_checker.py (indirectly). SuperSize
# deduces mapping filenames; there's no need to pass these to it directly.
  # * to_resource_sizes_py: A dict of arguments for resource_sizes.py. Its
# sub-fields are:
# * apk_name: Required main input, although for Trichrome this can be a
# placeholder name.
# * trichrome_library: --trichrome-library param (Trichrome only).
# * trichrome_chrome: --trichrome-chrome param (Trichrome only).
# * trichrome_webview: --trichrome-webview param (Trichrome only).
# * supersize_input_file: Main input for SuperSize, and can be {.apk,
# .minimal.apks, .ssargs}. If .ssargs, then the file is copied to the
# staging dir.
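  # An illustrative config (the file names here are assumptions, not real
  # build targets) might look like:
  #   {
  #     "mapping_files": ["apks/Example.apk.mapping"],
  #     "to_resource_sizes_py": {"apk_name": "apks/Example.apk"},
  #     "supersize_input_file": "apks/Example.apk"
  #   }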
parser.add_argument('--size-config-json',
required=True,
help='Path to JSON file with configs for binary size '
'measurement.')
parser.add_argument(
'--chromium-output-directory',
required=True,
help='Location of the build artifacts.',
)
parser.add_argument(
'--staging-dir',
required=True,
help='Directory to write generated files to.',
)
args = parser.parse_args()
with open(args.size_config_json, 'rt') as fh:
config = json.load(fh)
to_resource_sizes_py = config['to_resource_sizes_py']
mapping_files = config['mapping_files']
supersize_input_file = config['supersize_input_file']
def make_chromium_output_path(path_rel_to_output=None):
if path_rel_to_output is None:
return args.chromium_output_directory
return os.path.join(args.chromium_output_directory, path_rel_to_output)
  # N.B. os.path.basename(): staged copies are flattened into one directory.
def make_staging_path(path_rel_to_output=None):
if path_rel_to_output is None:
return args.staging_dir
return os.path.join(args.staging_dir, os.path.basename(path_rel_to_output))
files_to_copy = [make_chromium_output_path(f) for f in mapping_files]
# Copy size config JSON and .ssargs to staging dir to save settings used.
if args.size_config_json:
files_to_copy.append(args.size_config_json)
if supersize_input_file.endswith('.ssargs'):
files_to_copy.append(make_chromium_output_path(supersize_input_file))
_copy_files_to_staging_dir(files_to_copy, make_staging_path)
_generate_resource_sizes(
to_resource_sizes_py,
make_chromium_output_path,
make_staging_path,
)
_generate_supersize_archive(
supersize_input_file,
make_chromium_output_path,
make_staging_path,
)
if __name__ == '__main__':
main()
|
{
"content_hash": "e051e9b477b6414deeefa08978cf099b",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 80,
"avg_line_length": 35.6875,
"alnum_prop": 0.6553804242070441,
"repo_name": "scheib/chromium",
"id": "ca2b7cebefb8a17a7f64d45231a1fc4b29d188fa",
"size": "5344",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tools/binary_size/generate_commit_size_analysis.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Implementation of the Image-to-Image Translation model.
This network represents a port of the following work:
Image-to-Image Translation with Conditional Adversarial Networks
Phillip Isola, Jun-Yan Zhu, Tinghui Zhou and Alexei A. Efros
Arxiv, 2017
https://phillipi.github.io/pix2pix/
A reference implementation written in Lua can be found at:
https://github.com/phillipi/pix2pix/blob/master/models.lua
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import tensorflow.compat.v1 as tf
import tf_slim as slim
def pix2pix_arg_scope():
"""Returns a default argument scope for isola_net.
Returns:
An arg scope.
"""
# These parameters come from the online port, which don't necessarily match
# those in the paper.
# TODO(nsilberman): confirm these values with Philip.
instance_norm_params = {
'center': True,
'scale': True,
'epsilon': 0.00001,
}
with slim.arg_scope(
[slim.conv2d, slim.conv2d_transpose],
normalizer_fn=slim.instance_norm,
normalizer_params=instance_norm_params,
weights_initializer=tf.random_normal_initializer(0, 0.02)) as sc:
return sc
def upsample(net, num_outputs, kernel_size, method='nn_upsample_conv'):
"""Upsamples the given inputs.
Args:
net: A `Tensor` of size [batch_size, height, width, filters].
num_outputs: The number of output filters.
kernel_size: A list of 2 scalars or a 1x2 `Tensor` indicating the scale,
relative to the inputs, of the output dimensions. For example, if kernel
size is [2, 3], then the output height and width will be twice and three
times the input size.
method: The upsampling method.
Returns:
    A `Tensor` which was upsampled using the specified method.
Raises:
ValueError: if `method` is not recognized.
"""
net_shape = tf.shape(input=net)
height = net_shape[1]
width = net_shape[2]
if method == 'nn_upsample_conv':
net = tf.image.resize(
net, [kernel_size[0] * height, kernel_size[1] * width],
method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
net = slim.conv2d(net, num_outputs, [4, 4], activation_fn=None)
elif method == 'conv2d_transpose':
net = slim.conv2d_transpose(
net, num_outputs, [4, 4], stride=kernel_size, activation_fn=None)
else:
raise ValueError('Unknown method: [%s]' % method)
return net
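# Example: upsample(net, 64, [2, 2]) doubles both spatial dimensions and
# projects to 64 output filters using the default nearest-neighbour method.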
class Block(
collections.namedtuple('Block', ['num_filters', 'decoder_keep_prob'])):
"""Represents a single block of encoder and decoder processing.
The Image-to-Image translation paper works a bit differently than the original
U-Net model. In particular, each block represents a single operation in the
encoder which is concatenated with the corresponding decoder representation.
A dropout layer follows the concatenation and convolution of the concatenated
features.
"""
pass
def _default_generator_blocks():
"""Returns the default generator block definitions.
Returns:
A list of generator blocks.
"""
return [
Block(64, 0.5),
Block(128, 0.5),
Block(256, 0.5),
Block(512, 0),
Block(512, 0),
Block(512, 0),
Block(512, 0),
]
def pix2pix_generator(net,
num_outputs,
blocks=None,
upsample_method='nn_upsample_conv',
is_training=False): # pylint: disable=unused-argument
"""Defines the network architecture.
Args:
net: A `Tensor` of size [batch, height, width, channels]. Note that the
generator currently requires square inputs (e.g. height=width).
num_outputs: The number of (per-pixel) outputs.
blocks: A list of generator blocks or `None` to use the default generator
definition.
upsample_method: The method of upsampling images, one of 'nn_upsample_conv'
or 'conv2d_transpose'
is_training: Whether or not we're in training or testing mode.
Returns:
A `Tensor` representing the model output and a dictionary of model end
points.
Raises:
ValueError: if the input heights do not match their widths.
"""
end_points = {}
blocks = blocks or _default_generator_blocks()
input_size = net.get_shape().as_list()
input_size[3] = num_outputs
upsample_fn = functools.partial(upsample, method=upsample_method)
encoder_activations = []
###########
# Encoder #
###########
with tf.variable_scope('encoder'):
with slim.arg_scope([slim.conv2d],
kernel_size=[4, 4],
stride=2,
activation_fn=tf.nn.leaky_relu):
for block_id, block in enumerate(blocks):
# No normalizer for the first encoder layers as per 'Image-to-Image',
# Section 5.1.1
if block_id == 0:
# First layer doesn't use normalizer_fn
net = slim.conv2d(net, block.num_filters, normalizer_fn=None)
elif block_id < len(blocks) - 1:
net = slim.conv2d(net, block.num_filters)
else:
# Last layer doesn't use activation_fn nor normalizer_fn
net = slim.conv2d(
net, block.num_filters, activation_fn=None, normalizer_fn=None)
encoder_activations.append(net)
end_points['encoder%d' % block_id] = net
###########
# Decoder #
###########
reversed_blocks = list(blocks)
reversed_blocks.reverse()
with tf.variable_scope('decoder'):
# Dropout is used at both train and test time as per 'Image-to-Image',
# Section 2.1 (last paragraph).
with slim.arg_scope([slim.dropout], is_training=True):
for block_id, block in enumerate(reversed_blocks):
if block_id > 0:
net = tf.concat([net, encoder_activations[-block_id - 1]], axis=3)
# The Relu comes BEFORE the upsample op:
net = tf.nn.relu(net)
net = upsample_fn(net, block.num_filters, [2, 2])
if block.decoder_keep_prob > 0:
net = slim.dropout(net, keep_prob=block.decoder_keep_prob)
end_points['decoder%d' % block_id] = net
with tf.variable_scope('output'):
# Explicitly set the normalizer_fn to None to override any default value
# that may come from an arg_scope, such as pix2pix_arg_scope.
logits = slim.conv2d(
net, num_outputs, [4, 4], activation_fn=None, normalizer_fn=None)
logits = tf.reshape(logits, input_size)
end_points['logits'] = logits
end_points['predictions'] = tf.tanh(logits)
return logits, end_points
def pix2pix_discriminator(net, num_filters, padding=2, pad_mode='REFLECT',
activation_fn=tf.nn.leaky_relu, is_training=False):
"""Creates the Image2Image Translation Discriminator.
Args:
net: A `Tensor` of size [batch_size, height, width, channels] representing
the input.
num_filters: A list of the filters in the discriminator. The length of the
list determines the number of layers in the discriminator.
padding: Amount of reflection padding applied before each convolution.
pad_mode: mode for tf.pad, one of "CONSTANT", "REFLECT", or "SYMMETRIC".
activation_fn: activation fn for slim.conv2d.
is_training: Whether or not the model is training or testing.
Returns:
A logits `Tensor` of size [batch_size, N, N, 1] where N is the number of
'patches' we're attempting to discriminate and a dictionary of model end
points.
"""
del is_training
end_points = {}
num_layers = len(num_filters)
def padded(net, scope):
if padding:
with tf.variable_scope(scope):
spatial_pad = tf.constant(
[[0, 0], [padding, padding], [padding, padding], [0, 0]],
dtype=tf.int32)
return tf.pad(tensor=net, paddings=spatial_pad, mode=pad_mode)
else:
return net
with slim.arg_scope([slim.conv2d],
kernel_size=[4, 4],
stride=2,
padding='valid',
activation_fn=activation_fn):
# No normalization on the input layer.
net = slim.conv2d(
padded(net, 'conv0'), num_filters[0], normalizer_fn=None, scope='conv0')
end_points['conv0'] = net
for i in range(1, num_layers - 1):
net = slim.conv2d(
padded(net, 'conv%d' % i), num_filters[i], scope='conv%d' % i)
end_points['conv%d' % i] = net
# Stride 1 on the last layer.
net = slim.conv2d(
padded(net, 'conv%d' % (num_layers - 1)),
num_filters[-1],
stride=1,
scope='conv%d' % (num_layers - 1))
end_points['conv%d' % (num_layers - 1)] = net
# 1-dim logits, stride 1, no activation, no normalization.
logits = slim.conv2d(
padded(net, 'conv%d' % num_layers),
1,
stride=1,
activation_fn=None,
normalizer_fn=None,
scope='conv%d' % num_layers)
end_points['logits'] = logits
end_points['predictions'] = tf.sigmoid(logits)
return logits, end_points
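def _example_usage():
  """A hedged usage sketch, not part of the original port.

  Wires the generator and discriminator together under the default arg scope.
  The 256x256 RGB input and the discriminator filter counts are illustrative
  assumptions, and TF1-style graph mode is assumed (e.g. after calling
  tf.disable_v2_behavior()).
  """
  images = tf.placeholder(tf.float32, [1, 256, 256, 3])
  with slim.arg_scope(pix2pix_arg_scope()):
    # Generator maps images to per-pixel outputs of the same spatial size.
    logits, _ = pix2pix_generator(images, num_outputs=3)
    # PatchGAN discriminator scores the generated image.
    disc_logits, _ = pix2pix_discriminator(
        tf.tanh(logits), num_filters=[64, 128, 256, 512])
  return logits, disc_logits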
|
{
"content_hash": "a8a44b34abde7d15faba40fc41001320",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 80,
"avg_line_length": 32.34767025089606,
"alnum_prop": 0.639224376731302,
"repo_name": "tombstone/models",
"id": "9e0e5708145f953a0b947fec85d53f30e1106c20",
"size": "9709",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "research/slim/nets/pix2pix.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1365199"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "1858048"
},
{
"name": "Makefile",
"bytes": "4763"
},
{
"name": "Python",
"bytes": "7241242"
},
{
"name": "Shell",
"bytes": "102270"
},
{
"name": "TypeScript",
"bytes": "6515"
}
],
"symlink_target": ""
}
|
"""Support for the Hive alarm."""
from datetime import timedelta
from homeassistant.components.alarm_control_panel import AlarmControlPanelEntity
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import HiveEntity
from .const import DOMAIN
ICON = "mdi:security"
PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=15)
HIVETOHA = {
"home": STATE_ALARM_DISARMED,
"asleep": STATE_ALARM_ARMED_NIGHT,
"away": STATE_ALARM_ARMED_AWAY,
}
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up Hive thermostat based on a config entry."""
hive = hass.data[DOMAIN][entry.entry_id]
if devices := hive.session.deviceList.get("alarm_control_panel"):
async_add_entities(
[HiveAlarmControlPanelEntity(hive, dev) for dev in devices], True
)
class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
"""Representation of a Hive alarm."""
_attr_icon = ICON
@property
def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id
@property
def device_info(self) -> DeviceInfo:
"""Return device information about this AdGuard Home instance."""
return DeviceInfo(
identifiers={(DOMAIN, self.device["device_id"])},
model=self.device["deviceData"]["model"],
manufacturer=self.device["deviceData"]["manufacturer"],
name=self.device["device_name"],
sw_version=self.device["deviceData"]["version"],
via_device=(DOMAIN, self.device["parentDevice"]),
)
@property
def name(self):
"""Return the name of the alarm."""
return self.device["haName"]
@property
def available(self):
"""Return if the device is available."""
return self.device["deviceData"]["online"]
@property
def state(self):
"""Return state of alarm."""
if self.device["status"]["state"]:
return STATE_ALARM_TRIGGERED
return HIVETOHA[self.device["status"]["mode"]]
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_ALARM_ARM_NIGHT | SUPPORT_ALARM_ARM_AWAY
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
await self.hive.alarm.setMode(self.device, "home")
async def async_alarm_arm_night(self, code=None):
"""Send arm night command."""
await self.hive.alarm.setMode(self.device, "asleep")
async def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
await self.hive.alarm.setMode(self.device, "away")
async def async_update(self):
"""Update all Node data from Hive."""
await self.hive.session.updateData(self.device)
self.device = await self.hive.alarm.getAlarm(self.device)
|
{
"content_hash": "4accde3da59fa15ba1901f4343a97b60",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 84,
"avg_line_length": 32.40384615384615,
"alnum_prop": 0.6670623145400594,
"repo_name": "rohitranjan1991/home-assistant",
"id": "4a0ad577f90f88f900418ddf3efe7ff693900e8b",
"size": "3370",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/hive/alarm_control_panel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.db import IntegrityError
from spirit.signals.topic import topic_viewed
from ..utils import paginator
class CommentBookmark(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_("user"))
topic = models.ForeignKey('spirit.Topic')
comment_number = models.PositiveIntegerField(default=0)
class Meta:
app_label = 'spirit'
unique_together = ('user', 'topic')
verbose_name = _("comment bookmark")
verbose_name_plural = _("comments bookmarks")
def get_absolute_url(self):
return paginator.get_url(self.topic.get_absolute_url(),
self.comment_number,
settings.ST_COMMENTS_PER_PAGE,
settings.ST_COMMENTS_PAGE_VAR)
def __unicode__(self):
return "%s bookmarked comment %s in %s" % (self.user.username,
self.topic.title,
self.comment_number)
def topic_page_viewed_handler(sender, request, topic, **kwargs):
if not request.user.is_authenticated():
return
try:
page_number = int(request.GET.get(settings.ST_COMMENTS_PAGE_VAR, 1))
except ValueError:
return
comment_number = settings.ST_COMMENTS_PER_PAGE * (page_number - 1) + 1
# TODO: use update_or_create on django 1.7
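    # On Django >= 1.7 the create/IntegrityError fallback below collapses to
    # a single call (sketch):
    #     CommentBookmark.objects.update_or_create(
    #         user=request.user, topic=topic,
    #         defaults={'comment_number': comment_number})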
try:
CommentBookmark.objects.create(user=request.user, topic=topic, comment_number=comment_number)
except IntegrityError:
CommentBookmark.objects.filter(user=request.user, topic=topic)\
.update(comment_number=comment_number)
topic_viewed.connect(topic_page_viewed_handler, dispatch_uid=__name__)
|
{
"content_hash": "7b6b3f1762ed7aeb855ae7895a7b3e90",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 101,
"avg_line_length": 34.89090909090909,
"alnum_prop": 0.6310578426263679,
"repo_name": "bjorncooley/rainforest_makers",
"id": "66863dbdf71f546244418eda58b620371816983f",
"size": "1943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spirit/models/comment_bookmark.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "94665"
},
{
"name": "CoffeeScript",
"bytes": "54667"
},
{
"name": "JavaScript",
"bytes": "97637"
},
{
"name": "Python",
"bytes": "395030"
}
],
"symlink_target": ""
}
|
from django.db import connection
def make_key(key, key_prefix, version):
"""
Tenant aware function to generate a cache key.
Constructs the key used by all other methods. Prepends the tenant
`schema_name` and `key_prefix'.
"""
return '%s:%s:%s:%s' % (connection.schema_name, key_prefix, version, key)
def reverse_key(key):
"""
Tenant aware function to reverse a cache key.
Required for django-redis REVERSE_KEY_FUNCTION setting.
"""
return key.split(':', 3)[3]
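# A minimal settings sketch showing how these helpers plug into Django's
# cache configuration (assumes django-redis as the backend; adjust the
# location and backend to your project):
#
# CACHES = {
#     "default": {
#         "BACKEND": "django_redis.cache.RedisCache",
#         "LOCATION": "redis://127.0.0.1:6379/1",
#         "KEY_FUNCTION": "tenant_schemas.cache.make_key",
#         "REVERSE_KEY_FUNCTION": "tenant_schemas.cache.reverse_key",
#     }
# }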
|
{
"content_hash": "6f40dc6af90189cbce782a98adef5377",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 77,
"avg_line_length": 25.5,
"alnum_prop": 0.6588235294117647,
"repo_name": "honur/django-tenant-schemas",
"id": "9a4a536c5a798f418c117bb454fd9e85dffdadb6",
"size": "510",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "tenant_schemas/cache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "82543"
}
],
"symlink_target": ""
}
|
import re
def pointer(string):
string = str(string)[::-1]
    splitted = re.split(r"(\d\d\d)", string)
    final = [x for x in splitted if x != '']
return ".".join(final)[::-1]
print pointer(raw_input("Your number ->"))
|
{
"content_hash": "5e52a75a25ca354c2aecd2b393a6ca13",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 48,
"avg_line_length": 26.22222222222222,
"alnum_prop": 0.5847457627118644,
"repo_name": "SGanT/olymp",
"id": "7d41b275a297349bc041257adb35e2c095ed543e",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Homework/Points_in_number/pointer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2568"
}
],
"symlink_target": ""
}
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zilencer', '0008_customer_billing_user'),
]
operations = [
migrations.CreateModel(
name='Plan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nickname', models.CharField(max_length=40, unique=True)),
('stripe_plan_id', models.CharField(max_length=255, unique=True)),
],
),
]
|
{
"content_hash": "f8d9c5b5764866b5c5f9c7860ffb4fbc",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 114,
"avg_line_length": 29.789473684210527,
"alnum_prop": 0.5671378091872792,
"repo_name": "brainwane/zulip",
"id": "05c73567459f87ee2678f94da6826cfb5654dfa4",
"size": "617",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "zilencer/migrations/0009_plan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "423578"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "647926"
},
{
"name": "JavaScript",
"bytes": "2886792"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "90558"
},
{
"name": "Python",
"bytes": "6000548"
},
{
"name": "Ruby",
"bytes": "249744"
},
{
"name": "Shell",
"bytes": "110849"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
}
|
import subprocess
from radiopadre.file import FileBase
from radiopadre.render import render_url, render_error
from radiopadre import imagefile
class PDFFile(FileBase):
def __init__(self, *args, **kw):
FileBase.__init__(self, *args, **kw)
def render_html(self,**kw):
return self.render_thumb(**kw)
def _render_thumb_impl(self, npix=None, **kw):
thumbnail, thumbnail_url, update = self._get_cache_file("pdf-render", "png")
npix = npix or 800
if update:
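            # Render only the first page (-dLastPage=1) at 300dpi, downscaled
            # 4x by ghostscript into the cached PNG thumbnail.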
cmd = "gs -sDEVICE=png16m -sOutputFile={thumbnail} -dLastPage=1 -r300 -dDownScaleFactor=4 -dBATCH " \
"-dNOPAUSE {self.fullpath}".format(**locals())
try:
output = subprocess.check_output(cmd, shell=True)
except subprocess.CalledProcessError as exc:
print cmd, exc.output
return render_error("phantomjs error (code {})".format(exc.returncode))
return imagefile.ImageFile._render_thumbnail(thumbnail, url=render_url(self.fullpath), npix=npix) + "\n"
|
{
"content_hash": "75b05c11b9087cc1ddeaa507deff06f1",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 113,
"avg_line_length": 39.48148148148148,
"alnum_prop": 0.6303939962476548,
"repo_name": "radio-astro/radiopadre",
"id": "9c8ba414f45620da3e17c8a4d11e591bdf4b4cc0",
"size": "1066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "radiopadre/pdffile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "8474"
},
{
"name": "Jupyter Notebook",
"bytes": "13025"
},
{
"name": "Python",
"bytes": "54154"
},
{
"name": "Shell",
"bytes": "3463"
}
],
"symlink_target": ""
}
|
import bee
from bee.segments import *
import spyder, Spyder
import spyder.formtools
import libcontext
class externblock(object):
metaguiparams = {"spydertype": "type"}
def __new__(cls, spydertype):
assert spyder.validvar2(spydertype), spydertype
class externblock(bee.worker):
block_ = antenna("pull", "block")
b_block = buffer("pull", "block")
connect(block_, b_block)
get_block = triggerfunc(b_block)
control = antenna_blockcontrol()
model = output("pull", "blockmodel")
v_model = variable("blockmodel")
connect(v_model, model)
@modifier
def _init(self):
if self._initialized: return
self.get_block()
model = self.b_block
self.v_model = model
self._initialized = True
def place(self):
self._initialized = False
self.control.set_blockcontrol(lambda: self.v_model)
pretrigger(v_model, _init)
return externblock
|
{
"content_hash": "cdd32d11d7347a96546fe1dca643e6cd",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 67,
"avg_line_length": 25.954545454545453,
"alnum_prop": 0.5350262697022767,
"repo_name": "agoose77/hivesystem",
"id": "ba24219eb741c77d70350a4ce620a771a56f71bb",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dragonfly/blocks/externblock.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "2491478"
},
{
"name": "Shell",
"bytes": "1164"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'InstitutionUserProfile.user'
db.delete_column(u'heliosinstitution_institutionuserprofile', 'user_id')
# Adding field 'InstitutionUserProfile.helios_user'
db.add_column(u'heliosinstitution_institutionuserprofile', 'helios_user',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['helios_auth.User'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'InstitutionUserProfile.user'
db.add_column(u'heliosinstitution_institutionuserprofile', 'user',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['helios_auth.User'], null=True, blank=True),
keep_default=False)
# Deleting field 'InstitutionUserProfile.helios_user'
db.delete_column(u'heliosinstitution_institutionuserprofile', 'helios_user_id')
models = {
u'helios_auth.user': {
'Meta': {'unique_together': "(('user_type', 'user_id'),)", 'object_name': 'User'},
'admin_p': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('helios_auth.jsonfield.JSONField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'token': ('helios_auth.jsonfield.JSONField', [], {'null': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user_type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'heliosinstitution.institution': {
'Meta': {'object_name': 'Institution'},
'address': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idp_address': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'main_phone': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'mngt_email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'sec_phone': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'heliosinstitution.institutionuserprofile': {
'Meta': {'object_name': 'InstitutionUserProfile'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'helios_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['helios_auth.User']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['heliosinstitution.Institution']"})
}
}
complete_apps = ['heliosinstitution']
|
{
"content_hash": "80f3a4bd3bec0a7af1df3aa0d18920a7",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 166,
"avg_line_length": 59.25806451612903,
"alnum_prop": 0.5860097985846489,
"repo_name": "shirlei/helios-server",
"id": "7d645e0ca94a0d953a09be4d1721373d33c04aeb",
"size": "3698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heliosinstitution/south_migrations/0007_auto__del_field_institutionuserprofile_user__add_field_institutionuser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "82677"
},
{
"name": "HTML",
"bytes": "433803"
},
{
"name": "Java",
"bytes": "2271"
},
{
"name": "JavaScript",
"bytes": "473703"
},
{
"name": "Python",
"bytes": "829739"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
}
|
from __future__ import division, absolute_import, print_function
# Code common to build tools
import sys
from os.path import join
import warnings
import copy
import binascii
from distutils.ccompiler import CompileError
#-------------------
# Versioning support
#-------------------
# How to change C_API_VERSION ?
# - increase C_API_VERSION value
# - record the hash for the new C API with the script cversions.py
# and add the hash to cversions.txt
# The hash values are used to remind developers when the C API number was not
# updated - generates a MismatchCAPIWarning warning which is turned into an
# exception for released version.
# Binary compatibility version number. This number is increased whenever the
# C-API is changed such that binary compatibility is broken, i.e. whenever a
# recompile of extension modules is needed.
C_ABI_VERSION = 0x01000009
# Minor API version. This number is increased whenever a change is made to the
# C-API -- whether it breaks binary compatibility or not. Some changes, such
# as adding a function pointer to the end of the function table, can be made
# without breaking binary compatibility. In this case, only the C_API_VERSION
# (*not* C_ABI_VERSION) would be increased. Whenever binary compatibility is
# broken, both C_API_VERSION and C_ABI_VERSION should be increased.
#
# 0x00000008 - 1.7.x
# 0x00000009 - 1.8.x
C_API_VERSION = 0x00000009
class MismatchCAPIWarning(Warning):
pass
def is_released(config):
"""Return True if a released version of numpy is detected."""
from distutils.version import LooseVersion
v = config.get_version('../version.py')
if v is None:
raise ValueError("Could not get version")
pv = LooseVersion(vstring=v).version
if len(pv) > 3:
return False
return True
def get_api_versions(apiversion, codegen_dir):
"""Return current C API checksum and the recorded checksum for the given
version of the C API version."""
api_files = [join(codegen_dir, 'numpy_api_order.txt'),
join(codegen_dir, 'ufunc_api_order.txt')]
# Compute the hash of the current API as defined in the .txt files in
# code_generators
sys.path.insert(0, codegen_dir)
try:
m = __import__('genapi')
numpy_api = __import__('numpy_api')
curapi_hash = m.fullapi_hash(numpy_api.full_api)
apis_hash = m.get_versions_hash()
finally:
del sys.path[0]
return curapi_hash, apis_hash[apiversion]
def check_api_version(apiversion, codegen_dir):
"""Emits a MismacthCAPIWarning if the C API version needs updating."""
curapi_hash, api_hash = get_api_versions(apiversion, codegen_dir)
# If different hash, it means that the api .txt files in
# codegen_dir have been updated without the API version being
# updated. Any modification in those .txt files should be reflected
# in the api and eventually abi versions.
# To compute the checksum of the current API, use
# code_generators/cversions.py script
if not curapi_hash == api_hash:
msg = "API mismatch detected, the C API version " \
"numbers have to be updated. Current C api version is %d, " \
"with checksum %s, but recorded checksum for C API version %d in " \
"codegen_dir/cversions.txt is %s. If functions were added in the " \
"C API, you have to update C_API_VERSION in %s."
warnings.warn(msg % (apiversion, curapi_hash, apiversion, api_hash,
__file__),
MismatchCAPIWarning)
# Mandatory functions: if not found, fail the build
MANDATORY_FUNCS = ["sin", "cos", "tan", "sinh", "cosh", "tanh", "fabs",
"floor", "ceil", "sqrt", "log10", "log", "exp", "asin",
"acos", "atan", "fmod", 'modf', 'frexp', 'ldexp']
# Standard functions which may not be available and for which we have a
# replacement implementation. Note that some of these are C99 functions.
OPTIONAL_STDFUNCS = ["expm1", "log1p", "acosh", "asinh", "atanh",
"rint", "trunc", "exp2", "log2", "hypot", "atan2", "pow",
"copysign", "nextafter"]
OPTIONAL_HEADERS = [
# sse headers only enabled automatically on amd64/x32 builds
"xmmintrin.h", # SSE
"emmintrin.h", # SSE2
]
# optional gcc compiler builtins and their call arguments
# call arguments are required as the compiler will do strict signature checking
OPTIONAL_INTRINSICS = [("__builtin_isnan", '5.'),
("__builtin_isinf", '5.'),
("__builtin_isfinite", '5.'),
("__builtin_bswap32", '5u'),
("__builtin_bswap64", '5u'),
("__builtin_expect", '5, 0'),
]
# gcc function attributes
# (attribute as understood by gcc, function name),
# function name will be converted to HAVE_<upper-case-name> preprocessor macro
OPTIONAL_GCC_ATTRIBUTES = [('__attribute__((optimize("unroll-loops")))',
'attribute_optimize_unroll_loops'),
]
# Subset of OPTIONAL_STDFUNCS which may already have HAVE_* defined by Python.h
OPTIONAL_STDFUNCS_MAYBE = ["expm1", "log1p", "acosh", "atanh", "asinh", "hypot",
"copysign"]
# C99 functions: float and long double versions
C99_FUNCS = ["sin", "cos", "tan", "sinh", "cosh", "tanh", "fabs", "floor",
"ceil", "rint", "trunc", "sqrt", "log10", "log", "log1p", "exp",
"expm1", "asin", "acos", "atan", "asinh", "acosh", "atanh",
"hypot", "atan2", "pow", "fmod", "modf", 'frexp', 'ldexp',
"exp2", "log2", "copysign", "nextafter"]
C99_FUNCS_SINGLE = [f + 'f' for f in C99_FUNCS]
C99_FUNCS_EXTENDED = [f + 'l' for f in C99_FUNCS]
C99_COMPLEX_TYPES = ['complex double', 'complex float', 'complex long double']
C99_COMPLEX_FUNCS = ['creal', 'cimag', 'cabs', 'carg', 'cexp', 'csqrt', 'clog',
'ccos', 'csin', 'cpow']
def fname2def(name):
return "HAVE_%s" % name.upper()
def sym2def(symbol):
define = symbol.replace(' ', '')
return define.upper()
def type2def(symbol):
define = symbol.replace(' ', '_')
return define.upper()
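# For reference (derived from the helpers above):
#   fname2def("expm1")        -> "HAVE_EXPM1"
#   sym2def("complex double") -> "COMPLEXDOUBLE"
#   type2def("long double")   -> "LONG_DOUBLE"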
# Code to detect long double representation taken from MPFR m4 macro
def check_long_double_representation(cmd):
cmd._check_compiler()
body = LONG_DOUBLE_REPRESENTATION_SRC % {'type': 'long double'}
# We need to use _compile because we need the object filename
src, object = cmd._compile(body, None, None, 'c')
try:
type = long_double_representation(pyod(object))
return type
finally:
cmd._clean()
LONG_DOUBLE_REPRESENTATION_SRC = r"""
/* "before" is 16 bytes to ensure there's no padding between it and "x".
* We're not expecting any "long double" bigger than 16 bytes or with
* alignment requirements stricter than 16 bytes. */
typedef %(type)s test_type;
struct {
char before[16];
test_type x;
char after[8];
} foo = {
{ '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0',
'\001', '\043', '\105', '\147', '\211', '\253', '\315', '\357' },
-123456789.0,
{ '\376', '\334', '\272', '\230', '\166', '\124', '\062', '\020' }
};
"""
def pyod(filename):
"""Python implementation of the od UNIX utility (od -b, more exactly).
Parameters
----------
filename : str
name of the file to get the dump from.
Returns
-------
out : seq
list of lines of od output
Note
----
    We only implement enough to get the necessary information for long double
    representation; this is not intended as a compatible replacement for od.
"""
def _pyod2():
out = []
fid = open(filename, 'rb')
try:
yo = [int(oct(int(binascii.b2a_hex(o), 16))) for o in fid.read()]
for i in range(0, len(yo), 16):
line = ['%07d' % int(oct(i))]
line.extend(['%03d' % c for c in yo[i:i+16]])
out.append(" ".join(line))
return out
finally:
fid.close()
def _pyod3():
out = []
fid = open(filename, 'rb')
try:
yo2 = [oct(o)[2:] for o in fid.read()]
for i in range(0, len(yo2), 16):
line = ['%07d' % int(oct(i)[2:])]
line.extend(['%03d' % int(c) for c in yo2[i:i+16]])
out.append(" ".join(line))
return out
finally:
fid.close()
if sys.version_info[0] < 3:
return _pyod2()
else:
return _pyod3()
_BEFORE_SEQ = ['000', '000', '000', '000', '000', '000', '000', '000',
'001', '043', '105', '147', '211', '253', '315', '357']
_AFTER_SEQ = ['376', '334', '272', '230', '166', '124', '062', '020']
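# _BEFORE_SEQ and _AFTER_SEQ are the od-style octal dumps of the 'before' and
# 'after' marker arrays in LONG_DOUBLE_REPRESENTATION_SRC; the patterns below
# are matched between those markers to identify the long double payload.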
_IEEE_DOUBLE_BE = ['301', '235', '157', '064', '124', '000', '000', '000']
_IEEE_DOUBLE_LE = _IEEE_DOUBLE_BE[::-1]
_INTEL_EXTENDED_12B = ['000', '000', '000', '000', '240', '242', '171', '353',
'031', '300', '000', '000']
_INTEL_EXTENDED_16B = ['000', '000', '000', '000', '240', '242', '171', '353',
'031', '300', '000', '000', '000', '000', '000', '000']
_MOTOROLA_EXTENDED_12B = ['300', '031', '000', '000', '353', '171',
'242', '240', '000', '000', '000', '000']
_IEEE_QUAD_PREC_BE = ['300', '031', '326', '363', '105', '100', '000', '000',
'000', '000', '000', '000', '000', '000', '000', '000']
_IEEE_QUAD_PREC_LE = _IEEE_QUAD_PREC_BE[::-1]
_DOUBLE_DOUBLE_BE = ['301', '235', '157', '064', '124', '000', '000', '000'] + \
['000'] * 8
def long_double_representation(lines):
"""Given a binary dump as given by GNU od -b, look for long double
representation."""
# Read contains a list of 32 items, each item is a byte (in octal
# representation, as a string). We 'slide' over the output until read is of
# the form before_seq + content + after_sequence, where content is the long double
# representation:
# - content is 12 bytes: 80 bits Intel representation
    # - content is 16 bytes: 80 bits Intel representation (padded) or quad precision
# - content is 8 bytes: same as double (not implemented yet)
read = [''] * 32
saw = None
for line in lines:
        # we skip the first word, as od -b outputs an index at the beginning of
        # each line
for w in line.split()[1:]:
read.pop(0)
read.append(w)
# If the end of read is equal to the after_sequence, read contains
# the long double
if read[-8:] == _AFTER_SEQ:
saw = copy.copy(read)
if read[:12] == _BEFORE_SEQ[4:]:
if read[12:-8] == _INTEL_EXTENDED_12B:
return 'INTEL_EXTENDED_12_BYTES_LE'
if read[12:-8] == _MOTOROLA_EXTENDED_12B:
return 'MOTOROLA_EXTENDED_12_BYTES_BE'
elif read[:8] == _BEFORE_SEQ[8:]:
if read[8:-8] == _INTEL_EXTENDED_16B:
return 'INTEL_EXTENDED_16_BYTES_LE'
elif read[8:-8] == _IEEE_QUAD_PREC_BE:
return 'IEEE_QUAD_BE'
elif read[8:-8] == _IEEE_QUAD_PREC_LE:
return 'IEEE_QUAD_LE'
elif read[8:-8] == _DOUBLE_DOUBLE_BE:
return 'DOUBLE_DOUBLE_BE'
elif read[:16] == _BEFORE_SEQ:
if read[16:-8] == _IEEE_DOUBLE_LE:
return 'IEEE_DOUBLE_LE'
elif read[16:-8] == _IEEE_DOUBLE_BE:
return 'IEEE_DOUBLE_BE'
if saw is not None:
raise ValueError("Unrecognized format (%s)" % saw)
else:
# We never detected the after_sequence
raise ValueError("Could not lock sequences (%s)" % saw)
|
{
"content_hash": "5b6ddb4b95468f9e03619cd49d6cff95",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 86,
"avg_line_length": 39.22950819672131,
"alnum_prop": 0.5691600501462599,
"repo_name": "ogrisel/numpy",
"id": "1f3e6b44edc4a694e2aed1ce3ebf60e5c927fc48",
"size": "11965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numpy/core/setup_common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8458376"
},
{
"name": "C++",
"bytes": "231081"
},
{
"name": "CSS",
"bytes": "6226"
},
{
"name": "FORTRAN",
"bytes": "14157"
},
{
"name": "Objective-C",
"bytes": "4836"
},
{
"name": "Perl",
"bytes": "458"
},
{
"name": "Python",
"bytes": "5745897"
},
{
"name": "Shell",
"bytes": "3950"
},
{
"name": "TeX",
"bytes": "104434"
}
],
"symlink_target": ""
}
|
"""
Django settings for blog project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm^dg()k-oal=zak$!4d$2h#l_c)+h68uw3j7zthnk)27az$8kj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'settings.urls'
WSGI_APPLICATION = 'settings.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
{
"content_hash": "73f80b5611f44c45c9a8f141e3c4c38c",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 71,
"avg_line_length": 24.476190476190474,
"alnum_prop": 0.7232490272373541,
"repo_name": "caulagi/django-adminapi",
"id": "d259abd34e804088c5551ed68fcdf72393c8b1cf",
"size": "2056",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/weblog/settings/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "283"
},
{
"name": "Python",
"bytes": "4213"
}
],
"symlink_target": ""
}
|
import sys
import unittest
import unittest.mock
from test import support
from test.support import import_helper
from test.support import os_helper
from test.support import socket_helper
from test.support import threading_helper
from test.support import warnings_helper
import socket
import select
import time
import enum
import gc
import os
import errno
import pprint
import urllib.request
import threading
import traceback
import weakref
import platform
import sysconfig
import functools
try:
import ctypes
except ImportError:
ctypes = None
asyncore = warnings_helper.import_deprecated('asyncore')
ssl = import_helper.import_module("ssl")
import _ssl
from ssl import TLSVersion, _TLSContentType, _TLSMessageType, _TLSAlertType
Py_DEBUG = hasattr(sys, 'gettotalrefcount')
Py_DEBUG_WIN32 = Py_DEBUG and sys.platform == 'win32'
PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
HOST = socket_helper.HOST
IS_OPENSSL_3_0_0 = ssl.OPENSSL_VERSION_INFO >= (3, 0, 0)
PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS')
PROTOCOL_TO_TLS_VERSION = {}
for proto, ver in (
("PROTOCOL_SSLv23", "SSLv3"),
("PROTOCOL_TLSv1", "TLSv1"),
("PROTOCOL_TLSv1_1", "TLSv1_1"),
):
try:
proto = getattr(ssl, proto)
ver = getattr(ssl.TLSVersion, ver)
except AttributeError:
continue
PROTOCOL_TO_TLS_VERSION[proto] = ver
def data_file(*name):
return os.path.join(os.path.dirname(__file__), *name)
# The custom key and certificate files used in test_ssl are generated
# using Lib/test/make_ssl_certs.py.
# Other certificates are simply fetched from the internet servers they
# are meant to authenticate.
CERTFILE = data_file("keycert.pem")
BYTES_CERTFILE = os.fsencode(CERTFILE)
ONLYCERT = data_file("ssl_cert.pem")
ONLYKEY = data_file("ssl_key.pem")
BYTES_ONLYCERT = os.fsencode(ONLYCERT)
BYTES_ONLYKEY = os.fsencode(ONLYKEY)
CERTFILE_PROTECTED = data_file("keycert.passwd.pem")
ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem")
KEY_PASSWORD = "somepass"
CAPATH = data_file("capath")
BYTES_CAPATH = os.fsencode(CAPATH)
CAFILE_NEURONIO = data_file("capath", "4e1295a3.0")
CAFILE_CACERT = data_file("capath", "5ed36f99.0")
CERTFILE_INFO = {
'issuer': ((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),)),
'notAfter': 'Aug 26 14:23:15 2028 GMT',
'notBefore': 'Aug 29 14:23:15 2018 GMT',
'serialNumber': '98A7CF88C74A32ED',
'subject': ((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),)),
'subjectAltName': (('DNS', 'localhost'),),
'version': 3
}
# empty CRL
CRLFILE = data_file("revocation.crl")
# Two keys and certs signed by the same CA (for SNI tests)
SIGNED_CERTFILE = data_file("keycert3.pem")
SIGNED_CERTFILE_HOSTNAME = 'localhost'
SIGNED_CERTFILE_INFO = {
'OCSP': ('http://testca.pythontest.net/testca/ocsp/',),
'caIssuers': ('http://testca.pythontest.net/testca/pycacert.cer',),
'crlDistributionPoints': ('http://testca.pythontest.net/testca/revocation.crl',),
'issuer': ((('countryName', 'XY'),),
(('organizationName', 'Python Software Foundation CA'),),
(('commonName', 'our-ca-server'),)),
'notAfter': 'Oct 28 14:23:16 2037 GMT',
'notBefore': 'Aug 29 14:23:16 2018 GMT',
'serialNumber': 'CB2D80995A69525C',
'subject': ((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),)),
'subjectAltName': (('DNS', 'localhost'),),
'version': 3
}
SIGNED_CERTFILE2 = data_file("keycert4.pem")
SIGNED_CERTFILE2_HOSTNAME = 'fakehostname'
SIGNED_CERTFILE_ECC = data_file("keycertecc.pem")
SIGNED_CERTFILE_ECC_HOSTNAME = 'localhost-ecc'
# Same certificate as pycacert.pem, but without extra text in file
SIGNING_CA = data_file("capath", "ceff1710.0")
# cert with all kinds of subject alt names
ALLSANFILE = data_file("allsans.pem")
IDNSANSFILE = data_file("idnsans.pem")
NOSANFILE = data_file("nosan.pem")
NOSAN_HOSTNAME = 'localhost'
REMOTE_HOST = "self-signed.pythontest.net"
EMPTYCERT = data_file("nullcert.pem")
BADCERT = data_file("badcert.pem")
NONEXISTINGCERT = data_file("XXXnonexisting.pem")
BADKEY = data_file("badkey.pem")
NOKIACERT = data_file("nokia.pem")
NULLBYTECERT = data_file("nullbytecert.pem")
TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem")
DHFILE = data_file("ffdh3072.pem")
BYTES_DHFILE = os.fsencode(DHFILE)
# Not defined in all versions of OpenSSL
OP_NO_COMPRESSION = getattr(ssl, "OP_NO_COMPRESSION", 0)
OP_SINGLE_DH_USE = getattr(ssl, "OP_SINGLE_DH_USE", 0)
OP_SINGLE_ECDH_USE = getattr(ssl, "OP_SINGLE_ECDH_USE", 0)
OP_CIPHER_SERVER_PREFERENCE = getattr(ssl, "OP_CIPHER_SERVER_PREFERENCE", 0)
OP_ENABLE_MIDDLEBOX_COMPAT = getattr(ssl, "OP_ENABLE_MIDDLEBOX_COMPAT", 0)
OP_IGNORE_UNEXPECTED_EOF = getattr(ssl, "OP_IGNORE_UNEXPECTED_EOF", 0)
# Ubuntu has patched OpenSSL and changed behavior of security level 2
# see https://bugs.python.org/issue41561#msg389003
def is_ubuntu():
try:
        # Assume that any reference to "ubuntu" implies an Ubuntu-like distro.
        # The workaround is not required for 18.04, but doesn't hurt either.
with open("/etc/os-release", encoding="utf-8") as f:
return "ubuntu" in f.read()
except FileNotFoundError:
return False
if is_ubuntu():
def seclevel_workaround(*ctxs):
""""Lower security level to '1' and allow all ciphers for TLS 1.0/1"""
for ctx in ctxs:
if (
hasattr(ctx, "minimum_version") and
ctx.minimum_version <= ssl.TLSVersion.TLSv1_1
):
ctx.set_ciphers("@SECLEVEL=1:ALL")
else:
def seclevel_workaround(*ctxs):
pass
def has_tls_protocol(protocol):
"""Check if a TLS protocol is available and enabled
:param protocol: enum ssl._SSLMethod member or name
:return: bool
"""
if isinstance(protocol, str):
assert protocol.startswith('PROTOCOL_')
protocol = getattr(ssl, protocol, None)
if protocol is None:
return False
if protocol in {
ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_SERVER,
ssl.PROTOCOL_TLS_CLIENT
}:
# auto-negotiate protocols are always available
return True
name = protocol.name
return has_tls_version(name[len('PROTOCOL_'):])
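# Illustrative check (uses only names defined above):
#   has_tls_protocol('PROTOCOL_TLSv1_2') is True only when the constant exists
#   and TLSv1_2 is both compiled in and allowed by the runtime policy.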
@functools.lru_cache
def has_tls_version(version):
"""Check if a TLS/SSL version is enabled
:param version: TLS version name or ssl.TLSVersion member
:return: bool
"""
if version == "SSLv2":
# never supported and not even in TLSVersion enum
return False
if isinstance(version, str):
version = ssl.TLSVersion.__members__[version]
# check compile time flags like ssl.HAS_TLSv1_2
if not getattr(ssl, f'HAS_{version.name}'):
return False
if IS_OPENSSL_3_0_0 and version < ssl.TLSVersion.TLSv1_2:
# bpo43791: 3.0.0-alpha14 fails with TLSV1_ALERT_INTERNAL_ERROR
return False
# check runtime and dynamic crypto policy settings. A TLS version may
# be compiled in but disabled by a policy or config option.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
if (
hasattr(ctx, 'minimum_version') and
ctx.minimum_version != ssl.TLSVersion.MINIMUM_SUPPORTED and
version < ctx.minimum_version
):
return False
if (
hasattr(ctx, 'maximum_version') and
ctx.maximum_version != ssl.TLSVersion.MAXIMUM_SUPPORTED and
version > ctx.maximum_version
):
return False
return True
def requires_tls_version(version):
"""Decorator to skip tests when a required TLS version is not available
:param version: TLS version name or ssl.TLSVersion member
:return:
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
if not has_tls_version(version):
raise unittest.SkipTest(f"{version} is not available.")
else:
return func(*args, **kw)
return wrapper
return decorator
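# Typical use of the decorator above (illustrative):
#   @requires_tls_version(ssl.TLSVersion.TLSv1_3)
#   def test_needs_tls13(self): ...
# The test is skipped when the requested TLS version is unavailable.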
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
if support.verbose:
sys.stdout.write(prefix + exc_format)
def utc_offset(): #NOTE: ignore issues like #1647654
# local time = utc time + utc offset
if time.daylight and time.localtime().tm_isdst > 0:
return -time.altzone # seconds
return -time.timezone
ignore_deprecation = warnings_helper.ignore_warnings(
category=DeprecationWarning
)
def test_wrap_socket(sock, *,
cert_reqs=ssl.CERT_NONE, ca_certs=None,
ciphers=None, certfile=None, keyfile=None,
**kwargs):
if not kwargs.get("server_side"):
kwargs["server_hostname"] = SIGNED_CERTFILE_HOSTNAME
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
else:
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
if cert_reqs is not None:
if cert_reqs == ssl.CERT_NONE:
context.check_hostname = False
context.verify_mode = cert_reqs
if ca_certs is not None:
context.load_verify_locations(ca_certs)
if certfile is not None or keyfile is not None:
context.load_cert_chain(certfile, keyfile)
if ciphers is not None:
context.set_ciphers(ciphers)
return context.wrap_socket(sock, **kwargs)
def testing_context(server_cert=SIGNED_CERTFILE, *, server_chain=True):
"""Create context
client_context, server_context, hostname = testing_context()
"""
if server_cert == SIGNED_CERTFILE:
hostname = SIGNED_CERTFILE_HOSTNAME
elif server_cert == SIGNED_CERTFILE2:
hostname = SIGNED_CERTFILE2_HOSTNAME
elif server_cert == NOSANFILE:
hostname = NOSAN_HOSTNAME
else:
raise ValueError(server_cert)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.load_verify_locations(SIGNING_CA)
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(server_cert)
if server_chain:
server_context.load_verify_locations(SIGNING_CA)
return client_context, server_context, hostname
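# Illustrative call (the connection tests below follow this pattern):
#   client_context, server_context, hostname = testing_context()
#   with client_context.wrap_socket(socket.socket(),
#                                   server_hostname=hostname) as s:
#       ...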
class BasicSocketTests(unittest.TestCase):
def test_constants(self):
ssl.CERT_NONE
ssl.CERT_OPTIONAL
ssl.CERT_REQUIRED
ssl.OP_CIPHER_SERVER_PREFERENCE
ssl.OP_SINGLE_DH_USE
ssl.OP_SINGLE_ECDH_USE
ssl.OP_NO_COMPRESSION
self.assertEqual(ssl.HAS_SNI, True)
self.assertEqual(ssl.HAS_ECDH, True)
self.assertEqual(ssl.HAS_TLSv1_2, True)
self.assertEqual(ssl.HAS_TLSv1_3, True)
ssl.OP_NO_SSLv2
ssl.OP_NO_SSLv3
ssl.OP_NO_TLSv1
ssl.OP_NO_TLSv1_3
ssl.OP_NO_TLSv1_1
ssl.OP_NO_TLSv1_2
self.assertEqual(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv23)
def test_ssl_types(self):
ssl_types = [
_ssl._SSLContext,
_ssl._SSLSocket,
_ssl.MemoryBIO,
_ssl.Certificate,
_ssl.SSLSession,
_ssl.SSLError,
]
for ssl_type in ssl_types:
with self.subTest(ssl_type=ssl_type):
with self.assertRaisesRegex(TypeError, "immutable type"):
ssl_type.value = None
support.check_disallow_instantiation(self, _ssl.Certificate)
def test_private_init(self):
with self.assertRaisesRegex(TypeError, "public constructor"):
with socket.socket() as s:
ssl.SSLSocket(s)
def test_str_for_enums(self):
# Make sure that the PROTOCOL_* constants have enum-like string
# reprs.
proto = ssl.PROTOCOL_TLS_CLIENT
self.assertEqual(repr(proto), '<_SSLMethod.PROTOCOL_TLS_CLIENT: %r>' % proto.value)
self.assertEqual(str(proto), str(proto.value))
ctx = ssl.SSLContext(proto)
self.assertIs(ctx.protocol, proto)
def test_random(self):
v = ssl.RAND_status()
if support.verbose:
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
with warnings_helper.check_warnings():
data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
self.assertEqual(len(data), 16)
self.assertEqual(is_cryptographic, v == 1)
if v:
data = ssl.RAND_bytes(16)
self.assertEqual(len(data), 16)
else:
self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16)
# negative num is invalid
self.assertRaises(ValueError, ssl.RAND_bytes, -5)
with warnings_helper.check_warnings():
self.assertRaises(ValueError, ssl.RAND_pseudo_bytes, -5)
ssl.RAND_add("this is a random string", 75.0)
ssl.RAND_add(b"this is a random bytes object", 75.0)
ssl.RAND_add(bytearray(b"this is a random bytearray object"), 75.0)
def test_parse_cert(self):
# note that this uses an 'unofficial' function in _ssl.c,
# provided solely for this test, to exercise the certificate
# parsing code
self.assertEqual(
ssl._ssl._test_decode_cert(CERTFILE),
CERTFILE_INFO
)
self.assertEqual(
ssl._ssl._test_decode_cert(SIGNED_CERTFILE),
SIGNED_CERTFILE_INFO
)
# Issue #13034: the subjectAltName in some certificates
# (notably projects.developer.nokia.com:443) wasn't parsed
p = ssl._ssl._test_decode_cert(NOKIACERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subjectAltName'],
(('DNS', 'projects.developer.nokia.com'),
('DNS', 'projects.forum.nokia.com'))
)
# extra OCSP and AIA fields
self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',))
self.assertEqual(p['caIssuers'],
('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',))
self.assertEqual(p['crlDistributionPoints'],
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
def test_parse_cert_CVE_2019_5010(self):
p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(
p,
{
'issuer': (
(('countryName', 'UK'),), (('commonName', 'cody-ca'),)),
'notAfter': 'Jun 14 18:00:58 2028 GMT',
'notBefore': 'Jun 18 18:00:58 2018 GMT',
'serialNumber': '02',
'subject': ((('countryName', 'UK'),),
(('commonName',
'codenomicon-vm-2.test.lal.cisco.com'),)),
'subjectAltName': (
('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),),
'version': 3
}
)
def test_parse_cert_CVE_2013_4238(self):
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
subject = ((('countryName', 'US'),),
(('stateOrProvinceName', 'Oregon'),),
(('localityName', 'Beaverton'),),
(('organizationName', 'Python Software Foundation'),),
(('organizationalUnitName', 'Python Core Development'),),
(('commonName', 'null.python.org\x00example.org'),),
(('emailAddress', 'python-dev@python.org'),))
self.assertEqual(p['subject'], subject)
self.assertEqual(p['issuer'], subject)
if ssl._OPENSSL_API_VERSION >= (0, 9, 8):
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', 'null@python.org\x00user@example.org'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '2001:DB8:0:0:0:0:0:1'))
else:
# OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', 'null@python.org\x00user@example.org'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '<invalid>'))
self.assertEqual(p['subjectAltName'], san)
def test_parse_all_sans(self):
p = ssl._ssl._test_decode_cert(ALLSANFILE)
self.assertEqual(p['subjectAltName'],
(
('DNS', 'allsans'),
('othername', '<unsupported>'),
('othername', '<unsupported>'),
('email', 'user@example.org'),
('DNS', 'www.example.org'),
('DirName',
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'dirname example'),))),
('URI', 'https://www.python.org/'),
('IP Address', '127.0.0.1'),
('IP Address', '0:0:0:0:0:0:0:1'),
('Registered ID', '1.2.3.4.5')
)
)
def test_DER_to_PEM(self):
with open(CAFILE_CACERT, 'r') as f:
pem = f.read()
d1 = ssl.PEM_cert_to_DER_cert(pem)
p2 = ssl.DER_cert_to_PEM_cert(d1)
d2 = ssl.PEM_cert_to_DER_cert(p2)
self.assertEqual(d1, d2)
if not p2.startswith(ssl.PEM_HEADER + '\n'):
self.fail("DER-to-PEM didn't include correct header:\n%r\n" % p2)
if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'):
self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2)
def test_openssl_version(self):
n = ssl.OPENSSL_VERSION_NUMBER
t = ssl.OPENSSL_VERSION_INFO
s = ssl.OPENSSL_VERSION
self.assertIsInstance(n, int)
self.assertIsInstance(t, tuple)
self.assertIsInstance(s, str)
# Some sanity checks follow
# >= 1.1.1
self.assertGreaterEqual(n, 0x10101000)
# < 4.0
self.assertLess(n, 0x40000000)
major, minor, fix, patch, status = t
self.assertGreaterEqual(major, 1)
self.assertLess(major, 4)
self.assertGreaterEqual(minor, 0)
self.assertLess(minor, 256)
self.assertGreaterEqual(fix, 0)
self.assertLess(fix, 256)
self.assertGreaterEqual(patch, 0)
self.assertLessEqual(patch, 63)
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
libressl_ver = f"LibreSSL {major:d}"
if major >= 3:
# 3.x uses 0xMNN00PP0L
openssl_ver = f"OpenSSL {major:d}.{minor:d}.{patch:d}"
else:
openssl_ver = f"OpenSSL {major:d}.{minor:d}.{fix:d}"
self.assertTrue(
s.startswith((openssl_ver, libressl_ver)),
(s, t, hex(n))
)
@support.cpython_only
def test_refcycle(self):
# Issue #7943: an SSL object doesn't create reference cycles with
# itself.
s = socket.socket(socket.AF_INET)
ss = test_wrap_socket(s)
wr = weakref.ref(ss)
with warnings_helper.check_warnings(("", ResourceWarning)):
del ss
self.assertEqual(wr(), None)
def test_wrapped_unconnected(self):
        # Methods on an unconnected SSLSocket propagate the original
        # OSError raised by the underlying socket object.
s = socket.socket(socket.AF_INET)
with test_wrap_socket(s) as ss:
self.assertRaises(OSError, ss.recv, 1)
self.assertRaises(OSError, ss.recv_into, bytearray(b'x'))
self.assertRaises(OSError, ss.recvfrom, 1)
self.assertRaises(OSError, ss.recvfrom_into, bytearray(b'x'), 1)
self.assertRaises(OSError, ss.send, b'x')
self.assertRaises(OSError, ss.sendto, b'x', ('0.0.0.0', 0))
self.assertRaises(NotImplementedError, ss.dup)
self.assertRaises(NotImplementedError, ss.sendmsg,
[b'x'], (), 0, ('0.0.0.0', 0))
self.assertRaises(NotImplementedError, ss.recvmsg, 100)
self.assertRaises(NotImplementedError, ss.recvmsg_into,
[bytearray(100)])
def test_timeout(self):
# Issue #8524: when creating an SSL socket, the timeout of the
# original socket should be retained.
for timeout in (None, 0.0, 5.0):
s = socket.socket(socket.AF_INET)
s.settimeout(timeout)
with test_wrap_socket(s) as ss:
self.assertEqual(timeout, ss.gettimeout())
def test_openssl111_deprecations(self):
options = [
ssl.OP_NO_TLSv1,
ssl.OP_NO_TLSv1_1,
ssl.OP_NO_TLSv1_2,
ssl.OP_NO_TLSv1_3
]
protocols = [
ssl.PROTOCOL_TLSv1,
ssl.PROTOCOL_TLSv1_1,
ssl.PROTOCOL_TLSv1_2,
ssl.PROTOCOL_TLS
]
versions = [
ssl.TLSVersion.SSLv3,
ssl.TLSVersion.TLSv1,
ssl.TLSVersion.TLSv1_1,
]
for option in options:
with self.subTest(option=option):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with self.assertWarns(DeprecationWarning) as cm:
ctx.options |= option
self.assertEqual(
'ssl.OP_NO_SSL*/ssl.OP_NO_TLS* options are deprecated',
str(cm.warning)
)
for protocol in protocols:
if not has_tls_protocol(protocol):
continue
with self.subTest(protocol=protocol):
with self.assertWarns(DeprecationWarning) as cm:
ssl.SSLContext(protocol)
self.assertEqual(
f'ssl.{protocol.name} is deprecated',
str(cm.warning)
)
for version in versions:
if not has_tls_version(version):
continue
with self.subTest(version=version):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with self.assertWarns(DeprecationWarning) as cm:
ctx.minimum_version = version
version_text = '%s.%s' % (version.__class__.__name__, version.name)
self.assertEqual(
f'ssl.{version_text} is deprecated',
str(cm.warning)
)
@ignore_deprecation
def test_errors_sslwrap(self):
sock = socket.socket()
self.assertRaisesRegex(ValueError,
"certfile must be specified",
ssl.wrap_socket, sock, keyfile=CERTFILE)
self.assertRaisesRegex(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True)
self.assertRaisesRegex(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True, certfile="")
with ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE) as s:
self.assertRaisesRegex(ValueError, "can't connect in server-side mode",
s.connect, (HOST, 8080))
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock,
certfile=CERTFILE, keyfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock,
certfile=NONEXISTINGCERT, keyfile=NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def bad_cert_test(self, certfile):
"""Check that trying to use the given client certificate fails"""
certfile = os.path.join(os.path.dirname(__file__) or os.curdir,
certfile)
sock = socket.socket()
self.addCleanup(sock.close)
with self.assertRaises(ssl.SSLError):
test_wrap_socket(sock,
certfile=certfile)
def test_empty_cert(self):
"""Wrapping with an empty cert file"""
self.bad_cert_test("nullcert.pem")
def test_malformed_cert(self):
"""Wrapping with a badly formatted certificate (syntax error)"""
self.bad_cert_test("badcert.pem")
def test_malformed_key(self):
"""Wrapping with a badly formatted key (syntax error)"""
self.bad_cert_test("badkey.pem")
@ignore_deprecation
def test_match_hostname(self):
def ok(cert, hostname):
ssl.match_hostname(cert, hostname)
def fail(cert, hostname):
self.assertRaises(ssl.CertificateError,
ssl.match_hostname, cert, hostname)
# -- Hostname matching --
cert = {'subject': ((('commonName', 'example.com'),),)}
ok(cert, 'example.com')
ok(cert, 'ExAmple.cOm')
fail(cert, 'www.example.com')
fail(cert, '.example.com')
fail(cert, 'example.org')
fail(cert, 'exampleXcom')
cert = {'subject': ((('commonName', '*.a.com'),),)}
ok(cert, 'foo.a.com')
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
# only match wildcards when they are the only thing
# in left-most segment
cert = {'subject': ((('commonName', 'f*.com'),),)}
fail(cert, 'foo.com')
fail(cert, 'f.com')
fail(cert, 'bar.com')
fail(cert, 'foo.a.com')
fail(cert, 'bar.foo.com')
# NULL bytes are bad, CVE-2013-4073
cert = {'subject': ((('commonName',
'null.python.org\x00example.org'),),)}
ok(cert, 'null.python.org\x00example.org') # or raise an error?
fail(cert, 'example.org')
fail(cert, 'null.python.org')
# error cases with wildcards
cert = {'subject': ((('commonName', '*.*.a.com'),),)}
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
cert = {'subject': ((('commonName', 'a.*.com'),),)}
fail(cert, 'a.foo.com')
fail(cert, 'a..com')
fail(cert, 'a.com')
# wildcard doesn't match IDNA prefix 'xn--'
idna = 'püthon.python.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
ok(cert, idna)
cert = {'subject': ((('commonName', 'x*.python.org'),),)}
fail(cert, idna)
cert = {'subject': ((('commonName', 'xn--p*.python.org'),),)}
fail(cert, idna)
        # wildcard in first fragment and IDNA A-labels in subsequent fragments
        # are supported.
idna = 'www*.pythön.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
fail(cert, 'www.pythön.org'.encode("idna").decode("ascii"))
fail(cert, 'www1.pythön.org'.encode("idna").decode("ascii"))
fail(cert, 'ftp.pythön.org'.encode("idna").decode("ascii"))
fail(cert, 'pythön.org'.encode("idna").decode("ascii"))
# Slightly fake real-world example
cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT',
'subject': ((('commonName', 'linuxfrz.org'),),),
'subjectAltName': (('DNS', 'linuxfr.org'),
('DNS', 'linuxfr.com'),
('othername', '<unsupported>'))}
ok(cert, 'linuxfr.org')
ok(cert, 'linuxfr.com')
# Not a "DNS" entry
fail(cert, '<unsupported>')
# When there is a subjectAltName, commonName isn't used
fail(cert, 'linuxfrz.org')
# A pristine real-world example
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),),
(('commonName', 'mail.google.com'),))}
ok(cert, 'mail.google.com')
fail(cert, 'gmail.com')
# Only commonName is considered
fail(cert, 'California')
# -- IPv4 matching --
cert = {'subject': ((('commonName', 'example.com'),),),
'subjectAltName': (('DNS', 'example.com'),
('IP Address', '10.11.12.13'),
('IP Address', '14.15.16.17'),
('IP Address', '127.0.0.1'))}
ok(cert, '10.11.12.13')
ok(cert, '14.15.16.17')
# socket.inet_ntoa(socket.inet_aton('127.1')) == '127.0.0.1'
fail(cert, '127.1')
fail(cert, '14.15.16.17 ')
fail(cert, '14.15.16.17 extra data')
fail(cert, '14.15.16.18')
fail(cert, 'example.net')
# -- IPv6 matching --
if socket_helper.IPV6_ENABLED:
cert = {'subject': ((('commonName', 'example.com'),),),
'subjectAltName': (
('DNS', 'example.com'),
('IP Address', '2001:0:0:0:0:0:0:CAFE\n'),
('IP Address', '2003:0:0:0:0:0:0:BABA\n'))}
ok(cert, '2001::cafe')
ok(cert, '2003::baba')
fail(cert, '2003::baba ')
fail(cert, '2003::baba extra data')
fail(cert, '2003::bebe')
fail(cert, 'example.net')
# -- Miscellaneous --
# Neither commonName nor subjectAltName
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),))}
fail(cert, 'mail.google.com')
# No DNS entry in subjectAltName but a commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('commonName', 'mail.google.com'),)),
'subjectAltName': (('othername', 'blabla'), )}
ok(cert, 'mail.google.com')
# No DNS entry subjectAltName and no commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),)),
'subjectAltName': (('othername', 'blabla'),)}
fail(cert, 'google.com')
# Empty cert / no cert
self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com')
self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com')
# Issue #17980: avoid denials of service by refusing more than one
# wildcard per fragment.
cert = {'subject': ((('commonName', 'a*b.example.com'),),)}
with self.assertRaisesRegex(
ssl.CertificateError,
"partial wildcards in leftmost label are not supported"):
ssl.match_hostname(cert, 'axxb.example.com')
cert = {'subject': ((('commonName', 'www.*.example.com'),),)}
with self.assertRaisesRegex(
ssl.CertificateError,
"wildcard can only be present in the leftmost label"):
ssl.match_hostname(cert, 'www.sub.example.com')
cert = {'subject': ((('commonName', 'a*b*.example.com'),),)}
with self.assertRaisesRegex(
ssl.CertificateError,
"too many wildcards"):
ssl.match_hostname(cert, 'axxbxxc.example.com')
cert = {'subject': ((('commonName', '*'),),)}
with self.assertRaisesRegex(
ssl.CertificateError,
"sole wildcard without additional labels are not support"):
ssl.match_hostname(cert, 'host')
cert = {'subject': ((('commonName', '*.com'),),)}
with self.assertRaisesRegex(
ssl.CertificateError,
r"hostname 'com' doesn't match '\*.com'"):
ssl.match_hostname(cert, 'com')
# extra checks for _inet_paton()
for invalid in ['1', '', '1.2.3', '256.0.0.1', '127.0.0.1/24']:
with self.assertRaises(ValueError):
ssl._inet_paton(invalid)
for ipaddr in ['127.0.0.1', '192.168.0.1']:
self.assertTrue(ssl._inet_paton(ipaddr))
if socket_helper.IPV6_ENABLED:
for ipaddr in ['::1', '2001:db8:85a3::8a2e:370:7334']:
self.assertTrue(ssl._inet_paton(ipaddr))
def test_server_side(self):
# server_hostname doesn't work for server sockets
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
with socket.socket() as sock:
self.assertRaises(ValueError, ctx.wrap_socket, sock, True,
server_hostname="some.hostname")
def test_unknown_channel_binding(self):
# should raise ValueError for unknown type
s = socket.create_server(('127.0.0.1', 0))
c = socket.socket(socket.AF_INET)
c.connect(s.getsockname())
with test_wrap_socket(c, do_handshake_on_connect=False) as ss:
with self.assertRaises(ValueError):
ss.get_channel_binding("unknown-type")
s.close()
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
# unconnected should return None for known type
s = socket.socket(socket.AF_INET)
with test_wrap_socket(s) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
# the same for server-side
s = socket.socket(socket.AF_INET)
with test_wrap_socket(s, server_side=True, certfile=CERTFILE) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
def test_dealloc_warn(self):
ss = test_wrap_socket(socket.socket(socket.AF_INET))
r = repr(ss)
with self.assertWarns(ResourceWarning) as cm:
ss = None
support.gc_collect()
self.assertIn(r, str(cm.warning.args[0]))
def test_get_default_verify_paths(self):
paths = ssl.get_default_verify_paths()
self.assertEqual(len(paths), 6)
self.assertIsInstance(paths, ssl.DefaultVerifyPaths)
with os_helper.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
paths = ssl.get_default_verify_paths()
self.assertEqual(paths.cafile, CERTFILE)
self.assertEqual(paths.capath, CAPATH)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_certificates(self):
self.assertTrue(ssl.enum_certificates("CA"))
self.assertTrue(ssl.enum_certificates("ROOT"))
self.assertRaises(TypeError, ssl.enum_certificates)
self.assertRaises(WindowsError, ssl.enum_certificates, "")
trust_oids = set()
for storename in ("CA", "ROOT"):
store = ssl.enum_certificates(storename)
self.assertIsInstance(store, list)
for element in store:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 3)
cert, enc, trust = element
self.assertIsInstance(cert, bytes)
self.assertIn(enc, {"x509_asn", "pkcs_7_asn"})
self.assertIsInstance(trust, (frozenset, set, bool))
if isinstance(trust, (frozenset, set)):
trust_oids.update(trust)
serverAuth = "1.3.6.1.5.5.7.3.1"
self.assertIn(serverAuth, trust_oids)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_crls(self):
self.assertTrue(ssl.enum_crls("CA"))
self.assertRaises(TypeError, ssl.enum_crls)
self.assertRaises(WindowsError, ssl.enum_crls, "")
crls = ssl.enum_crls("CA")
self.assertIsInstance(crls, list)
for element in crls:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 2)
self.assertIsInstance(element[0], bytes)
self.assertIn(element[1], {"x509_asn", "pkcs_7_asn"})
def test_asn1object(self):
expected = (129, 'serverAuth', 'TLS Web Server Authentication',
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertEqual(val, expected)
self.assertEqual(val.nid, 129)
self.assertEqual(val.shortname, 'serverAuth')
self.assertEqual(val.longname, 'TLS Web Server Authentication')
self.assertEqual(val.oid, '1.3.6.1.5.5.7.3.1')
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object, 'serverAuth')
val = ssl._ASN1Object.fromnid(129)
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object.fromnid, -1)
with self.assertRaisesRegex(ValueError, "unknown NID 100000"):
ssl._ASN1Object.fromnid(100000)
for i in range(1000):
try:
obj = ssl._ASN1Object.fromnid(i)
except ValueError:
pass
else:
self.assertIsInstance(obj.nid, int)
self.assertIsInstance(obj.shortname, str)
self.assertIsInstance(obj.longname, str)
self.assertIsInstance(obj.oid, (str, type(None)))
val = ssl._ASN1Object.fromname('TLS Web Server Authentication')
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertEqual(ssl._ASN1Object.fromname('serverAuth'), expected)
self.assertEqual(ssl._ASN1Object.fromname('1.3.6.1.5.5.7.3.1'),
expected)
with self.assertRaisesRegex(ValueError, "unknown object 'serverauth'"):
ssl._ASN1Object.fromname('serverauth')
def test_purpose_enum(self):
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertIsInstance(ssl.Purpose.SERVER_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.SERVER_AUTH, val)
self.assertEqual(ssl.Purpose.SERVER_AUTH.nid, 129)
self.assertEqual(ssl.Purpose.SERVER_AUTH.shortname, 'serverAuth')
self.assertEqual(ssl.Purpose.SERVER_AUTH.oid,
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.2')
self.assertIsInstance(ssl.Purpose.CLIENT_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.CLIENT_AUTH, val)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.nid, 130)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.shortname, 'clientAuth')
self.assertEqual(ssl.Purpose.CLIENT_AUTH.oid,
'1.3.6.1.5.5.7.3.2')
def test_unsupported_dtls(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.addCleanup(s.close)
with self.assertRaises(NotImplementedError) as cx:
test_wrap_socket(s, cert_reqs=ssl.CERT_NONE)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with self.assertRaises(NotImplementedError) as cx:
ctx.wrap_socket(s)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
def cert_time_ok(self, timestring, timestamp):
self.assertEqual(ssl.cert_time_to_seconds(timestring), timestamp)
def cert_time_fail(self, timestring):
with self.assertRaises(ValueError):
ssl.cert_time_to_seconds(timestring)
@unittest.skipUnless(utc_offset(),
'local time needs to be different from UTC')
def test_cert_time_to_seconds_timezone(self):
# Issue #19940: ssl.cert_time_to_seconds() returns wrong
# results if local timezone is not UTC
self.cert_time_ok("May 9 00:00:00 2007 GMT", 1178668800.0)
self.cert_time_ok("Jan 5 09:34:43 2018 GMT", 1515144883.0)
def test_cert_time_to_seconds(self):
timestring = "Jan 5 09:34:43 2018 GMT"
ts = 1515144883.0
self.cert_time_ok(timestring, ts)
# accept keyword parameter, assert its name
self.assertEqual(ssl.cert_time_to_seconds(cert_time=timestring), ts)
# accept both %e and %d (space or zero generated by strftime)
self.cert_time_ok("Jan 05 09:34:43 2018 GMT", ts)
# case-insensitive
self.cert_time_ok("JaN 5 09:34:43 2018 GmT", ts)
self.cert_time_fail("Jan 5 09:34 2018 GMT") # no seconds
self.cert_time_fail("Jan 5 09:34:43 2018") # no GMT
self.cert_time_fail("Jan 5 09:34:43 2018 UTC") # not GMT timezone
self.cert_time_fail("Jan 35 09:34:43 2018 GMT") # invalid day
self.cert_time_fail("Jon 5 09:34:43 2018 GMT") # invalid month
self.cert_time_fail("Jan 5 24:00:00 2018 GMT") # invalid hour
self.cert_time_fail("Jan 5 09:60:43 2018 GMT") # invalid minute
newyear_ts = 1230768000.0
# leap seconds
self.cert_time_ok("Dec 31 23:59:60 2008 GMT", newyear_ts)
# same timestamp
self.cert_time_ok("Jan 1 00:00:00 2009 GMT", newyear_ts)
self.cert_time_ok("Jan 5 09:34:59 2018 GMT", 1515144899)
# allow 60th second (even if it is not a leap second)
self.cert_time_ok("Jan 5 09:34:60 2018 GMT", 1515144900)
# allow 2nd leap second for compatibility with time.strptime()
self.cert_time_ok("Jan 5 09:34:61 2018 GMT", 1515144901)
self.cert_time_fail("Jan 5 09:34:62 2018 GMT") # invalid seconds
# no special treatment for the special value:
# 99991231235959Z (rfc 5280)
self.cert_time_ok("Dec 31 23:59:59 9999 GMT", 253402300799.0)
@support.run_with_locale('LC_ALL', '')
def test_cert_time_to_seconds_locale(self):
# `cert_time_to_seconds()` should be locale independent
def local_february_name():
return time.strftime('%b', (1, 2, 3, 4, 5, 6, 0, 0, 0))
if local_february_name().lower() == 'feb':
self.skipTest("locale-specific month name needs to be "
"different from C locale")
# locale-independent
self.cert_time_ok("Feb 9 00:00:00 2007 GMT", 1170979200.0)
self.cert_time_fail(local_february_name() + " 9 00:00:00 2007 GMT")
def test_connect_ex_error(self):
server = socket.socket(socket.AF_INET)
self.addCleanup(server.close)
port = socket_helper.bind_port(server) # Reserve port but don't listen
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED)
self.addCleanup(s.close)
rc = s.connect_ex((HOST, port))
# Issue #19919: Windows machines or VMs hosted on Windows
# machines sometimes return EWOULDBLOCK.
errors = (
errno.ECONNREFUSED, errno.EHOSTUNREACH, errno.ETIMEDOUT,
errno.EWOULDBLOCK,
)
self.assertIn(rc, errors)
def test_read_write_zero(self):
# empty reads and writes now work, bpo-42854, bpo-31711
client_context, server_context, hostname = testing_context()
server = ThreadedEchoServer(context=server_context)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertEqual(s.recv(0), b"")
self.assertEqual(s.send(b""), 0)
class ContextTests(unittest.TestCase):
def test_constructor(self):
for protocol in PROTOCOLS:
if has_tls_protocol(protocol):
with warnings_helper.check_warnings():
ctx = ssl.SSLContext(protocol)
self.assertEqual(ctx.protocol, protocol)
with warnings_helper.check_warnings():
ctx = ssl.SSLContext()
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS)
self.assertRaises(ValueError, ssl.SSLContext, -1)
self.assertRaises(ValueError, ssl.SSLContext, 42)
def test_ciphers(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.set_ciphers("ALL")
ctx.set_ciphers("DEFAULT")
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
ctx.set_ciphers("^$:,;?*'dorothyx")
@unittest.skipUnless(PY_SSL_DEFAULT_CIPHERS == 1,
"Test applies only to Python default ciphers")
def test_python_ciphers(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ciphers = ctx.get_ciphers()
for suite in ciphers:
name = suite['name']
self.assertNotIn("PSK", name)
self.assertNotIn("SRP", name)
self.assertNotIn("MD5", name)
self.assertNotIn("RC4", name)
self.assertNotIn("3DES", name)
def test_get_ciphers(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.set_ciphers('AESGCM')
names = set(d['name'] for d in ctx.get_ciphers())
expected = {
'AES128-GCM-SHA256',
'ECDHE-ECDSA-AES128-GCM-SHA256',
'ECDHE-RSA-AES128-GCM-SHA256',
'DHE-RSA-AES128-GCM-SHA256',
'AES256-GCM-SHA384',
'ECDHE-ECDSA-AES256-GCM-SHA384',
'ECDHE-RSA-AES256-GCM-SHA384',
'DHE-RSA-AES256-GCM-SHA384',
}
intersection = names.intersection(expected)
self.assertGreaterEqual(
len(intersection), 2, f"\ngot: {sorted(names)}\nexpected: {sorted(expected)}"
)
def test_options(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
# OP_ALL | OP_NO_SSLv2 | OP_NO_SSLv3 is the default value
default = (ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
# SSLContext also enables these by default
default |= (OP_NO_COMPRESSION | OP_CIPHER_SERVER_PREFERENCE |
OP_SINGLE_DH_USE | OP_SINGLE_ECDH_USE |
OP_ENABLE_MIDDLEBOX_COMPAT |
OP_IGNORE_UNEXPECTED_EOF)
self.assertEqual(default, ctx.options)
with warnings_helper.check_warnings():
ctx.options |= ssl.OP_NO_TLSv1
self.assertEqual(default | ssl.OP_NO_TLSv1, ctx.options)
with warnings_helper.check_warnings():
ctx.options = (ctx.options & ~ssl.OP_NO_TLSv1)
self.assertEqual(default, ctx.options)
ctx.options = 0
# Ubuntu has OP_NO_SSLv3 forced on by default
self.assertEqual(0, ctx.options & ~ssl.OP_NO_SSLv3)
def test_verify_mode_protocol(self):
with warnings_helper.check_warnings():
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
# Default value
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
ctx.verify_mode = ssl.CERT_OPTIONAL
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.verify_mode = ssl.CERT_NONE
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
with self.assertRaises(TypeError):
ctx.verify_mode = None
with self.assertRaises(ValueError):
ctx.verify_mode = 42
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertFalse(ctx.check_hostname)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertTrue(ctx.check_hostname)
def test_hostname_checks_common_name(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertTrue(ctx.hostname_checks_common_name)
if ssl.HAS_NEVER_CHECK_COMMON_NAME:
ctx.hostname_checks_common_name = True
self.assertTrue(ctx.hostname_checks_common_name)
ctx.hostname_checks_common_name = False
self.assertFalse(ctx.hostname_checks_common_name)
ctx.hostname_checks_common_name = True
self.assertTrue(ctx.hostname_checks_common_name)
else:
with self.assertRaises(AttributeError):
ctx.hostname_checks_common_name = True
@ignore_deprecation
def test_min_max_version(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# OpenSSL default is MINIMUM_SUPPORTED; however, some vendors such as
# Fedora override the setting to TLS 1.0.
minimum_range = {
# stock OpenSSL
ssl.TLSVersion.MINIMUM_SUPPORTED,
# Fedora 29 uses TLS 1.0 by default
ssl.TLSVersion.TLSv1,
# RHEL 8 uses TLS 1.2 by default
ssl.TLSVersion.TLSv1_2
}
maximum_range = {
# stock OpenSSL
ssl.TLSVersion.MAXIMUM_SUPPORTED,
# Fedora 32 uses TLS 1.3 by default
ssl.TLSVersion.TLSv1_3
}
self.assertIn(
ctx.minimum_version, minimum_range
)
self.assertIn(
ctx.maximum_version, maximum_range
)
ctx.minimum_version = ssl.TLSVersion.TLSv1_1
ctx.maximum_version = ssl.TLSVersion.TLSv1_2
self.assertEqual(
ctx.minimum_version, ssl.TLSVersion.TLSv1_1
)
self.assertEqual(
ctx.maximum_version, ssl.TLSVersion.TLSv1_2
)
ctx.minimum_version = ssl.TLSVersion.MINIMUM_SUPPORTED
ctx.maximum_version = ssl.TLSVersion.TLSv1
self.assertEqual(
ctx.minimum_version, ssl.TLSVersion.MINIMUM_SUPPORTED
)
self.assertEqual(
ctx.maximum_version, ssl.TLSVersion.TLSv1
)
ctx.maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
self.assertEqual(
ctx.maximum_version, ssl.TLSVersion.MAXIMUM_SUPPORTED
)
ctx.maximum_version = ssl.TLSVersion.MINIMUM_SUPPORTED
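# Setting maximum_version to MINIMUM_SUPPORTED clamps it down to the
# lowest protocol version this OpenSSL build still supports (and,
# symmetrically below, MAXIMUM_SUPPORTED clamps minimum_version up).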
self.assertIn(
ctx.maximum_version,
{ssl.TLSVersion.TLSv1, ssl.TLSVersion.TLSv1_1, ssl.TLSVersion.SSLv3}
)
ctx.minimum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
self.assertIn(
ctx.minimum_version,
{ssl.TLSVersion.TLSv1_2, ssl.TLSVersion.TLSv1_3}
)
with self.assertRaises(ValueError):
ctx.minimum_version = 42
if has_tls_protocol(ssl.PROTOCOL_TLSv1_1):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_1)
self.assertIn(
ctx.minimum_version, minimum_range
)
self.assertEqual(
ctx.maximum_version, ssl.TLSVersion.MAXIMUM_SUPPORTED
)
with self.assertRaises(ValueError):
ctx.minimum_version = ssl.TLSVersion.MINIMUM_SUPPORTED
with self.assertRaises(ValueError):
ctx.maximum_version = ssl.TLSVersion.TLSv1
@unittest.skipUnless(
hasattr(ssl.SSLContext, 'security_level'),
"requires OpenSSL >= 1.1.0"
)
def test_security_level(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
# The default security callback allows levels between 0 and 5,
# with OpenSSL defaulting to 1; however, some vendors override the
# default value (e.g. Debian defaults to 2).
security_level_range = {
0,
1, # OpenSSL default
2, # Debian
3,
4,
5,
}
self.assertIn(ctx.security_level, security_level_range)
def test_verify_flags(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# default value
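# VERIFY_X509_TRUSTED_FIRST may be missing on older OpenSSL builds,
# so fall back to 0 when the flag is unavailable.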
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT | tf)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_CHAIN)
ctx.verify_flags = ssl.VERIFY_DEFAULT
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT)
ctx.verify_flags = ssl.VERIFY_ALLOW_PROXY_CERTS
self.assertEqual(ctx.verify_flags, ssl.VERIFY_ALLOW_PROXY_CERTS)
# supports any value
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT
self.assertEqual(ctx.verify_flags,
ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT)
with self.assertRaises(TypeError):
ctx.verify_flags = None
def test_load_cert_chain(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# Combined key and cert in a single file
ctx.load_cert_chain(CERTFILE, keyfile=None)
ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE)
self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE)
with self.assertRaises(OSError) as cm:
ctx.load_cert_chain(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(BADCERT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(EMPTYCERT)
# Separate key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_cert_chain(ONLYCERT, ONLYKEY)
ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYCERT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYKEY)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT)
# Mismatching key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
with self.assertRaisesRegex(ssl.SSLError, "key values mismatch"):
ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY)
# Password protected key and cert
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=bytearray(KEY_PASSWORD.encode()))
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD)
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode())
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED,
bytearray(KEY_PASSWORD.encode()))
with self.assertRaisesRegex(TypeError, "should be a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=True)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass")
with self.assertRaisesRegex(ValueError, "cannot be longer"):
# OpenSSL has a fixed limit on the password buffer
# (PEM_BUFSIZE, generally 1 KiB), so pass a password
# larger than this.
ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400)
# Password callback
def getpass_unicode():
return KEY_PASSWORD
def getpass_bytes():
return KEY_PASSWORD.encode()
def getpass_bytearray():
return bytearray(KEY_PASSWORD.encode())
def getpass_badpass():
return "badpass"
def getpass_huge():
return b'a' * (1024 * 1024)
def getpass_bad_type():
return 9
def getpass_exception():
raise Exception('getpass error')
class GetPassCallable:
def __call__(self):
return KEY_PASSWORD
def getpass(self):
return KEY_PASSWORD
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=GetPassCallable().getpass)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass)
with self.assertRaisesRegex(ValueError, "cannot be longer"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge)
with self.assertRaisesRegex(TypeError, "must return a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type)
with self.assertRaisesRegex(Exception, "getpass error"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception)
# Make sure the password function isn't called if it isn't needed
ctx.load_cert_chain(CERTFILE, password=getpass_exception)
def test_load_verify_locations(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_verify_locations(CERTFILE)
ctx.load_verify_locations(cafile=CERTFILE, capath=None)
ctx.load_verify_locations(BYTES_CERTFILE)
ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None)
self.assertRaises(TypeError, ctx.load_verify_locations)
self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None)
with self.assertRaises(OSError) as cm:
ctx.load_verify_locations(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_verify_locations(BADCERT)
ctx.load_verify_locations(CERTFILE, CAPATH)
ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH)
# Issue #10989: crash if the second argument type is invalid
self.assertRaises(TypeError, ctx.load_verify_locations, None, True)
def test_load_verify_cadata(self):
# test cadata
with open(CAFILE_CACERT) as f:
cacert_pem = f.read()
cacert_der = ssl.PEM_cert_to_DER_cert(cacert_pem)
with open(CAFILE_NEURONIO) as f:
neuronio_pem = f.read()
neuronio_der = ssl.PEM_cert_to_DER_cert(neuronio_pem)
# test PEM
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 0)
ctx.load_verify_locations(cadata=cacert_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 1)
ctx.load_verify_locations(cadata=neuronio_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# cert already in hash table
ctx.load_verify_locations(cadata=neuronio_pem)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# combined
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
combined = "\n".join((cacert_pem, neuronio_pem))
ctx.load_verify_locations(cadata=combined)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# with junk around the certs
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
combined = ["head", cacert_pem, "other", neuronio_pem, "again",
neuronio_pem, "tail"]
ctx.load_verify_locations(cadata="\n".join(combined))
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# test DER
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(cadata=cacert_der)
ctx.load_verify_locations(cadata=neuronio_der)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# cert already in hash table
ctx.load_verify_locations(cadata=cacert_der)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# combined
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
combined = b"".join((cacert_der, neuronio_der))
ctx.load_verify_locations(cadata=combined)
self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
# error cases
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertRaises(TypeError, ctx.load_verify_locations, cadata=object)
with self.assertRaisesRegex(
ssl.SSLError,
"no start line: cadata does not contain a certificate"
):
ctx.load_verify_locations(cadata="broken")
with self.assertRaisesRegex(
ssl.SSLError,
"not enough data: cadata does not contain a certificate"
):
ctx.load_verify_locations(cadata=b"broken")
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_load_dh_params(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.load_dh_params(DHFILE)
if os.name != 'nt':
ctx.load_dh_params(BYTES_DHFILE)
self.assertRaises(TypeError, ctx.load_dh_params)
self.assertRaises(TypeError, ctx.load_dh_params, None)
with self.assertRaises(FileNotFoundError) as cm:
ctx.load_dh_params(NONEXISTINGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(ssl.SSLError) as cm:
ctx.load_dh_params(CERTFILE)
def test_session_stats(self):
for proto in {ssl.PROTOCOL_TLS_CLIENT, ssl.PROTOCOL_TLS_SERVER}:
ctx = ssl.SSLContext(proto)
self.assertEqual(ctx.session_stats(), {
'number': 0,
'connect': 0,
'connect_good': 0,
'connect_renegotiate': 0,
'accept': 0,
'accept_good': 0,
'accept_renegotiate': 0,
'hits': 0,
'misses': 0,
'timeouts': 0,
'cache_full': 0,
})
def test_set_default_verify_paths(self):
# There's not much we can do to test that it acts as expected,
# so just check it doesn't crash or raise an exception.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.set_default_verify_paths()
@unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build")
def test_set_ecdh_curve(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.set_ecdh_curve("prime256v1")
ctx.set_ecdh_curve(b"prime256v1")
self.assertRaises(TypeError, ctx.set_ecdh_curve)
self.assertRaises(TypeError, ctx.set_ecdh_curve, None)
self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo")
self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo")
def test_sni_callback(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# set_servername_callback expects a callable, or None
self.assertRaises(TypeError, ctx.set_servername_callback)
self.assertRaises(TypeError, ctx.set_servername_callback, 4)
self.assertRaises(TypeError, ctx.set_servername_callback, "")
self.assertRaises(TypeError, ctx.set_servername_callback, ctx)
def dummycallback(sock, servername, ctx):
pass
ctx.set_servername_callback(None)
ctx.set_servername_callback(dummycallback)
def test_sni_callback_refcycle(self):
# Reference cycles through the servername callback are detected
# and cleared.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
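# The default argument value captures ctx, creating a reference cycle:
# ctx -> servername callback -> ctx.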
def dummycallback(sock, servername, ctx, cycle=ctx):
pass
ctx.set_servername_callback(dummycallback)
wr = weakref.ref(ctx)
del ctx, dummycallback
gc.collect()
self.assertIs(wr(), None)
def test_cert_store_stats(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 0})
ctx.load_cert_chain(CERTFILE)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 0})
ctx.load_verify_locations(CERTFILE)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 0, 'crl': 0, 'x509': 1})
ctx.load_verify_locations(CAFILE_CACERT)
self.assertEqual(ctx.cert_store_stats(),
{'x509_ca': 1, 'crl': 0, 'x509': 2})
def test_get_ca_certs(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.get_ca_certs(), [])
# CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE
ctx.load_verify_locations(CERTFILE)
self.assertEqual(ctx.get_ca_certs(), [])
# but CAFILE_CACERT is a CA cert
ctx.load_verify_locations(CAFILE_CACERT)
self.assertEqual(ctx.get_ca_certs(),
[{'issuer': ((('organizationName', 'Root CA'),),
(('organizationalUnitName', 'http://www.cacert.org'),),
(('commonName', 'CA Cert Signing Authority'),),
(('emailAddress', 'support@cacert.org'),)),
'notAfter': 'Mar 29 12:29:49 2033 GMT',
'notBefore': 'Mar 30 12:29:49 2003 GMT',
'serialNumber': '00',
'crlDistributionPoints': ('https://www.cacert.org/revoke.crl',),
'subject': ((('organizationName', 'Root CA'),),
(('organizationalUnitName', 'http://www.cacert.org'),),
(('commonName', 'CA Cert Signing Authority'),),
(('emailAddress', 'support@cacert.org'),)),
'version': 3}])
with open(CAFILE_CACERT) as f:
pem = f.read()
der = ssl.PEM_cert_to_DER_cert(pem)
self.assertEqual(ctx.get_ca_certs(True), [der])
def test_load_default_certs(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_default_certs()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_default_certs(ssl.Purpose.SERVER_AUTH)
ctx.load_default_certs()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_default_certs(ssl.Purpose.CLIENT_AUTH)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertRaises(TypeError, ctx.load_default_certs, None)
self.assertRaises(TypeError, ctx.load_default_certs, 'SERVER_AUTH')
@unittest.skipIf(sys.platform == "win32", "not-Windows specific")
def test_load_default_certs_env(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with os_helper.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
ctx.load_default_certs()
self.assertEqual(ctx.cert_store_stats(), {"crl": 0, "x509": 1, "x509_ca": 0})
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
@unittest.skipIf(hasattr(sys, "gettotalrefcount"), "Debug build does not share environment between CRTs")
def test_load_default_certs_env_windows(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_default_certs()
stats = ctx.cert_store_stats()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with os_helper.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
ctx.load_default_certs()
stats["x509"] += 1
self.assertEqual(ctx.cert_store_stats(), stats)
def _assert_context_options(self, ctx):
self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
if OP_NO_COMPRESSION != 0:
self.assertEqual(ctx.options & OP_NO_COMPRESSION,
OP_NO_COMPRESSION)
if OP_SINGLE_DH_USE != 0:
self.assertEqual(ctx.options & OP_SINGLE_DH_USE,
OP_SINGLE_DH_USE)
if OP_SINGLE_ECDH_USE != 0:
self.assertEqual(ctx.options & OP_SINGLE_ECDH_USE,
OP_SINGLE_ECDH_USE)
if OP_CIPHER_SERVER_PREFERENCE != 0:
self.assertEqual(ctx.options & OP_CIPHER_SERVER_PREFERENCE,
OP_CIPHER_SERVER_PREFERENCE)
def test_create_default_context(self):
ctx = ssl.create_default_context()
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertTrue(ctx.check_hostname)
self._assert_context_options(ctx)
with open(SIGNING_CA) as f:
cadata = f.read()
ctx = ssl.create_default_context(cafile=SIGNING_CA, capath=CAPATH,
cadata=cadata)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self._assert_context_options(ctx)
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS_SERVER)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self._assert_context_options(ctx)
def test__create_stdlib_context(self):
ctx = ssl._create_stdlib_context()
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self.assertFalse(ctx.check_hostname)
self._assert_context_options(ctx)
if has_tls_protocol(ssl.PROTOCOL_TLSv1):
with warnings_helper.check_warnings():
ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self._assert_context_options(ctx)
with warnings_helper.check_warnings():
ctx = ssl._create_stdlib_context(
ssl.PROTOCOL_TLSv1_2,
cert_reqs=ssl.CERT_REQUIRED,
check_hostname=True
)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1_2)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertTrue(ctx.check_hostname)
self._assert_context_options(ctx)
ctx = ssl._create_stdlib_context(purpose=ssl.Purpose.CLIENT_AUTH)
self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLS_SERVER)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
self._assert_context_options(ctx)
def test_check_hostname(self):
with warnings_helper.check_warnings():
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS)
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
# Auto set CERT_REQUIRED
ctx.check_hostname = True
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
# Changing verify_mode does not affect check_hostname
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
ctx.check_hostname = False
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
# Auto set
ctx.check_hostname = True
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_OPTIONAL
ctx.check_hostname = False
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
# keep CERT_OPTIONAL
ctx.check_hostname = True
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
# Cannot set CERT_NONE with check_hostname enabled
with self.assertRaises(ValueError):
ctx.verify_mode = ssl.CERT_NONE
ctx.check_hostname = False
self.assertFalse(ctx.check_hostname)
ctx.verify_mode = ssl.CERT_NONE
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
def test_context_client_server(self):
# PROTOCOL_TLS_CLIENT has sane defaults
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
# PROTOCOL_TLS_SERVER has different but also sane defaults
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
self.assertFalse(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
def test_context_custom_class(self):
class MySSLSocket(ssl.SSLSocket):
pass
class MySSLObject(ssl.SSLObject):
pass
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
ctx.sslsocket_class = MySSLSocket
ctx.sslobject_class = MySSLObject
with ctx.wrap_socket(socket.socket(), server_side=True) as sock:
self.assertIsInstance(sock, MySSLSocket)
obj = ctx.wrap_bio(ssl.MemoryBIO(), ssl.MemoryBIO(), server_side=True)
self.assertIsInstance(obj, MySSLObject)
def test_num_tickets(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
self.assertEqual(ctx.num_tickets, 2)
ctx.num_tickets = 1
self.assertEqual(ctx.num_tickets, 1)
ctx.num_tickets = 0
self.assertEqual(ctx.num_tickets, 0)
with self.assertRaises(ValueError):
ctx.num_tickets = -1
with self.assertRaises(TypeError):
ctx.num_tickets = None
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.num_tickets, 2)
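# num_tickets can only be changed on server contexts; client
# contexts reject the assignment.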
with self.assertRaises(ValueError):
ctx.num_tickets = 1
class SSLErrorTests(unittest.TestCase):
def test_str(self):
# The str() of an SSLError doesn't include the errno
e = ssl.SSLError(1, "foo")
self.assertEqual(str(e), "foo")
self.assertEqual(e.errno, 1)
# Same for a subclass
e = ssl.SSLZeroReturnError(1, "foo")
self.assertEqual(str(e), "foo")
self.assertEqual(e.errno, 1)
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_lib_reason(self):
# Test the library and reason attributes
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with self.assertRaises(ssl.SSLError) as cm:
ctx.load_dh_params(CERTFILE)
self.assertEqual(cm.exception.library, 'PEM')
self.assertEqual(cm.exception.reason, 'NO_START_LINE')
s = str(cm.exception)
self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s)
def test_subclass(self):
# Check that the appropriate SSLError subclass is raised
# (this only tests one of them)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
with socket.create_server(("127.0.0.1", 0)) as s:
c = socket.create_connection(s.getsockname())
c.setblocking(False)
with ctx.wrap_socket(c, False, do_handshake_on_connect=False) as c:
with self.assertRaises(ssl.SSLWantReadError) as cm:
c.do_handshake()
s = str(cm.exception)
self.assertTrue(s.startswith("The operation did not complete (read)"), s)
# For compatibility
self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ)
def test_bad_server_hostname(self):
ctx = ssl.create_default_context()
with self.assertRaises(ValueError):
ctx.wrap_bio(ssl.MemoryBIO(), ssl.MemoryBIO(),
server_hostname="")
with self.assertRaises(ValueError):
ctx.wrap_bio(ssl.MemoryBIO(), ssl.MemoryBIO(),
server_hostname=".example.org")
with self.assertRaises(TypeError):
ctx.wrap_bio(ssl.MemoryBIO(), ssl.MemoryBIO(),
server_hostname="example.org\x00evil.com")
class MemoryBIOTests(unittest.TestCase):
def test_read_write(self):
bio = ssl.MemoryBIO()
bio.write(b'foo')
self.assertEqual(bio.read(), b'foo')
self.assertEqual(bio.read(), b'')
bio.write(b'foo')
bio.write(b'bar')
self.assertEqual(bio.read(), b'foobar')
self.assertEqual(bio.read(), b'')
bio.write(b'baz')
self.assertEqual(bio.read(2), b'ba')
self.assertEqual(bio.read(1), b'z')
self.assertEqual(bio.read(1), b'')
def test_eof(self):
bio = ssl.MemoryBIO()
self.assertFalse(bio.eof)
self.assertEqual(bio.read(), b'')
self.assertFalse(bio.eof)
bio.write(b'foo')
self.assertFalse(bio.eof)
bio.write_eof()
self.assertFalse(bio.eof)
self.assertEqual(bio.read(2), b'fo')
self.assertFalse(bio.eof)
self.assertEqual(bio.read(1), b'o')
self.assertTrue(bio.eof)
self.assertEqual(bio.read(), b'')
self.assertTrue(bio.eof)
def test_pending(self):
bio = ssl.MemoryBIO()
self.assertEqual(bio.pending, 0)
bio.write(b'foo')
self.assertEqual(bio.pending, 3)
for i in range(3):
bio.read(1)
self.assertEqual(bio.pending, 3-i-1)
for i in range(3):
bio.write(b'x')
self.assertEqual(bio.pending, i+1)
bio.read()
self.assertEqual(bio.pending, 0)
def test_buffer_types(self):
bio = ssl.MemoryBIO()
bio.write(b'foo')
self.assertEqual(bio.read(), b'foo')
bio.write(bytearray(b'bar'))
self.assertEqual(bio.read(), b'bar')
bio.write(memoryview(b'baz'))
self.assertEqual(bio.read(), b'baz')
def test_error_types(self):
bio = ssl.MemoryBIO()
self.assertRaises(TypeError, bio.write, 'foo')
self.assertRaises(TypeError, bio.write, None)
self.assertRaises(TypeError, bio.write, True)
self.assertRaises(TypeError, bio.write, 1)
class SSLObjectTests(unittest.TestCase):
def test_private_init(self):
bio = ssl.MemoryBIO()
with self.assertRaisesRegex(TypeError, "public constructor"):
ssl.SSLObject(bio, bio)
def test_unwrap(self):
client_ctx, server_ctx, hostname = testing_context()
c_in = ssl.MemoryBIO()
c_out = ssl.MemoryBIO()
s_in = ssl.MemoryBIO()
s_out = ssl.MemoryBIO()
client = client_ctx.wrap_bio(c_in, c_out, server_hostname=hostname)
server = server_ctx.wrap_bio(s_in, s_out, server_side=True)
# Loop on the handshake for a bit to get it settled
for _ in range(5):
try:
client.do_handshake()
except ssl.SSLWantReadError:
pass
if c_out.pending:
s_in.write(c_out.read())
try:
server.do_handshake()
except ssl.SSLWantReadError:
pass
if s_out.pending:
c_in.write(s_out.read())
# Now the handshakes should be complete (don't raise WantReadError)
client.do_handshake()
server.do_handshake()
# Now if we unwrap one side unilaterally, it should send close-notify
# and raise WantReadError:
with self.assertRaises(ssl.SSLWantReadError):
client.unwrap()
# But server.unwrap() does not raise, because it reads the client's
# close-notify:
s_in.write(c_out.read())
server.unwrap()
# And now that the client gets the server's close-notify, it doesn't
# raise either.
c_in.write(s_out.read())
client.unwrap()
class SimpleBackgroundTests(unittest.TestCase):
"""Tests that connect to a simple server running in the background"""
def setUp(self):
self.server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
self.server_context.load_cert_chain(SIGNED_CERTFILE)
server = ThreadedEchoServer(context=self.server_context)
self.enterContext(server)
self.server_addr = (HOST, server.port)
def test_connect(self):
with test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE) as s:
s.connect(self.server_addr)
self.assertEqual({}, s.getpeercert())
self.assertFalse(s.server_side)
# this should succeed because we specify the root cert
with test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SIGNING_CA) as s:
s.connect(self.server_addr)
self.assertTrue(s.getpeercert())
self.assertFalse(s.server_side)
def test_connect_fail(self):
# This should fail because we have no verification certs. Connection
# failure crashes ThreadedEchoServer, so run this in an independent
# test method.
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED)
self.addCleanup(s.close)
self.assertRaisesRegex(ssl.SSLError, "certificate verify failed",
s.connect, self.server_addr)
def test_connect_ex(self):
# Issue #11326: check connect_ex() implementation
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SIGNING_CA)
self.addCleanup(s.close)
self.assertEqual(0, s.connect_ex(self.server_addr))
self.assertTrue(s.getpeercert())
def test_non_blocking_connect_ex(self):
# Issue #11326: non-blocking connect_ex() should allow handshake
# to proceed after the socket gets ready.
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=SIGNING_CA,
do_handshake_on_connect=False)
self.addCleanup(s.close)
s.setblocking(False)
rc = s.connect_ex(self.server_addr)
# EWOULDBLOCK under Windows, EINPROGRESS elsewhere
self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK))
# Wait for connect to finish
select.select([], [s], [], 5.0)
# Non-blocking handshake
while True:
try:
s.do_handshake()
break
except ssl.SSLWantReadError:
select.select([s], [], [], 5.0)
except ssl.SSLWantWriteError:
select.select([], [s], [], 5.0)
# SSL established
self.assertTrue(s.getpeercert())
def test_connect_with_context(self):
# Same as test_connect, but with a separately created context
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s:
s.connect(self.server_addr)
self.assertEqual({}, s.getpeercert())
# Same with a server hostname
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname="dummy") as s:
s.connect(self.server_addr)
ctx.verify_mode = ssl.CERT_REQUIRED
# This should succeed because we specify the root cert
ctx.load_verify_locations(SIGNING_CA)
with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
def test_connect_with_context_fail(self):
# This should fail because we have no verification certs. Connection
# failure crashes ThreadedEchoServer, so run this in an independent
# test method.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
s = ctx.wrap_socket(
socket.socket(socket.AF_INET),
server_hostname=SIGNED_CERTFILE_HOSTNAME
)
self.addCleanup(s.close)
self.assertRaisesRegex(ssl.SSLError, "certificate verify failed",
s.connect, self.server_addr)
def test_connect_capath(self):
# Verify server certificates using the `capath` argument
# NOTE: the subject hashing algorithm has been changed between
# OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must
# contain both versions of each certificate (same content, different
# filename) for this test to be portable across OpenSSL releases.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(capath=CAPATH)
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname=SIGNED_CERTFILE_HOSTNAME) as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
# Same with a bytes `capath` argument
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(capath=BYTES_CAPATH)
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname=SIGNED_CERTFILE_HOSTNAME) as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
def test_connect_cadata(self):
with open(SIGNING_CA) as f:
pem = f.read()
der = ssl.PEM_cert_to_DER_cert(pem)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(cadata=pem)
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname=SIGNED_CERTFILE_HOSTNAME) as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
# same with DER
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(cadata=der)
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname=SIGNED_CERTFILE_HOSTNAME) as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
@unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows")
def test_makefile_close(self):
# Issue #5238: creating a file-like object with makefile() shouldn't
# delay closing the underlying "real socket" (here tested with its
# file descriptor, hence skipping the test under Windows).
ss = test_wrap_socket(socket.socket(socket.AF_INET))
ss.connect(self.server_addr)
fd = ss.fileno()
f = ss.makefile()
f.close()
# The fd is still open
os.read(fd, 0)
# Closing the SSL socket should close the fd too
ss.close()
gc.collect()
with self.assertRaises(OSError) as e:
os.read(fd, 0)
self.assertEqual(e.exception.errno, errno.EBADF)
def test_non_blocking_handshake(self):
s = socket.socket(socket.AF_INET)
s.connect(self.server_addr)
s.setblocking(False)
s = test_wrap_socket(s,
cert_reqs=ssl.CERT_NONE,
do_handshake_on_connect=False)
self.addCleanup(s.close)
count = 0
while True:
try:
count += 1
s.do_handshake()
break
except ssl.SSLWantReadError:
select.select([s], [], [])
except ssl.SSLWantWriteError:
select.select([], [s], [])
if support.verbose:
sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)
def test_get_server_certificate(self):
_test_get_server_certificate(self, *self.server_addr, cert=SIGNING_CA)
def test_get_server_certificate_sni(self):
host, port = self.server_addr
server_names = []
# We store servername_cb arguments to make sure they match the host
def servername_cb(ssl_sock, server_name, initial_context):
server_names.append(server_name)
self.server_context.set_servername_callback(servername_cb)
pem = ssl.get_server_certificate((host, port))
if not pem:
self.fail("No server certificate on %s:%s!" % (host, port))
pem = ssl.get_server_certificate((host, port), ca_certs=SIGNING_CA)
if not pem:
self.fail("No server certificate on %s:%s!" % (host, port))
if support.verbose:
sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port, pem))
self.assertEqual(server_names, [host, host])
def test_get_server_certificate_fail(self):
# Connection failure crashes ThreadedEchoServer, so run this in an
# independent test method
_test_get_server_certificate_fail(self, *self.server_addr)
def test_get_server_certificate_timeout(self):
def servername_cb(ssl_sock, server_name, initial_context):
time.sleep(0.2)
self.server_context.set_servername_callback(servername_cb)
with self.assertRaises(socket.timeout):
ssl.get_server_certificate(self.server_addr, ca_certs=SIGNING_CA,
timeout=0.1)
def test_ciphers(self):
with test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="ALL") as s:
s.connect(self.server_addr)
with test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT") as s:
s.connect(self.server_addr)
# Error checking can happen at instantiation or when connecting
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
with socket.socket(socket.AF_INET) as sock:
s = test_wrap_socket(sock,
cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx")
s.connect(self.server_addr)
def test_get_ca_certs_capath(self):
# capath certs are loaded on request
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.load_verify_locations(capath=CAPATH)
self.assertEqual(ctx.get_ca_certs(), [])
with ctx.wrap_socket(socket.socket(socket.AF_INET),
server_hostname='localhost') as s:
s.connect(self.server_addr)
cert = s.getpeercert()
self.assertTrue(cert)
self.assertEqual(len(ctx.get_ca_certs()), 1)
def test_context_setget(self):
# Check that the context of a connected socket can be replaced.
ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx1.load_verify_locations(capath=CAPATH)
ctx2 = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx2.load_verify_locations(capath=CAPATH)
s = socket.socket(socket.AF_INET)
with ctx1.wrap_socket(s, server_hostname='localhost') as ss:
ss.connect(self.server_addr)
self.assertIs(ss.context, ctx1)
self.assertIs(ss._sslobj.context, ctx1)
ss.context = ctx2
self.assertIs(ss.context, ctx2)
self.assertIs(ss._sslobj.context, ctx2)
def ssl_io_loop(self, sock, incoming, outgoing, func, *args, **kwargs):
# A simple I/O loop. Call func(*args) and, depending on the error we
# get (WANT_READ or WANT_WRITE), move data between the socket and the BIOs.
timeout = kwargs.get('timeout', support.SHORT_TIMEOUT)
deadline = time.monotonic() + timeout
count = 0
while True:
if time.monotonic() > deadline:
self.fail("timeout")
errno = None
count += 1
try:
ret = func(*args)
except ssl.SSLError as e:
if e.errno not in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
raise
errno = e.errno
# Get any data from the outgoing BIO irrespective of any error, and
# send it to the socket.
buf = outgoing.read()
sock.sendall(buf)
# If there's no error, we're done. For WANT_READ, we need to get
# data from the socket and put it in the incoming BIO.
if errno is None:
break
elif errno == ssl.SSL_ERROR_WANT_READ:
buf = sock.recv(32768)
if buf:
incoming.write(buf)
else:
incoming.write_eof()
if support.verbose:
sys.stdout.write("Needed %d calls to complete %s().\n"
% (count, func.__name__))
return ret
def test_bio_handshake(self):
sock = socket.socket(socket.AF_INET)
self.addCleanup(sock.close)
sock.connect(self.server_addr)
incoming = ssl.MemoryBIO()
outgoing = ssl.MemoryBIO()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertTrue(ctx.check_hostname)
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.load_verify_locations(SIGNING_CA)
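# Positional args after the two BIOs: server_side=False, then the
# server_hostname used for certificate matching.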
sslobj = ctx.wrap_bio(incoming, outgoing, False,
SIGNED_CERTFILE_HOSTNAME)
self.assertIs(sslobj._sslobj.owner, sslobj)
self.assertIsNone(sslobj.cipher())
self.assertIsNone(sslobj.version())
self.assertIsNotNone(sslobj.shared_ciphers())
self.assertRaises(ValueError, sslobj.getpeercert)
if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES:
self.assertIsNone(sslobj.get_channel_binding('tls-unique'))
self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake)
self.assertTrue(sslobj.cipher())
self.assertIsNotNone(sslobj.shared_ciphers())
self.assertIsNotNone(sslobj.version())
self.assertTrue(sslobj.getpeercert())
if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES:
self.assertTrue(sslobj.get_channel_binding('tls-unique'))
try:
self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap)
except ssl.SSLSyscallError:
# If the server shuts down the TCP connection without sending a
# secure shutdown message, this is reported as SSL_ERROR_SYSCALL
pass
self.assertRaises(ssl.SSLError, sslobj.write, b'foo')
def test_bio_read_write_data(self):
sock = socket.socket(socket.AF_INET)
self.addCleanup(sock.close)
sock.connect(self.server_addr)
incoming = ssl.MemoryBIO()
outgoing = ssl.MemoryBIO()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
sslobj = ctx.wrap_bio(incoming, outgoing, False)
self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake)
req = b'FOO\n'
self.ssl_io_loop(sock, incoming, outgoing, sslobj.write, req)
buf = self.ssl_io_loop(sock, incoming, outgoing, sslobj.read, 1024)
self.assertEqual(buf, b'foo\n')
self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap)
@support.requires_resource('network')
class NetworkedTests(unittest.TestCase):
def test_timeout_connect_ex(self):
# Issue #12065: on a timeout, connect_ex() should return the original
# errno (mimicking the behaviour of non-SSL sockets).
with socket_helper.transient_internet(REMOTE_HOST):
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
do_handshake_on_connect=False)
self.addCleanup(s.close)
s.settimeout(0.0000001)
rc = s.connect_ex((REMOTE_HOST, 443))
if rc == 0:
self.skipTest("REMOTE_HOST responded too quickly")
elif rc == errno.ENETUNREACH:
self.skipTest("Network unreachable.")
self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK))
@unittest.skipUnless(socket_helper.IPV6_ENABLED, 'Needs IPv6')
def test_get_server_certificate_ipv6(self):
with socket_helper.transient_internet('ipv6.google.com'):
_test_get_server_certificate(self, 'ipv6.google.com', 443)
_test_get_server_certificate_fail(self, 'ipv6.google.com', 443)
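# Module-level helpers shared by SimpleBackgroundTests and NetworkedTests.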
def _test_get_server_certificate(test, host, port, cert=None):
pem = ssl.get_server_certificate((host, port))
if not pem:
test.fail("No server certificate on %s:%s!" % (host, port))
pem = ssl.get_server_certificate((host, port), ca_certs=cert)
if not pem:
test.fail("No server certificate on %s:%s!" % (host, port))
if support.verbose:
sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port ,pem))
def _test_get_server_certificate_fail(test, host, port):
try:
pem = ssl.get_server_certificate((host, port), ca_certs=CERTFILE)
except ssl.SSLError as x:
# should fail
if support.verbose:
sys.stdout.write("%s\n" % x)
else:
test.fail("Got server certificate %s for %s:%s!" % (pem, host, port))
from test.ssl_servers import make_https_server
class ThreadedEchoServer(threading.Thread):
class ConnectionHandler(threading.Thread):
"""A mildly complicated class, because we want it to work both
with and without the SSL wrapper around the socket connection, so
that we can test the STARTTLS functionality."""
def __init__(self, server, connsock, addr):
self.server = server
self.running = False
self.sock = connsock
self.addr = addr
self.sock.setblocking(True)
self.sslconn = None
threading.Thread.__init__(self)
self.daemon = True
def wrap_conn(self):
try:
self.sslconn = self.server.context.wrap_socket(
self.sock, server_side=True)
self.server.selected_alpn_protocols.append(self.sslconn.selected_alpn_protocol())
except (ConnectionResetError, BrokenPipeError, ConnectionAbortedError) as e:
# We treat ConnectionResetError as though it were an
# SSLError - OpenSSL on Ubuntu abruptly closes the
# connection when asked to use an unsupported protocol.
#
# BrokenPipeError is raised in TLS 1.3 mode, when OpenSSL
# tries to send session tickets after handshake.
# https://github.com/openssl/openssl/issues/6342
#
# ConnectionAbortedError is raised in TLS 1.3 mode, when OpenSSL
# tries to send session tickets after handshake when using WinSock.
self.server.conn_errors.append(str(e))
if self.server.chatty:
handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n")
self.running = False
self.close()
return False
except (ssl.SSLError, OSError) as e:
# OSError may occur with wrong protocols, e.g. both
# sides use PROTOCOL_TLS_SERVER.
#
# XXX Various errors can have happened here, for example
# a mismatching protocol version, an invalid certificate,
# or a low-level bug. This should be made more discriminating.
#
# bpo-31323: Store the exception as string to prevent
# a reference leak: server -> conn_errors -> exception
# -> traceback -> self (ConnectionHandler) -> server
self.server.conn_errors.append(str(e))
if self.server.chatty:
handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n")
# bpo-44229, bpo-43855, bpo-44237, and bpo-33450:
# Ignore spurious EPROTOTYPE returned by write() on macOS.
# See also http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
if e.errno != errno.EPROTOTYPE and sys.platform != "darwin":
self.running = False
self.server.stop()
self.close()
return False
else:
self.server.shared_ciphers.append(self.sslconn.shared_ciphers())
if self.server.context.verify_mode == ssl.CERT_REQUIRED:
cert = self.sslconn.getpeercert()
if support.verbose and self.server.chatty:
sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n")
cert_binary = self.sslconn.getpeercert(True)
if support.verbose and self.server.chatty:
if cert_binary is None:
sys.stdout.write(" client did not provide a cert\n")
else:
sys.stdout.write(f" cert binary is {len(cert_binary)}b\n")
cipher = self.sslconn.cipher()
if support.verbose and self.server.chatty:
sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n")
return True
def read(self):
if self.sslconn:
return self.sslconn.read()
else:
return self.sock.recv(1024)
def write(self, bytes):
if self.sslconn:
return self.sslconn.write(bytes)
else:
return self.sock.send(bytes)
def close(self):
if self.sslconn:
self.sslconn.close()
else:
self.sock.close()
def run(self):
self.running = True
if not self.server.starttls_server:
if not self.wrap_conn():
return
while self.running:
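# Line-based command protocol: special commands (STARTTLS, ENDTLS,
# PHA, CB tls-unique, ...) are handled below; anything else is
# echoed back lowercased.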
try:
msg = self.read()
stripped = msg.strip()
if not stripped:
# eof, so quit this handler
self.running = False
try:
self.sock = self.sslconn.unwrap()
except OSError:
# Many tests shut the TCP connection down
# without an SSL shutdown. This causes
# unwrap() to raise OSError with errno=0!
pass
else:
self.sslconn = None
self.close()
elif stripped == b'over':
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: client closed connection\n")
self.close()
return
elif (self.server.starttls_server and
stripped == b'STARTTLS'):
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read STARTTLS from client, sending OK...\n")
self.write(b"OK\n")
if not self.wrap_conn():
return
elif (self.server.starttls_server and self.sslconn
and stripped == b'ENDTLS'):
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read ENDTLS from client, sending OK...\n")
self.write(b"OK\n")
self.sock = self.sslconn.unwrap()
self.sslconn = None
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: connection is now unencrypted...\n")
elif stripped == b'CB tls-unique':
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: read CB tls-unique from client, sending our CB data...\n")
data = self.sslconn.get_channel_binding("tls-unique")
self.write(repr(data).encode("us-ascii") + b"\n")
elif stripped == b'PHA':
if support.verbose and self.server.connectionchatty:
sys.stdout.write(" server: initiating post handshake auth\n")
try:
self.sslconn.verify_client_post_handshake()
except ssl.SSLError as e:
self.write(repr(e).encode("us-ascii") + b"\n")
else:
self.write(b"OK\n")
elif stripped == b'HASCERT':
if self.sslconn.getpeercert() is not None:
self.write(b'TRUE\n')
else:
self.write(b'FALSE\n')
elif stripped == b'GETCERT':
cert = self.sslconn.getpeercert()
self.write(repr(cert).encode("us-ascii") + b"\n")
elif stripped == b'VERIFIEDCHAIN':
certs = self.sslconn._sslobj.get_verified_chain()
self.write(len(certs).to_bytes(1, "big") + b"\n")
elif stripped == b'UNVERIFIEDCHAIN':
certs = self.sslconn._sslobj.get_unverified_chain()
self.write(len(certs).to_bytes(1, "big") + b"\n")
else:
if (support.verbose and
self.server.connectionchatty):
ctype = (self.sslconn and "encrypted") or "unencrypted"
sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
% (msg, ctype, msg.lower(), ctype))
self.write(msg.lower())
except OSError as e:
# handles SSLError and socket errors
if self.server.chatty and support.verbose:
if isinstance(e, ConnectionError):
# OpenSSL 1.1.1 sometimes raises
# ConnectionResetError when connection is not
# shut down gracefully.
print(
f" Connection reset by peer: {self.addr}"
)
else:
handle_error("Test server failure:\n")
try:
self.write(b"ERROR\n")
except OSError:
pass
self.close()
self.running = False
# normally, we'd just stop here, but for the test
# harness, we want to stop the server
self.server.stop()
def __init__(self, certificate=None, ssl_version=None,
certreqs=None, cacerts=None,
chatty=True, connectionchatty=False, starttls_server=False,
alpn_protocols=None,
ciphers=None, context=None):
if context:
self.context = context
else:
self.context = ssl.SSLContext(ssl_version
if ssl_version is not None
else ssl.PROTOCOL_TLS_SERVER)
self.context.verify_mode = (certreqs if certreqs is not None
else ssl.CERT_NONE)
if cacerts:
self.context.load_verify_locations(cacerts)
if certificate:
self.context.load_cert_chain(certificate)
if alpn_protocols:
self.context.set_alpn_protocols(alpn_protocols)
if ciphers:
self.context.set_ciphers(ciphers)
self.chatty = chatty
self.connectionchatty = connectionchatty
self.starttls_server = starttls_server
self.sock = socket.socket()
self.port = socket_helper.bind_port(self.sock)
self.flag = None
self.active = False
self.selected_alpn_protocols = []
self.shared_ciphers = []
self.conn_errors = []
threading.Thread.__init__(self)
self.daemon = True
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
self.stop()
self.join()
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
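# Short accept() timeout so the loop regularly re-checks self.active
# and can shut down promptly.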
self.sock.settimeout(1.0)
self.sock.listen(5)
self.active = True
if self.flag:
# signal an event
self.flag.set()
while self.active:
try:
newconn, connaddr = self.sock.accept()
if support.verbose and self.chatty:
sys.stdout.write(' server: new connection from '
+ repr(connaddr) + '\n')
handler = self.ConnectionHandler(self, newconn, connaddr)
handler.start()
handler.join()
except TimeoutError as e:
if support.verbose:
sys.stdout.write(f' connection timeout {e!r}\n')
except KeyboardInterrupt:
self.stop()
except BaseException as e:
if support.verbose and self.chatty:
sys.stdout.write(
' connection handling failed: ' + repr(e) + '\n')
self.close()
def close(self):
if self.sock is not None:
self.sock.close()
self.sock = None
def stop(self):
self.active = False
class AsyncoreEchoServer(threading.Thread):
# this one's based on asyncore.dispatcher
class EchoServer(asyncore.dispatcher):
class ConnectionHandler(asyncore.dispatcher_with_send):
def __init__(self, conn, certfile):
self.socket = test_wrap_socket(conn, server_side=True,
certfile=certfile,
do_handshake_on_connect=False)
asyncore.dispatcher_with_send.__init__(self, self.socket)
self._ssl_accepting = True
self._do_ssl_handshake()
def readable(self):
if isinstance(self.socket, ssl.SSLSocket):
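# Drain any decrypted data already buffered in the SSL layer;
# select() only sees the raw socket and would otherwise miss it.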
while self.socket.pending() > 0:
self.handle_read_event()
return True
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except (ssl.SSLWantReadError, ssl.SSLWantWriteError):
return
except ssl.SSLEOFError:
return self.handle_close()
except ssl.SSLError:
raise
except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self._ssl_accepting = False
def handle_read(self):
if self._ssl_accepting:
self._do_ssl_handshake()
else:
data = self.recv(1024)
if support.verbose:
sys.stdout.write(" server: read %s from client\n" % repr(data))
if not data:
self.close()
else:
self.send(data.lower())
def handle_close(self):
self.close()
if support.verbose:
sys.stdout.write(" server: closed connection %s\n" % self.socket)
def handle_error(self):
raise
def __init__(self, certfile):
self.certfile = certfile
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = socket_helper.bind_port(sock, '')
asyncore.dispatcher.__init__(self, sock)
self.listen(5)
def handle_accepted(self, sock_obj, addr):
if support.verbose:
sys.stdout.write(" server: new connection from %s:%s\n" %addr)
self.ConnectionHandler(sock_obj, self.certfile)
def handle_error(self):
raise
def __init__(self, certfile):
self.flag = None
self.active = False
self.server = self.EchoServer(certfile)
self.port = self.server.port
threading.Thread.__init__(self)
self.daemon = True
def __str__(self):
return "<%s %s>" % (self.__class__.__name__, self.server)
def __enter__(self):
self.start(threading.Event())
self.flag.wait()
return self
def __exit__(self, *args):
if support.verbose:
sys.stdout.write(" cleanup: stopping server.\n")
self.stop()
if support.verbose:
sys.stdout.write(" cleanup: joining server thread.\n")
self.join()
if support.verbose:
sys.stdout.write(" cleanup: successfully joined.\n")
# make sure that ConnectionHandler is removed from socket_map
asyncore.close_all(ignore_all=True)
def start(self, flag=None):
self.flag = flag
threading.Thread.start(self)
def run(self):
self.active = True
if self.flag:
self.flag.set()
while self.active:
try:
asyncore.loop(1)
except:
pass
def stop(self):
self.active = False
self.server.close()
def server_params_test(client_context, server_context, indata=b"FOO\n",
chatty=True, connectionchatty=False, sni_name=None,
session=None):
"""
Launch a server, connect a client to it and try various reads
and writes.
"""
stats = {}
server = ThreadedEchoServer(context=server_context,
chatty=chatty,
connectionchatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=sni_name, session=session) as s:
s.connect((HOST, server.port))
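# Exercise write() with bytes, bytearray and memoryview arguments.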
for arg in [indata, bytearray(indata), memoryview(indata)]:
if connectionchatty:
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
s.write(arg)
outdata = s.read()
if connectionchatty:
if support.verbose:
sys.stdout.write(" client: read %r\n" % outdata)
if outdata != indata.lower():
raise AssertionError(
"bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
% (outdata[:20], len(outdata),
indata[:20].lower(), len(indata)))
s.write(b"over\n")
if connectionchatty:
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
stats.update({
'compression': s.compression(),
'cipher': s.cipher(),
'peercert': s.getpeercert(),
'client_alpn_protocol': s.selected_alpn_protocol(),
'version': s.version(),
'session_reused': s.session_reused,
'session': s.session,
})
s.close()
stats['server_alpn_protocols'] = server.selected_alpn_protocols
stats['server_shared_ciphers'] = server.shared_ciphers
return stats
def try_protocol_combo(server_protocol, client_protocol, expect_success,
certsreqs=None, server_options=0, client_options=0):
"""
Try to SSL-connect using *client_protocol* to *server_protocol*.
If *expect_success* is true, assert that the connection succeeds,
if it's false, assert that the connection fails.
Also, if *expect_success* is a string, assert that it is the protocol
version actually used by the connection.
"""
if certsreqs is None:
certsreqs = ssl.CERT_NONE
certtype = {
ssl.CERT_NONE: "CERT_NONE",
ssl.CERT_OPTIONAL: "CERT_OPTIONAL",
ssl.CERT_REQUIRED: "CERT_REQUIRED",
}[certsreqs]
if support.verbose:
formatstr = (expect_success and " %s->%s %s\n") or " {%s->%s} %s\n"
sys.stdout.write(formatstr %
(ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol),
certtype))
with warnings_helper.check_warnings():
# ignore Deprecation warnings
client_context = ssl.SSLContext(client_protocol)
client_context.options |= client_options
server_context = ssl.SSLContext(server_protocol)
server_context.options |= server_options
min_version = PROTOCOL_TO_TLS_VERSION.get(client_protocol, None)
if (min_version is not None
# SSLContext.minimum_version is only available on recent OpenSSL
# (setter added in OpenSSL 1.1.0, getter added in OpenSSL 1.1.1)
and hasattr(server_context, 'minimum_version')
and server_protocol == ssl.PROTOCOL_TLS
and server_context.minimum_version > min_version
):
# If OpenSSL configuration is strict and requires more recent TLS
# version, we have to change the minimum to test old TLS versions.
with warnings_helper.check_warnings():
server_context.minimum_version = min_version
# NOTE: we must enable "ALL" ciphers on the client, otherwise an
# SSLv23 client will send an SSLv3 hello (rather than SSLv2)
# starting from OpenSSL 1.0.0 (see issue #8322).
if client_context.protocol == ssl.PROTOCOL_TLS:
client_context.set_ciphers("ALL")
seclevel_workaround(server_context, client_context)
for ctx in (client_context, server_context):
ctx.verify_mode = certsreqs
ctx.load_cert_chain(SIGNED_CERTFILE)
ctx.load_verify_locations(SIGNING_CA)
try:
stats = server_params_test(client_context, server_context,
chatty=False, connectionchatty=False)
# Protocol mismatch can result in either an SSLError, or a
# "Connection reset by peer" error.
except ssl.SSLError:
if expect_success:
raise
except OSError as e:
if expect_success or e.errno != errno.ECONNRESET:
raise
else:
if not expect_success:
raise AssertionError(
"Client protocol %s succeeded with server protocol %s!"
% (ssl.get_protocol_name(client_protocol),
ssl.get_protocol_name(server_protocol)))
elif (expect_success is not True
and expect_success != stats['version']):
raise AssertionError("version mismatch: expected %r, got %r"
% (expect_success, stats['version']))
class ThreadedTests(unittest.TestCase):
def test_echo(self):
"""Basic test of an SSL client connecting to a server"""
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_SERVER):
server_params_test(client_context=client_context,
server_context=server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
client_context.check_hostname = False
with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_CLIENT):
with self.assertRaises(ssl.SSLError) as e:
server_params_test(client_context=server_context,
server_context=client_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
self.assertIn(
'Cannot create a client socket with a PROTOCOL_TLS_SERVER context',
str(e.exception)
)
with self.subTest(client=ssl.PROTOCOL_TLS_SERVER, server=ssl.PROTOCOL_TLS_SERVER):
with self.assertRaises(ssl.SSLError) as e:
server_params_test(client_context=server_context,
server_context=server_context,
chatty=True, connectionchatty=True)
self.assertIn(
'Cannot create a client socket with a PROTOCOL_TLS_SERVER context',
str(e.exception)
)
with self.subTest(client=ssl.PROTOCOL_TLS_CLIENT, server=ssl.PROTOCOL_TLS_CLIENT):
with self.assertRaises(ssl.SSLError) as e:
server_params_test(client_context=server_context,
server_context=client_context,
chatty=True, connectionchatty=True)
self.assertIn(
'Cannot create a client socket with a PROTOCOL_TLS_SERVER context',
str(e.exception))
def test_getpeercert(self):
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
do_handshake_on_connect=False,
server_hostname=hostname) as s:
s.connect((HOST, server.port))
                # getpeercert() raises ValueError until the handshake
                # is done.
with self.assertRaises(ValueError):
s.getpeercert()
s.do_handshake()
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
cipher = s.cipher()
if support.verbose:
sys.stdout.write(pprint.pformat(cert) + '\n')
sys.stdout.write("Connection cipher is " + str(cipher) + '.\n')
if 'subject' not in cert:
self.fail("No subject field in certificate: %s." %
pprint.pformat(cert))
if ((('organizationName', 'Python Software Foundation'),)
not in cert['subject']):
self.fail(
"Missing or invalid 'organizationName' field in certificate subject; "
"should be 'Python Software Foundation'.")
self.assertIn('notBefore', cert)
self.assertIn('notAfter', cert)
before = ssl.cert_time_to_seconds(cert['notBefore'])
after = ssl.cert_time_to_seconds(cert['notAfter'])
self.assertLess(before, after)
def test_crl_check(self):
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
tf = getattr(ssl, "VERIFY_X509_TRUSTED_FIRST", 0)
self.assertEqual(client_context.verify_flags, ssl.VERIFY_DEFAULT | tf)
# VERIFY_DEFAULT should pass
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails
client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
with self.assertRaisesRegex(ssl.SSLError,
"certificate verify failed"):
s.connect((HOST, server.port))
# now load a CRL file. The CRL file is signed by the CA.
client_context.load_verify_locations(CRLFILE)
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
def test_check_hostname(self):
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
# correct hostname should verify
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# incorrect hostname should raise an exception
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname="invalid") as s:
with self.assertRaisesRegex(
ssl.CertificateError,
"Hostname mismatch, certificate is not valid for 'invalid'."):
s.connect((HOST, server.port))
# missing server_hostname arg should cause an exception, too
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with socket.socket() as s:
with self.assertRaisesRegex(ValueError,
"check_hostname requires server_hostname"):
client_context.wrap_socket(s)
@unittest.skipUnless(
ssl.HAS_NEVER_CHECK_COMMON_NAME, "test requires hostname_checks_common_name"
)
def test_hostname_checks_common_name(self):
client_context, server_context, hostname = testing_context()
assert client_context.hostname_checks_common_name
client_context.hostname_checks_common_name = False
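        # With common-name checks disabled, OpenSSL matches the hostname
        # against subjectAltName entries only, so the SAN-less cert used
        # further below must fail verification.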
# default cert has a SAN
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
client_context, server_context, hostname = testing_context(NOSANFILE)
client_context.hostname_checks_common_name = False
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
with self.assertRaises(ssl.SSLCertVerificationError):
s.connect((HOST, server.port))
def test_ecc_cert(self):
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.load_verify_locations(SIGNING_CA)
client_context.set_ciphers('ECDHE:ECDSA:!NULL:!aRSA')
hostname = SIGNED_CERTFILE_ECC_HOSTNAME
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# load ECC cert
server_context.load_cert_chain(SIGNED_CERTFILE_ECC)
# correct hostname should verify
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
                cipher = s.cipher()[0].split('-')
                if s.version() != 'TLSv1.3':  # 1.3 names omit kx/auth
                    self.assertEqual(cipher[:2], ['ECDHE', 'ECDSA'])
def test_dual_rsa_ecc(self):
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.load_verify_locations(SIGNING_CA)
# TODO: fix TLSv1.3 once SSLContext can restrict signature
# algorithms.
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
# only ECDSA certs
client_context.set_ciphers('ECDHE:ECDSA:!NULL:!aRSA')
hostname = SIGNED_CERTFILE_ECC_HOSTNAME
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
# load ECC and RSA key/cert pairs
server_context.load_cert_chain(SIGNED_CERTFILE_ECC)
server_context.load_cert_chain(SIGNED_CERTFILE)
# correct hostname should verify
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
                cipher = s.cipher()[0].split('-')
                self.assertEqual(cipher[:2], ['ECDHE', 'ECDSA'])
def test_check_hostname_idn(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(IDNSANSFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_verify_locations(SIGNING_CA)
# correct hostname should verify, when specified in several
# different ways
idn_hostnames = [
('könig.idn.pythontest.net',
'xn--knig-5qa.idn.pythontest.net'),
('xn--knig-5qa.idn.pythontest.net',
'xn--knig-5qa.idn.pythontest.net'),
(b'xn--knig-5qa.idn.pythontest.net',
'xn--knig-5qa.idn.pythontest.net'),
('königsgäßchen.idna2003.pythontest.net',
'xn--knigsgsschen-lcb0w.idna2003.pythontest.net'),
('xn--knigsgsschen-lcb0w.idna2003.pythontest.net',
'xn--knigsgsschen-lcb0w.idna2003.pythontest.net'),
(b'xn--knigsgsschen-lcb0w.idna2003.pythontest.net',
'xn--knigsgsschen-lcb0w.idna2003.pythontest.net'),
# ('königsgäßchen.idna2008.pythontest.net',
# 'xn--knigsgchen-b4a3dun.idna2008.pythontest.net'),
('xn--knigsgchen-b4a3dun.idna2008.pythontest.net',
'xn--knigsgchen-b4a3dun.idna2008.pythontest.net'),
(b'xn--knigsgchen-b4a3dun.idna2008.pythontest.net',
'xn--knigsgchen-b4a3dun.idna2008.pythontest.net'),
]
for server_hostname, expected_hostname in idn_hostnames:
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket(),
server_hostname=server_hostname) as s:
self.assertEqual(s.server_hostname, expected_hostname)
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertEqual(s.server_hostname, expected_hostname)
self.assertTrue(cert, "Can't get peer certificate.")
# incorrect hostname should raise an exception
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket(),
server_hostname="python.example.org") as s:
with self.assertRaises(ssl.CertificateError):
s.connect((HOST, server.port))
def test_wrong_cert_tls12(self):
"""Connecting when the server rejects the client's certificate
Launch a server with CERT_REQUIRED, and check that trying to
connect to it with a wrong client certificate fails.
"""
client_context, server_context, hostname = testing_context()
# load client cert that is not signed by trusted CA
client_context.load_cert_chain(CERTFILE)
# require TLS client authentication
server_context.verify_mode = ssl.CERT_REQUIRED
# TLS 1.3 has different handshake
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
server = ThreadedEchoServer(
context=server_context, chatty=True, connectionchatty=True,
)
with server, \
client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
try:
# Expect either an SSL error about the server rejecting
# the connection, or a low-level connection reset (which
# sometimes happens on Windows)
s.connect((HOST, server.port))
except ssl.SSLError as e:
if support.verbose:
sys.stdout.write("\nSSLError is %r\n" % e)
except OSError as e:
if e.errno != errno.ECONNRESET:
raise
if support.verbose:
sys.stdout.write("\nsocket.error is %r\n" % e)
else:
self.fail("Use of invalid cert should have failed!")
@requires_tls_version('TLSv1_3')
def test_wrong_cert_tls13(self):
client_context, server_context, hostname = testing_context()
# load client cert that is not signed by trusted CA
client_context.load_cert_chain(CERTFILE)
server_context.verify_mode = ssl.CERT_REQUIRED
server_context.minimum_version = ssl.TLSVersion.TLSv1_3
client_context.minimum_version = ssl.TLSVersion.TLSv1_3
server = ThreadedEchoServer(
context=server_context, chatty=True, connectionchatty=True,
)
with server, \
client_context.wrap_socket(socket.socket(),
server_hostname=hostname,
suppress_ragged_eofs=False) as s:
s.connect((HOST, server.port))
with self.assertRaisesRegex(
ssl.SSLError,
'alert unknown ca|EOF occurred'
):
                # TLS 1.3 performs the client cert exchange after the handshake
s.write(b'data')
s.read(1000)
s.write(b'should have failed already')
s.read(1000)
def test_rude_shutdown(self):
"""A brutal shutdown of an SSL server should raise an OSError
in the client when attempting handshake.
"""
listener_ready = threading.Event()
listener_gone = threading.Event()
s = socket.socket()
port = socket_helper.bind_port(s, HOST)
# `listener` runs in a thread. It sits in an accept() until
# the main thread connects. Then it rudely closes the socket,
# and sets Event `listener_gone` to let the main thread know
# the socket is gone.
def listener():
s.listen()
listener_ready.set()
newsock, addr = s.accept()
newsock.close()
s.close()
listener_gone.set()
def connector():
listener_ready.wait()
with socket.socket() as c:
c.connect((HOST, port))
listener_gone.wait()
try:
ssl_sock = test_wrap_socket(c)
except OSError:
pass
else:
self.fail('connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
try:
connector()
finally:
t.join()
def test_ssl_cert_verify_error(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(SIGNED_CERTFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket(),
server_hostname=SIGNED_CERTFILE_HOSTNAME) as s:
try:
s.connect((HOST, server.port))
except ssl.SSLError as e:
msg = 'unable to get local issuer certificate'
self.assertIsInstance(e, ssl.SSLCertVerificationError)
self.assertEqual(e.verify_code, 20)
self.assertEqual(e.verify_message, msg)
self.assertIn(msg, repr(e))
self.assertIn('certificate verify failed', repr(e))
@requires_tls_version('SSLv2')
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLS, False)
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
# SSLv23 client with specific SSL options
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_TLSv1)
def test_PROTOCOL_TLS(self):
"""Connecting to an SSLv23 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
if has_tls_version('SSLv2'):
try:
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv2, True)
except OSError as x:
# this fails on some older versions of OpenSSL (0.9.7l, for instance)
if support.verbose:
sys.stdout.write(
" SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
% str(x))
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS, True)
if has_tls_version('TLSv1'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1, 'TLSv1')
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv3, False, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS, True, ssl.CERT_OPTIONAL)
if has_tls_version('TLSv1'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv3, False, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS, True, ssl.CERT_REQUIRED)
if has_tls_version('TLSv1'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
# Server with specific SSL options
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_SSLv3, False,
server_options=ssl.OP_NO_SSLv3)
# Will choose TLSv1
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS, True,
server_options=ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
if has_tls_version('TLSv1'):
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1, False,
server_options=ssl.OP_NO_TLSv1)
@requires_tls_version('SSLv3')
def test_protocol_sslv3(self):
"""Connecting to an SSLv3 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3')
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, 'SSLv3', ssl.CERT_REQUIRED)
if has_tls_version('SSLv2'):
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
@requires_tls_version('TLSv1')
def test_protocol_tlsv1(self):
"""Connecting to a TLSv1 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1')
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, 'TLSv1', ssl.CERT_REQUIRED)
if has_tls_version('SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_TLSv1)
@requires_tls_version('TLSv1_1')
def test_protocol_tlsv1_1(self):
"""Connecting to a TLSv1.1 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
if has_tls_version('SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False)
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_TLSv1_1)
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1_1, 'TLSv1.1')
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False)
@requires_tls_version('TLSv1_2')
def test_protocol_tlsv1_2(self):
"""Connecting to a TLSv1.2 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2',
server_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,
client_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,)
if has_tls_version('SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv2, False)
if has_tls_version('SSLv3'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLS, False,
client_options=ssl.OP_NO_TLSv1_2)
try_protocol_combo(ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLSv1_2, 'TLSv1.2')
if has_tls_protocol(ssl.PROTOCOL_TLSv1):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_2, False)
if has_tls_protocol(ssl.PROTOCOL_TLSv1_1):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_2, False)
def test_starttls(self):
"""Switching from clear text to encrypted and back again."""
msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6")
server = ThreadedEchoServer(CERTFILE,
starttls_server=True,
chatty=True,
connectionchatty=True)
wrapped = False
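        # 'wrapped' tracks whether traffic currently flows through the TLS
        # layer; 'conn' is bound once the first STARTTLS exchange succeeds.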
with server:
s = socket.socket()
s.setblocking(True)
s.connect((HOST, server.port))
if support.verbose:
sys.stdout.write("\n")
for indata in msgs:
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
if wrapped:
conn.write(indata)
outdata = conn.read()
else:
s.send(indata)
outdata = s.recv(1024)
msg = outdata.strip().lower()
if indata == b"STARTTLS" and msg.startswith(b"ok"):
# STARTTLS ok, switch to secure mode
if support.verbose:
sys.stdout.write(
" client: read %r from server, starting TLS...\n"
% msg)
conn = test_wrap_socket(s)
wrapped = True
elif indata == b"ENDTLS" and msg.startswith(b"ok"):
# ENDTLS ok, switch back to clear text
if support.verbose:
sys.stdout.write(
" client: read %r from server, ending TLS...\n"
% msg)
s = conn.unwrap()
wrapped = False
else:
if support.verbose:
sys.stdout.write(
" client: read %r from server\n" % msg)
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
if wrapped:
conn.write(b"over\n")
else:
s.send(b"over\n")
if wrapped:
conn.close()
else:
s.close()
def test_socketserver(self):
"""Using socketserver to create and manage SSL connections."""
server = make_https_server(self, certfile=SIGNED_CERTFILE)
# try to connect
if support.verbose:
sys.stdout.write('\n')
with open(CERTFILE, 'rb') as f:
d1 = f.read()
        d2 = b''
# now fetch the same data from the HTTPS server
url = 'https://localhost:%d/%s' % (
server.port, os.path.split(CERTFILE)[1])
context = ssl.create_default_context(cafile=SIGNING_CA)
f = urllib.request.urlopen(url, context=context)
try:
dlen = f.info().get("content-length")
if dlen and (int(dlen) > 0):
d2 = f.read(int(dlen))
if support.verbose:
sys.stdout.write(
" client: read %d bytes from remote server '%s'\n"
% (len(d2), server))
finally:
f.close()
self.assertEqual(d1, d2)
def test_asyncore_server(self):
"""Check the example asyncore integration."""
if support.verbose:
sys.stdout.write("\n")
indata = b"FOO\n"
server = AsyncoreEchoServer(CERTFILE)
with server:
s = test_wrap_socket(socket.socket())
s.connect(('127.0.0.1', server.port))
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
s.write(indata)
outdata = s.read()
if support.verbose:
sys.stdout.write(" client: read %r\n" % outdata)
if outdata != indata.lower():
self.fail(
"bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
% (outdata[:20], len(outdata),
indata[:20].lower(), len(indata)))
s.write(b"over\n")
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
if support.verbose:
sys.stdout.write(" client: connection closed.\n")
def test_recv_send(self):
"""Test recv(), send() and friends."""
if support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLS_SERVER,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = test_wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE)
s.connect((HOST, server.port))
# helper methods for standardising recv* method signatures
def _recv_into():
b = bytearray(b"\0"*100)
count = s.recv_into(b)
return b[:count]
def _recvfrom_into():
b = bytearray(b"\0"*100)
count, addr = s.recvfrom_into(b)
return b[:count]
# (name, method, expect success?, *args, return value func)
send_methods = [
('send', s.send, True, [], len),
('sendto', s.sendto, False, ["some.address"], len),
('sendall', s.sendall, True, [], lambda x: None),
]
# (name, method, whether to expect success, *args)
recv_methods = [
('recv', s.recv, True, []),
('recvfrom', s.recvfrom, False, ["some.address"]),
('recv_into', _recv_into, True, []),
('recvfrom_into', _recvfrom_into, False, []),
]
data_prefix = "PREFIX_"
for (meth_name, send_meth, expect_success, args,
ret_val_meth) in send_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
ret = send_meth(indata, *args)
msg = "sending with {}".format(meth_name)
self.assertEqual(ret, ret_val_meth(indata), msg=msg)
outdata = s.read()
if outdata != indata.lower():
self.fail(
"While sending with <<{name:s}>> bad data "
"<<{outdata:r}>> ({nout:d}) received; "
"expected <<{indata:r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to send with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<{name:s}>> failed with unexpected "
"exception message: {exp:s}\n".format(
name=meth_name, exp=e
)
)
for meth_name, recv_meth, expect_success, args in recv_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
s.send(indata)
outdata = recv_meth(*args)
if outdata != indata.lower():
self.fail(
"While receiving with <<{name:s}>> bad data "
"<<{outdata:r}>> ({nout:d}) received; "
"expected <<{indata:r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to receive with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<{name:s}>> failed with unexpected "
"exception message: {exp:s}\n".format(
name=meth_name, exp=e
)
)
# consume data
s.read()
# read(-1, buffer) is supported, even though read(-1) is not
data = b"data"
s.send(data)
buffer = bytearray(len(data))
self.assertEqual(s.read(-1, buffer), len(data))
self.assertEqual(buffer, data)
# sendall accepts bytes-like objects
if ctypes is not None:
ubyte = ctypes.c_ubyte * len(data)
byteslike = ubyte.from_buffer_copy(data)
s.sendall(byteslike)
self.assertEqual(s.read(), data)
# Make sure sendmsg et al are disallowed to avoid
# inadvertent disclosure of data and/or corruption
# of the encrypted data stream
self.assertRaises(NotImplementedError, s.dup)
self.assertRaises(NotImplementedError, s.sendmsg, [b"data"])
self.assertRaises(NotImplementedError, s.recvmsg, 100)
self.assertRaises(NotImplementedError,
s.recvmsg_into, [bytearray(100)])
s.write(b"over\n")
self.assertRaises(ValueError, s.recv, -1)
self.assertRaises(ValueError, s.read, -1)
s.close()
def test_recv_zero(self):
server = ThreadedEchoServer(CERTFILE)
self.enterContext(server)
s = socket.create_connection((HOST, server.port))
self.addCleanup(s.close)
s = test_wrap_socket(s, suppress_ragged_eofs=False)
self.addCleanup(s.close)
# recv/read(0) should return no data
s.send(b"data")
self.assertEqual(s.recv(0), b"")
self.assertEqual(s.read(0), b"")
self.assertEqual(s.read(), b"data")
# Should not block if the other end sends no data
s.setblocking(False)
self.assertEqual(s.recv(0), b"")
self.assertEqual(s.recv_into(bytearray()), 0)
def test_nonblocking_send(self):
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLS_SERVER,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = test_wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE)
s.connect((HOST, server.port))
s.setblocking(False)
# If we keep sending data, at some point the buffers
# will be full and the call will block
buf = bytearray(8192)
def fill_buffer():
while True:
s.send(buf)
self.assertRaises((ssl.SSLWantWriteError,
ssl.SSLWantReadError), fill_buffer)
# Now read all the output and discard it
s.setblocking(True)
s.close()
def test_handshake_timeout(self):
# Issue #5103: SSL handshake must respect the socket timeout
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = socket_helper.bind_port(server)
started = threading.Event()
finish = False
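        # 'finish' is flipped only by the main thread and polled by the
        # server thread between 0.1 second select() timeouts.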
def serve():
server.listen()
started.set()
conns = []
while not finish:
r, w, e = select.select([server], [], [], 0.1)
if server in r:
# Let the socket hang around rather than having
# it closed by garbage collection.
conns.append(server.accept()[0])
for sock in conns:
sock.close()
t = threading.Thread(target=serve)
t.start()
started.wait()
try:
try:
c = socket.socket(socket.AF_INET)
c.settimeout(0.2)
c.connect((host, port))
# Will attempt handshake and time out
self.assertRaisesRegex(TimeoutError, "timed out",
test_wrap_socket, c)
finally:
c.close()
try:
c = socket.socket(socket.AF_INET)
c = test_wrap_socket(c)
c.settimeout(0.2)
# Will attempt handshake and time out
self.assertRaisesRegex(TimeoutError, "timed out",
c.connect, (host, port))
finally:
c.close()
finally:
finish = True
t.join()
server.close()
def test_server_accept(self):
# Issue #16357: accept() on a SSLSocket created through
# SSLContext.wrap_socket().
client_ctx, server_ctx, hostname = testing_context()
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = socket_helper.bind_port(server)
server = server_ctx.wrap_socket(server, server_side=True)
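        # Wrapping the listening socket makes accept() hand back an
        # already-wrapped SSLSocket for each incoming connection.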
self.assertTrue(server.server_side)
evt = threading.Event()
remote = None
peer = None
def serve():
nonlocal remote, peer
server.listen()
# Block on the accept and wait on the connection to close.
evt.set()
remote, peer = server.accept()
remote.send(remote.recv(4))
t = threading.Thread(target=serve)
t.start()
        # The client waits until the server is set up, then connects.
evt.wait()
client = client_ctx.wrap_socket(
socket.socket(), server_hostname=hostname
)
client.connect((hostname, port))
client.send(b'data')
client.recv()
client_addr = client.getsockname()
client.close()
t.join()
remote.close()
server.close()
# Sanity checks.
self.assertIsInstance(remote, ssl.SSLSocket)
self.assertEqual(peer, client_addr)
def test_getpeercert_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = False
with context.wrap_socket(socket.socket()) as sock:
with self.assertRaises(OSError) as cm:
sock.getpeercert()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_do_handshake_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = False
with context.wrap_socket(socket.socket()) as sock:
with self.assertRaises(OSError) as cm:
sock.do_handshake()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_no_shared_ciphers(self):
client_context, server_context, hostname = testing_context()
# OpenSSL enables all TLS 1.3 ciphers, enforce TLS 1.2 for test
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
# Force different suites on client and server
client_context.set_ciphers("AES128")
server_context.set_ciphers("AES256")
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
with self.assertRaises(OSError):
s.connect((HOST, server.port))
self.assertIn("no shared cipher", server.conn_errors[0])
def test_version_basic(self):
"""
Basic tests for SSLSocket.version().
More tests are done in the test_protocol_*() methods.
"""
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_TLS_SERVER,
chatty=False) as server:
with context.wrap_socket(socket.socket()) as s:
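                # version() and _sslobj stay None until the handshake runs.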
self.assertIs(s.version(), None)
self.assertIs(s._sslobj, None)
s.connect((HOST, server.port))
self.assertEqual(s.version(), 'TLSv1.3')
self.assertIs(s._sslobj, None)
self.assertIs(s.version(), None)
@requires_tls_version('TLSv1_3')
def test_tls1_3(self):
client_context, server_context, hostname = testing_context()
client_context.minimum_version = ssl.TLSVersion.TLSv1_3
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertIn(s.cipher()[0], {
'TLS_AES_256_GCM_SHA384',
'TLS_CHACHA20_POLY1305_SHA256',
'TLS_AES_128_GCM_SHA256',
})
self.assertEqual(s.version(), 'TLSv1.3')
@requires_tls_version('TLSv1_2')
@requires_tls_version('TLSv1')
@ignore_deprecation
def test_min_max_version_tlsv1_2(self):
client_context, server_context, hostname = testing_context()
# client TLSv1.0 to 1.2
client_context.minimum_version = ssl.TLSVersion.TLSv1
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
# server only TLSv1.2
server_context.minimum_version = ssl.TLSVersion.TLSv1_2
server_context.maximum_version = ssl.TLSVersion.TLSv1_2
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertEqual(s.version(), 'TLSv1.2')
@requires_tls_version('TLSv1_1')
@ignore_deprecation
def test_min_max_version_tlsv1_1(self):
client_context, server_context, hostname = testing_context()
# client 1.0 to 1.2, server 1.0 to 1.1
client_context.minimum_version = ssl.TLSVersion.TLSv1
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
server_context.minimum_version = ssl.TLSVersion.TLSv1
server_context.maximum_version = ssl.TLSVersion.TLSv1_1
seclevel_workaround(client_context, server_context)
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertEqual(s.version(), 'TLSv1.1')
@requires_tls_version('TLSv1_2')
@requires_tls_version('TLSv1')
@ignore_deprecation
def test_min_max_version_mismatch(self):
client_context, server_context, hostname = testing_context()
# client 1.0, server 1.2 (mismatch)
server_context.maximum_version = ssl.TLSVersion.TLSv1_2
server_context.minimum_version = ssl.TLSVersion.TLSv1_2
client_context.maximum_version = ssl.TLSVersion.TLSv1
client_context.minimum_version = ssl.TLSVersion.TLSv1
seclevel_workaround(client_context, server_context)
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
with self.assertRaises(ssl.SSLError) as e:
s.connect((HOST, server.port))
self.assertIn("alert", str(e.exception))
@requires_tls_version('SSLv3')
def test_min_max_version_sslv3(self):
client_context, server_context, hostname = testing_context()
server_context.minimum_version = ssl.TLSVersion.SSLv3
client_context.minimum_version = ssl.TLSVersion.SSLv3
client_context.maximum_version = ssl.TLSVersion.SSLv3
seclevel_workaround(client_context, server_context)
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertEqual(s.version(), 'SSLv3')
def test_default_ecdh_curve(self):
# Issue #21015: elliptic curve-based Diffie Hellman key exchange
# should be enabled by default on SSL contexts.
client_context, server_context, hostname = testing_context()
# TLSv1.3 defaults to PFS key agreement and no longer has KEA in
# cipher name.
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
# Prior to OpenSSL 1.0.0, ECDH ciphers have to be enabled
# explicitly using the 'ECCdraft' cipher alias. Otherwise,
# our default cipher list should prefer ECDH-based ciphers
# automatically.
with ThreadedEchoServer(context=server_context) as server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertIn("ECDH", s.cipher()[0])
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
"""Test tls-unique channel binding."""
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
server = ThreadedEchoServer(context=server_context,
chatty=True,
connectionchatty=False)
with server:
with client_context.wrap_socket(
socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
# get the data
cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(
" got channel binding data: {0!r}\n".format(cb_data))
# check if it is sane
self.assertIsNotNone(cb_data)
if s.version() == 'TLSv1.3':
self.assertEqual(len(cb_data), 48)
else:
self.assertEqual(len(cb_data), 12) # True for TLSv1
                # and compare it with the peer's version
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(cb_data).encode("us-ascii"))
# now, again
with client_context.wrap_socket(
socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
new_cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(
"got another channel binding data: {0!r}\n".format(
new_cb_data)
)
                # check that it is really unique
self.assertNotEqual(cb_data, new_cb_data)
self.assertIsNotNone(cb_data)
if s.version() == 'TLSv1.3':
self.assertEqual(len(cb_data), 48)
else:
self.assertEqual(len(cb_data), 12) # True for TLSv1
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(new_cb_data).encode("us-ascii"))
def test_compression(self):
client_context, server_context, hostname = testing_context()
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
if support.verbose:
sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
@unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
"ssl.OP_NO_COMPRESSION needed for this test")
def test_compression_disabled(self):
client_context, server_context, hostname = testing_context()
client_context.options |= ssl.OP_NO_COMPRESSION
server_context.options |= ssl.OP_NO_COMPRESSION
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
self.assertIs(stats['compression'], None)
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_dh_params(self):
# Check we can get a connection with ephemeral Diffie-Hellman
client_context, server_context, hostname = testing_context()
# test scenario needs TLS <= 1.2
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
server_context.load_dh_params(DHFILE)
server_context.set_ciphers("kEDH")
server_context.maximum_version = ssl.TLSVersion.TLSv1_2
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
cipher = stats["cipher"][0]
parts = cipher.split("-")
if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
self.fail("Non-DH cipher: " + cipher[0])
def test_ecdh_curve(self):
# server secp384r1, client auto
client_context, server_context, hostname = testing_context()
server_context.set_ecdh_curve("secp384r1")
server_context.set_ciphers("ECDHE:!eNULL:!aNULL")
server_context.minimum_version = ssl.TLSVersion.TLSv1_2
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
# server auto, client secp384r1
client_context, server_context, hostname = testing_context()
client_context.set_ecdh_curve("secp384r1")
server_context.set_ciphers("ECDHE:!eNULL:!aNULL")
server_context.minimum_version = ssl.TLSVersion.TLSv1_2
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
# server / client curve mismatch
client_context, server_context, hostname = testing_context()
client_context.set_ecdh_curve("prime256v1")
server_context.set_ecdh_curve("secp384r1")
server_context.set_ciphers("ECDHE:!eNULL:!aNULL")
server_context.minimum_version = ssl.TLSVersion.TLSv1_2
with self.assertRaises(ssl.SSLError):
server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
def test_selected_alpn_protocol(self):
# selected_alpn_protocol() is None unless ALPN is used.
client_context, server_context, hostname = testing_context()
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
self.assertIs(stats['client_alpn_protocol'], None)
def test_selected_alpn_protocol_if_server_uses_alpn(self):
# selected_alpn_protocol() is None unless ALPN is used by the client.
client_context, server_context, hostname = testing_context()
server_context.set_alpn_protocols(['foo', 'bar'])
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True,
sni_name=hostname)
self.assertIs(stats['client_alpn_protocol'], None)
def test_alpn_protocols(self):
server_protocols = ['foo', 'bar', 'milkshake']
protocol_tests = [
(['foo', 'bar'], 'foo'),
(['bar', 'foo'], 'foo'),
(['milkshake'], 'milkshake'),
(['http/3.0', 'http/4.0'], None)
]
for client_protocols, expected in protocol_tests:
client_context, server_context, hostname = testing_context()
server_context.set_alpn_protocols(server_protocols)
client_context.set_alpn_protocols(client_protocols)
try:
stats = server_params_test(client_context,
server_context,
chatty=True,
connectionchatty=True,
sni_name=hostname)
except ssl.SSLError as e:
stats = e
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_alpn_protocol']
self.assertEqual(client_result, expected,
msg % (client_result, "client"))
server_result = stats['server_alpn_protocols'][-1] \
if len(stats['server_alpn_protocols']) else 'nothing'
self.assertEqual(server_result, expected,
msg % (server_result, "server"))
def test_npn_protocols(self):
assert not ssl.HAS_NPN
def sni_contexts(self):
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(SIGNED_CERTFILE)
other_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
other_context.load_cert_chain(SIGNED_CERTFILE2)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.load_verify_locations(SIGNING_CA)
return server_context, other_context, client_context
def check_common_name(self, stats, name):
cert = stats['peercert']
self.assertIn((('commonName', name),), cert['subject'])
def test_sni_callback(self):
calls = []
server_context, other_context, client_context = self.sni_contexts()
client_context.check_hostname = False
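        # Reassigning ssl_sock.context from the servername callback is the
        # supported way to switch certificates based on the requested name.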
def servername_cb(ssl_sock, server_name, initial_context):
calls.append((server_name, initial_context))
if server_name is not None:
ssl_sock.context = other_context
server_context.set_servername_callback(servername_cb)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='supermessage')
# The hostname was fetched properly, and the certificate was
# changed for the connection.
self.assertEqual(calls, [("supermessage", server_context)])
        # SIGNED_CERTFILE2 (CN 'fakehostname') was selected
self.check_common_name(stats, 'fakehostname')
calls = []
# The callback is called with server_name=None
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name=None)
self.assertEqual(calls, [(None, server_context)])
self.check_common_name(stats, SIGNED_CERTFILE_HOSTNAME)
# Check disabling the callback
calls = []
server_context.set_servername_callback(None)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='notfunny')
# Certificate didn't change
self.check_common_name(stats, SIGNED_CERTFILE_HOSTNAME)
self.assertEqual(calls, [])
def test_sni_callback_alert(self):
# Returning a TLS alert is reflected to the connecting client
server_context, other_context, client_context = self.sni_contexts()
def cb_returning_alert(ssl_sock, server_name, initial_context):
return ssl.ALERT_DESCRIPTION_ACCESS_DENIED
server_context.set_servername_callback(cb_returning_alert)
with self.assertRaises(ssl.SSLError) as cm:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED')
def test_sni_callback_raising(self):
# Raising fails the connection with a TLS handshake failure alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_raising(ssl_sock, server_name, initial_context):
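            # deliberately raise ZeroDivisionError; it is reported through
            # the unraisable-exception hook checked below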
1/0
server_context.set_servername_callback(cb_raising)
with support.catch_unraisable_exception() as catch:
with self.assertRaises(ssl.SSLError) as cm:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason,
'SSLV3_ALERT_HANDSHAKE_FAILURE')
self.assertEqual(catch.unraisable.exc_type, ZeroDivisionError)
def test_sni_callback_wrong_return_type(self):
# Returning the wrong return type terminates the TLS connection
# with an internal error alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_wrong_return_type(ssl_sock, server_name, initial_context):
return "foo"
server_context.set_servername_callback(cb_wrong_return_type)
with support.catch_unraisable_exception() as catch:
with self.assertRaises(ssl.SSLError) as cm:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
self.assertEqual(catch.unraisable.exc_type, TypeError)
def test_shared_ciphers(self):
client_context, server_context, hostname = testing_context()
client_context.set_ciphers("AES128:AES256")
server_context.set_ciphers("AES256")
expected_algs = [
"AES256", "AES-256",
# TLS 1.3 ciphers are always enabled
"TLS_CHACHA20", "TLS_AES",
]
stats = server_params_test(client_context, server_context,
sni_name=hostname)
ciphers = stats['server_shared_ciphers'][0]
self.assertGreater(len(ciphers), 0)
for name, tls_version, bits in ciphers:
if not any(alg in name for alg in expected_algs):
self.fail(name)
def test_read_write_after_close_raises_valuerror(self):
client_context, server_context, hostname = testing_context()
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
s = client_context.wrap_socket(socket.socket(),
server_hostname=hostname)
s.connect((HOST, server.port))
s.close()
self.assertRaises(ValueError, s.read, 1024)
self.assertRaises(ValueError, s.write, b'hello')
def test_sendfile(self):
TEST_DATA = b"x" * 512
with open(os_helper.TESTFN, 'wb') as f:
f.write(TEST_DATA)
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
client_context, server_context, hostname = testing_context()
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
with open(os_helper.TESTFN, 'rb') as file:
s.sendfile(file)
self.assertEqual(s.recv(1024), TEST_DATA)
def test_session(self):
client_context, server_context, hostname = testing_context()
# TODO: sessions aren't compatible with TLSv1.3 yet
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
# first connection without session
stats = server_params_test(client_context, server_context,
sni_name=hostname)
session = stats['session']
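        # SSLSession wraps the OpenSSL session: an id, creation time,
        # timeout and (with ticket-based resumption) a ticket lifetime hint.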
self.assertTrue(session.id)
self.assertGreater(session.time, 0)
self.assertGreater(session.timeout, 0)
self.assertTrue(session.has_ticket)
self.assertGreater(session.ticket_lifetime_hint, 0)
self.assertFalse(stats['session_reused'])
sess_stat = server_context.session_stats()
self.assertEqual(sess_stat['accept'], 1)
self.assertEqual(sess_stat['hits'], 0)
# reuse session
stats = server_params_test(client_context, server_context,
session=session, sni_name=hostname)
sess_stat = server_context.session_stats()
self.assertEqual(sess_stat['accept'], 2)
self.assertEqual(sess_stat['hits'], 1)
self.assertTrue(stats['session_reused'])
session2 = stats['session']
self.assertEqual(session2.id, session.id)
self.assertEqual(session2, session)
self.assertIsNot(session2, session)
self.assertGreaterEqual(session2.time, session.time)
self.assertGreaterEqual(session2.timeout, session.timeout)
# another one without session
stats = server_params_test(client_context, server_context,
sni_name=hostname)
self.assertFalse(stats['session_reused'])
session3 = stats['session']
self.assertNotEqual(session3.id, session.id)
self.assertNotEqual(session3, session)
sess_stat = server_context.session_stats()
self.assertEqual(sess_stat['accept'], 3)
self.assertEqual(sess_stat['hits'], 1)
# reuse session again
stats = server_params_test(client_context, server_context,
session=session, sni_name=hostname)
self.assertTrue(stats['session_reused'])
session4 = stats['session']
self.assertEqual(session4.id, session.id)
self.assertEqual(session4, session)
self.assertGreaterEqual(session4.time, session.time)
self.assertGreaterEqual(session4.timeout, session.timeout)
sess_stat = server_context.session_stats()
self.assertEqual(sess_stat['accept'], 4)
self.assertEqual(sess_stat['hits'], 2)
def test_session_handling(self):
client_context, server_context, hostname = testing_context()
client_context2, _, _ = testing_context()
# TODO: session reuse does not work with TLSv1.3
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
client_context2.maximum_version = ssl.TLSVersion.TLSv1_2
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
# session is None before handshake
self.assertEqual(s.session, None)
self.assertEqual(s.session_reused, None)
s.connect((HOST, server.port))
session = s.session
self.assertTrue(session)
with self.assertRaises(TypeError) as e:
s.session = object
self.assertEqual(str(e.exception), 'Value is not a SSLSession.')
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
# cannot set session after handshake
with self.assertRaises(ValueError) as e:
s.session = session
self.assertEqual(str(e.exception),
'Cannot set session after handshake.')
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
# can set session before handshake and before the
# connection was established
s.session = session
s.connect((HOST, server.port))
self.assertEqual(s.session.id, session.id)
self.assertEqual(s.session, session)
self.assertEqual(s.session_reused, True)
with client_context2.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
# cannot re-use session with a different SSLContext
with self.assertRaises(ValueError) as e:
s.session = session
s.connect((HOST, server.port))
self.assertEqual(str(e.exception),
'Session refers to a different SSLContext.')
@unittest.skipUnless(has_tls_version('TLSv1_3'), "Test needs TLS 1.3")
class TestPostHandshakeAuth(unittest.TestCase):
def test_pha_setter(self):
protocols = [
ssl.PROTOCOL_TLS_SERVER, ssl.PROTOCOL_TLS_CLIENT
]
for protocol in protocols:
ctx = ssl.SSLContext(protocol)
self.assertEqual(ctx.post_handshake_auth, False)
ctx.post_handshake_auth = True
self.assertEqual(ctx.post_handshake_auth, True)
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertEqual(ctx.post_handshake_auth, True)
ctx.post_handshake_auth = False
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
self.assertEqual(ctx.post_handshake_auth, False)
ctx.verify_mode = ssl.CERT_OPTIONAL
ctx.post_handshake_auth = True
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
self.assertEqual(ctx.post_handshake_auth, True)
def test_pha_required(self):
client_context, server_context, hostname = testing_context()
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.post_handshake_auth = True
client_context.load_cert_chain(SIGNED_CERTFILE)
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
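                # The echo server's handler (defined earlier in this file)
                # speaks a small command protocol: HASCERT reports whether a
                # client cert is present, PHA triggers
                # verify_client_post_handshake(), GETCERT dumps the peer cert.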
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'FALSE\n')
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'TRUE\n')
# PHA method just returns true when cert is already available
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
s.write(b'GETCERT')
cert_text = s.recv(4096).decode('us-ascii')
self.assertIn('Python Software Foundation CA', cert_text)
def test_pha_required_nocert(self):
client_context, server_context, hostname = testing_context()
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.post_handshake_auth = True
def msg_cb(conn, direction, version, content_type, msg_type, data):
if support.verbose and content_type == _TLSContentType.ALERT:
info = (conn, direction, version, content_type, msg_type, data)
sys.stdout.write(f"TLS: {info!r}\n")
server_context._msg_callback = msg_cb
client_context._msg_callback = msg_cb
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname,
suppress_ragged_eofs=False) as s:
s.connect((HOST, server.port))
s.write(b'PHA')
                # The test sometimes fails with an EOF error; it passes as
                # long as the server aborts the connection with an error.
with self.assertRaisesRegex(
ssl.SSLError,
'(certificate required|EOF occurred)'
):
# receive CertificateRequest
data = s.recv(1024)
self.assertEqual(data, b'OK\n')
# send empty Certificate + Finish
s.write(b'HASCERT')
# receive alert
s.recv(1024)
def test_pha_optional(self):
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.post_handshake_auth = True
client_context.load_cert_chain(SIGNED_CERTFILE)
# check CERT_OPTIONAL
server_context.verify_mode = ssl.CERT_OPTIONAL
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'FALSE\n')
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'TRUE\n')
def test_pha_optional_nocert(self):
if support.verbose:
sys.stdout.write("\n")
client_context, server_context, hostname = testing_context()
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_OPTIONAL
client_context.post_handshake_auth = True
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'FALSE\n')
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
# optional doesn't fail when client does not have a cert
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'FALSE\n')
def test_pha_no_pha_client(self):
client_context, server_context, hostname = testing_context()
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.load_cert_chain(SIGNED_CERTFILE)
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
with self.assertRaisesRegex(ssl.SSLError, 'not server'):
s.verify_client_post_handshake()
s.write(b'PHA')
self.assertIn(b'extension not received', s.recv(1024))
def test_pha_no_pha_server(self):
# server doesn't have PHA enabled, cert is requested in handshake
client_context, server_context, hostname = testing_context()
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.post_handshake_auth = True
client_context.load_cert_chain(SIGNED_CERTFILE)
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'TRUE\n')
# PHA doesn't fail if there is already a cert
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'TRUE\n')
def test_pha_not_tls13(self):
# TLS 1.2
client_context, server_context, hostname = testing_context()
server_context.verify_mode = ssl.CERT_REQUIRED
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
client_context.post_handshake_auth = True
client_context.load_cert_chain(SIGNED_CERTFILE)
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
# PHA fails for TLS != 1.3
s.write(b'PHA')
self.assertIn(b'WRONG_SSL_VERSION', s.recv(1024))
def test_bpo37428_pha_cert_none(self):
# verify that post_handshake_auth does not implicitly enable cert
# validation.
hostname = SIGNED_CERTFILE_HOSTNAME
client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
client_context.post_handshake_auth = True
client_context.load_cert_chain(SIGNED_CERTFILE)
# no cert validation and CA on client side
client_context.check_hostname = False
client_context.verify_mode = ssl.CERT_NONE
server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
server_context.load_cert_chain(SIGNED_CERTFILE)
server_context.load_verify_locations(SIGNING_CA)
server_context.post_handshake_auth = True
server_context.verify_mode = ssl.CERT_REQUIRED
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'FALSE\n')
s.write(b'PHA')
self.assertEqual(s.recv(1024), b'OK\n')
s.write(b'HASCERT')
self.assertEqual(s.recv(1024), b'TRUE\n')
# server cert has not been validated
self.assertEqual(s.getpeercert(), {})
def test_internal_chain_client(self):
client_context, server_context, hostname = testing_context(
server_chain=False
)
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(
socket.socket(),
server_hostname=hostname
) as s:
s.connect((HOST, server.port))
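                # get_verified_chain() includes the CA that completed
                # verification from the trust store; get_unverified_chain()
                # holds only what the peer sent (just the leaf here, since
                # the server was created with server_chain=False).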
vc = s._sslobj.get_verified_chain()
self.assertEqual(len(vc), 2)
ee, ca = vc
uvc = s._sslobj.get_unverified_chain()
self.assertEqual(len(uvc), 1)
self.assertEqual(ee, uvc[0])
self.assertEqual(hash(ee), hash(uvc[0]))
self.assertEqual(repr(ee), repr(uvc[0]))
self.assertNotEqual(ee, ca)
self.assertNotEqual(hash(ee), hash(ca))
self.assertNotEqual(repr(ee), repr(ca))
self.assertNotEqual(ee.get_info(), ca.get_info())
self.assertIn("CN=localhost", repr(ee))
self.assertIn("CN=our-ca-server", repr(ca))
pem = ee.public_bytes(_ssl.ENCODING_PEM)
der = ee.public_bytes(_ssl.ENCODING_DER)
self.assertIsInstance(pem, str)
self.assertIn("-----BEGIN CERTIFICATE-----", pem)
self.assertIsInstance(der, bytes)
self.assertEqual(
ssl.PEM_cert_to_DER_cert(pem), der
)
def test_internal_chain_server(self):
client_context, server_context, hostname = testing_context()
client_context.load_cert_chain(SIGNED_CERTFILE)
server_context.verify_mode = ssl.CERT_REQUIRED
server_context.maximum_version = ssl.TLSVersion.TLSv1_2
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(
socket.socket(),
server_hostname=hostname
) as s:
s.connect((HOST, server.port))
s.write(b'VERIFIEDCHAIN\n')
res = s.recv(1024)
self.assertEqual(res, b'\x02\n')
s.write(b'UNVERIFIEDCHAIN\n')
res = s.recv(1024)
self.assertEqual(res, b'\x02\n')
HAS_KEYLOG = hasattr(ssl.SSLContext, 'keylog_filename')
requires_keylog = unittest.skipUnless(
HAS_KEYLOG, 'test requires OpenSSL 1.1.1 with keylog callback')
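# Hedged usage sketch (not part of the test suite): enabling key logging
# on a context so an external tool such as Wireshark can decrypt captured
# traffic. The log path is an illustrative assumption, not a fixture.
def _keylog_usage_sketch():
    ctx = ssl.create_default_context()
    ctx.keylog_filename = '/tmp/sslkeys.log'  # hypothetical writable path
    return ctx  # handshakes made via ctx now append NSS-format key lines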
class TestSSLDebug(unittest.TestCase):
def keylog_lines(self, fname=os_helper.TESTFN):
with open(fname) as f:
return len(list(f))
@requires_keylog
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_keylog_defaults(self):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.keylog_filename, None)
self.assertFalse(os.path.isfile(os_helper.TESTFN))
ctx.keylog_filename = os_helper.TESTFN
self.assertEqual(ctx.keylog_filename, os_helper.TESTFN)
self.assertTrue(os.path.isfile(os_helper.TESTFN))
self.assertEqual(self.keylog_lines(), 1)
ctx.keylog_filename = None
self.assertEqual(ctx.keylog_filename, None)
with self.assertRaises((IsADirectoryError, PermissionError)):
# Windows raises PermissionError
ctx.keylog_filename = os.path.dirname(
os.path.abspath(os_helper.TESTFN))
with self.assertRaises(TypeError):
ctx.keylog_filename = 1
@requires_keylog
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_keylog_filename(self):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
client_context, server_context, hostname = testing_context()
client_context.keylog_filename = os_helper.TESTFN
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
# header, 5 lines for TLS 1.3
self.assertEqual(self.keylog_lines(), 6)
client_context.keylog_filename = None
server_context.keylog_filename = os_helper.TESTFN
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertGreaterEqual(self.keylog_lines(), 11)
client_context.keylog_filename = os_helper.TESTFN
server_context.keylog_filename = os_helper.TESTFN
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertGreaterEqual(self.keylog_lines(), 21)
client_context.keylog_filename = None
server_context.keylog_filename = None
@requires_keylog
@unittest.skipIf(sys.flags.ignore_environment,
"test is not compatible with ignore_environment")
@unittest.skipIf(Py_DEBUG_WIN32, "Avoid mixing debug/release CRT on Windows")
def test_keylog_env(self):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with unittest.mock.patch.dict(os.environ):
os.environ['SSLKEYLOGFILE'] = os_helper.TESTFN
self.assertEqual(os.environ['SSLKEYLOGFILE'], os_helper.TESTFN)
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(ctx.keylog_filename, None)
ctx = ssl.create_default_context()
self.assertEqual(ctx.keylog_filename, os_helper.TESTFN)
ctx = ssl._create_stdlib_context()
self.assertEqual(ctx.keylog_filename, os_helper.TESTFN)
def test_msg_callback(self):
client_context, server_context, hostname = testing_context()
def msg_cb(conn, direction, version, content_type, msg_type, data):
pass
self.assertIs(client_context._msg_callback, None)
client_context._msg_callback = msg_cb
self.assertIs(client_context._msg_callback, msg_cb)
with self.assertRaises(TypeError):
client_context._msg_callback = object()
def test_msg_callback_tls12(self):
client_context, server_context, hostname = testing_context()
client_context.maximum_version = ssl.TLSVersion.TLSv1_2
msg = []
def msg_cb(conn, direction, version, content_type, msg_type, data):
self.assertIsInstance(conn, ssl.SSLSocket)
self.assertIsInstance(data, bytes)
self.assertIn(direction, {'read', 'write'})
msg.append((direction, version, content_type, msg_type))
client_context._msg_callback = msg_cb
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
self.assertIn(
("read", TLSVersion.TLSv1_2, _TLSContentType.HANDSHAKE,
_TLSMessageType.SERVER_KEY_EXCHANGE),
msg
)
self.assertIn(
("write", TLSVersion.TLSv1_2, _TLSContentType.CHANGE_CIPHER_SPEC,
_TLSMessageType.CHANGE_CIPHER_SPEC),
msg
)
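    # Minimal sketch, assuming CPython's private, underscore-prefixed hook
    # keeps this shape: wiring the message callback to print every TLS
    # record as it is read or written. Illustrative only; never invoked.
    @staticmethod
    def _msg_callback_sketch(ctx):
        def log_cb(conn, direction, version, content_type, msg_type, data):
            print(direction, version, content_type, msg_type, len(data))
        ctx._msg_callback = log_cb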
def test_msg_callback_deadlock_bpo43577(self):
client_context, server_context, hostname = testing_context()
server_context2 = testing_context()[1]
def msg_cb(conn, direction, version, content_type, msg_type, data):
pass
def sni_cb(sock, servername, ctx):
sock.context = server_context2
server_context._msg_callback = msg_cb
server_context.sni_callback = sni_cb
server = ThreadedEchoServer(context=server_context, chatty=False)
with server:
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
with client_context.wrap_socket(socket.socket(),
server_hostname=hostname) as s:
s.connect((HOST, server.port))
class TestEnumerations(unittest.TestCase):
def test_tlsversion(self):
class CheckedTLSVersion(enum.IntEnum):
MINIMUM_SUPPORTED = _ssl.PROTO_MINIMUM_SUPPORTED
SSLv3 = _ssl.PROTO_SSLv3
TLSv1 = _ssl.PROTO_TLSv1
TLSv1_1 = _ssl.PROTO_TLSv1_1
TLSv1_2 = _ssl.PROTO_TLSv1_2
TLSv1_3 = _ssl.PROTO_TLSv1_3
MAXIMUM_SUPPORTED = _ssl.PROTO_MAXIMUM_SUPPORTED
enum._test_simple_enum(CheckedTLSVersion, TLSVersion)
def test_tlscontenttype(self):
class Checked_TLSContentType(enum.IntEnum):
"""Content types (record layer)
See RFC 8446, section B.1
"""
CHANGE_CIPHER_SPEC = 20
ALERT = 21
HANDSHAKE = 22
APPLICATION_DATA = 23
# pseudo content types
HEADER = 0x100
INNER_CONTENT_TYPE = 0x101
enum._test_simple_enum(Checked_TLSContentType, _TLSContentType)
def test_tlsalerttype(self):
class Checked_TLSAlertType(enum.IntEnum):
"""Alert types for TLSContentType.ALERT messages
            See RFC 8446, section B.2
"""
CLOSE_NOTIFY = 0
UNEXPECTED_MESSAGE = 10
BAD_RECORD_MAC = 20
DECRYPTION_FAILED = 21
RECORD_OVERFLOW = 22
DECOMPRESSION_FAILURE = 30
HANDSHAKE_FAILURE = 40
NO_CERTIFICATE = 41
BAD_CERTIFICATE = 42
UNSUPPORTED_CERTIFICATE = 43
CERTIFICATE_REVOKED = 44
CERTIFICATE_EXPIRED = 45
CERTIFICATE_UNKNOWN = 46
ILLEGAL_PARAMETER = 47
UNKNOWN_CA = 48
ACCESS_DENIED = 49
DECODE_ERROR = 50
DECRYPT_ERROR = 51
EXPORT_RESTRICTION = 60
PROTOCOL_VERSION = 70
INSUFFICIENT_SECURITY = 71
INTERNAL_ERROR = 80
INAPPROPRIATE_FALLBACK = 86
USER_CANCELED = 90
NO_RENEGOTIATION = 100
MISSING_EXTENSION = 109
UNSUPPORTED_EXTENSION = 110
CERTIFICATE_UNOBTAINABLE = 111
UNRECOGNIZED_NAME = 112
BAD_CERTIFICATE_STATUS_RESPONSE = 113
BAD_CERTIFICATE_HASH_VALUE = 114
UNKNOWN_PSK_IDENTITY = 115
CERTIFICATE_REQUIRED = 116
NO_APPLICATION_PROTOCOL = 120
enum._test_simple_enum(Checked_TLSAlertType, _TLSAlertType)
def test_tlsmessagetype(self):
class Checked_TLSMessageType(enum.IntEnum):
"""Message types (handshake protocol)
See RFC 8446, section B.3
"""
HELLO_REQUEST = 0
CLIENT_HELLO = 1
SERVER_HELLO = 2
HELLO_VERIFY_REQUEST = 3
NEWSESSION_TICKET = 4
END_OF_EARLY_DATA = 5
HELLO_RETRY_REQUEST = 6
ENCRYPTED_EXTENSIONS = 8
CERTIFICATE = 11
SERVER_KEY_EXCHANGE = 12
CERTIFICATE_REQUEST = 13
SERVER_DONE = 14
CERTIFICATE_VERIFY = 15
CLIENT_KEY_EXCHANGE = 16
FINISHED = 20
CERTIFICATE_URL = 21
CERTIFICATE_STATUS = 22
SUPPLEMENTAL_DATA = 23
KEY_UPDATE = 24
NEXT_PROTO = 67
MESSAGE_HASH = 254
CHANGE_CIPHER_SPEC = 0x0101
enum._test_simple_enum(Checked_TLSMessageType, _TLSMessageType)
def test_sslmethod(self):
Checked_SSLMethod = enum._old_convert_(
enum.IntEnum, '_SSLMethod', 'ssl',
lambda name: name.startswith('PROTOCOL_') and name != 'PROTOCOL_SSLv23',
source=ssl._ssl,
)
# This member is assigned dynamically in `ssl.py`:
Checked_SSLMethod.PROTOCOL_SSLv23 = Checked_SSLMethod.PROTOCOL_TLS
enum._test_simple_enum(Checked_SSLMethod, ssl._SSLMethod)
def test_options(self):
CheckedOptions = enum._old_convert_(
enum.IntFlag, 'Options', 'ssl',
lambda name: name.startswith('OP_'),
source=ssl._ssl,
)
enum._test_simple_enum(CheckedOptions, ssl.Options)
def test_alertdescription(self):
CheckedAlertDescription = enum._old_convert_(
enum.IntEnum, 'AlertDescription', 'ssl',
lambda name: name.startswith('ALERT_DESCRIPTION_'),
source=ssl._ssl,
)
enum._test_simple_enum(CheckedAlertDescription, ssl.AlertDescription)
def test_sslerrornumber(self):
Checked_SSLErrorNumber = enum._old_convert_(
enum.IntEnum, 'SSLErrorNumber', 'ssl',
lambda name: name.startswith('SSL_ERROR_'),
source=ssl._ssl,
)
enum._test_simple_enum(Checked_SSLErrorNumber, ssl.SSLErrorNumber)
def test_verifyflags(self):
CheckedVerifyFlags = enum._old_convert_(
enum.IntFlag, 'VerifyFlags', 'ssl',
lambda name: name.startswith('VERIFY_'),
source=ssl._ssl,
)
enum._test_simple_enum(CheckedVerifyFlags, ssl.VerifyFlags)
def test_verifymode(self):
CheckedVerifyMode = enum._old_convert_(
enum.IntEnum, 'VerifyMode', 'ssl',
lambda name: name.startswith('CERT_'),
source=ssl._ssl,
)
enum._test_simple_enum(CheckedVerifyMode, ssl.VerifyMode)
def setUpModule():
if support.verbose:
plats = {
'Mac': platform.mac_ver,
'Windows': platform.win32_ver,
}
for name, func in plats.items():
plat = func()
if plat and plat[0]:
plat = '%s %r' % (name, plat)
break
else:
plat = repr(platform.platform())
print("test_ssl: testing with %r %r" %
(ssl.OPENSSL_VERSION, ssl.OPENSSL_VERSION_INFO))
print(" under %s" % plat)
print(" HAS_SNI = %r" % ssl.HAS_SNI)
print(" OP_ALL = 0x%8x" % ssl.OP_ALL)
try:
print(" OP_NO_TLSv1_1 = 0x%8x" % ssl.OP_NO_TLSv1_1)
except AttributeError:
pass
for filename in [
CERTFILE, BYTES_CERTFILE,
ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY,
SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA,
BADCERT, BADKEY, EMPTYCERT]:
if not os.path.exists(filename):
raise support.TestFailed("Can't read certificate file %r" % filename)
thread_info = threading_helper.threading_setup()
unittest.addModuleCleanup(threading_helper.threading_cleanup, *thread_info)
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "a0c57d1d8227b69d8fa288c99f58de22",
"timestamp": "",
"source": "github",
"line_count": 5072,
"max_line_length": 118,
"avg_line_length": 42.57117507886435,
"alnum_prop": 0.5686061105682171,
"repo_name": "brython-dev/brython",
"id": "3b3b869bb53acdc4130ca949d9df92e3c2babfed",
"size": "215974",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "www/src/Lib/test/test_ssl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "24308"
},
{
"name": "HTML",
"bytes": "5144999"
},
{
"name": "JavaScript",
"bytes": "4143100"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Python",
"bytes": "22236375"
},
{
"name": "Roff",
"bytes": "21126"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
}
|
from oslo_log import log as logging
from trove.common import cfg
from trove.common import exception
from trove.common.remote import create_nova_client
from trove.common import utils
from trove.db import get_db_api
from trove.db import models as dbmodels
from trove.flavor.models import Flavor as flavor_model
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
db_api = get_db_api()
def persisted_models():
return {
'datastore': DBDatastore,
'capabilities': DBCapabilities,
'datastore_version': DBDatastoreVersion,
'capability_overrides': DBCapabilityOverrides,
'datastore_version_metadata': DBDatastoreVersionMetadata
}
class DBDatastore(dbmodels.DatabaseModelBase):
_data_fields = ['id', 'name', 'default_version_id']
class DBCapabilities(dbmodels.DatabaseModelBase):
_data_fields = ['id', 'name', 'description', 'enabled']
class DBCapabilityOverrides(dbmodels.DatabaseModelBase):
_data_fields = ['id', 'capability_id', 'datastore_version_id', 'enabled']
class DBDatastoreVersion(dbmodels.DatabaseModelBase):
_data_fields = ['id', 'datastore_id', 'name', 'manager', 'image_id',
'packages', 'active']
class DBDatastoreVersionMetadata(dbmodels.DatabaseModelBase):
_data_fields = ['id', 'datastore_version_id', 'key', 'value',
'created', 'deleted', 'deleted_at', 'updated_at']
preserve_on_delete = True
class Capabilities(object):
def __init__(self, datastore_version_id=None):
self.capabilities = []
self.datastore_version_id = datastore_version_id
def __contains__(self, item):
return item in [capability.name for capability in self.capabilities]
def __len__(self):
return len(self.capabilities)
def __iter__(self):
for item in self.capabilities:
yield item
def __repr__(self):
return '<%s: %s>' % (type(self), self.capabilities)
def add(self, capability, enabled):
"""
Add a capability override to a datastore version.
"""
if self.datastore_version_id is not None:
DBCapabilityOverrides.create(
capability_id=capability.id,
datastore_version_id=self.datastore_version_id,
enabled=enabled)
self._load()
def _load(self):
"""
Bulk load and override default capabilities with configured
datastore version specific settings.
"""
capability_defaults = [Capability(c)
for c in DBCapabilities.find_all()]
capability_overrides = []
if self.datastore_version_id is not None:
            # This should always be set, but if we ever lack a datastore
            # version id the defaults will still render unmodified.
capability_overrides = [
CapabilityOverride(ce)
for ce in DBCapabilityOverrides.find_all(
datastore_version_id=self.datastore_version_id)
]
def override(cap):
# This logic is necessary to apply datastore version specific
# capability overrides when they are present in the database.
for capability_override in capability_overrides:
if cap.id == capability_override.capability_id:
# we have a mapped entity that indicates this datastore
# version has an override so we honor that.
return capability_override
# There were no overrides for this capability so we just hand it
# right back.
return cap
        # list() so len(), membership tests and repeated iteration keep
        # working on Python 3, where map() returns a one-shot iterator.
        self.capabilities = list(map(override, capability_defaults))
LOG.debug('Capabilities for datastore %(ds_id)s: %(capabilities)s' %
{'ds_id': self.datastore_version_id,
'capabilities': self.capabilities})
@classmethod
def load(cls, datastore_version_id=None):
"""
Generates a Capabilities object by looking up all capabilities from
defaults and overrides and provides the one structure that should be
used as the interface to controlling capabilities per datastore.
:returns Capabilities:
"""
self = cls(datastore_version_id)
self._load()
return self
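# Hedged usage sketch; 'replication' is a hypothetical capability name,
# not seeded data. It shows the intended call pattern: load the merged
# default + override set for a datastore version, then test by name.
def _capabilities_usage_sketch(datastore_version_id):
    caps = Capabilities.load(datastore_version_id)
    return 'replication' in caps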
class BaseCapability(object):
def __init__(self, db_info):
self.db_info = db_info
def __repr__(self):
return ('<%(my_class)s: name: %(name)s, enabled: %(enabled)s>' %
{'my_class': type(self), 'name': self.name,
'enabled': self.enabled})
@property
def id(self):
"""
The capability's id
:returns str:
"""
return self.db_info.id
@property
def enabled(self):
"""
Is the capability/feature enabled?
:returns bool:
"""
return self.db_info.enabled
def enable(self):
"""
Enable the capability.
"""
self.db_info.enabled = True
self.db_info.save()
def disable(self):
"""
Disable the capability
"""
self.db_info.enabled = False
self.db_info.save()
def delete(self):
"""
Delete the capability from the database.
"""
self.db_info.delete()
class CapabilityOverride(BaseCapability):
"""
    A capability override is simply a setting that applies to a
specific datastore version that overrides the default setting in the
base capability's entry for Trove.
"""
def __init__(self, db_info):
super(CapabilityOverride, self).__init__(db_info)
# This *may* be better solved with a join in the SQLAlchemy model but
# I was unable to get our query object to work properly for this.
parent_capability = Capability.load(db_info.capability_id)
if parent_capability:
self.parent_name = parent_capability.name
self.parent_description = parent_capability.description
else:
raise exception.CapabilityNotFound(
_("Somehow we got a datastore version capability without a "
"parent, that shouldn't happen. %s") % db_info.capability_id)
@property
def name(self):
"""
The name of the capability.
:returns str:
"""
return self.parent_name
@property
def description(self):
"""
The description of the capability.
:returns str:
"""
return self.parent_description
@property
def capability_id(self):
"""
        Because capability overrides live in an association table, there
        are times when having the capability id is necessary.
:returns str:
"""
return self.db_info.capability_id
@classmethod
def load(cls, capability_id):
"""
Generates a CapabilityOverride object from the capability_override id.
:returns CapabilityOverride:
"""
try:
return cls(DBCapabilityOverrides.find_by(
capability_id=capability_id))
except exception.ModelNotFoundError:
raise exception.CapabilityNotFound(
_("Capability Override not found for "
"capability %s") % capability_id)
@classmethod
def create(cls, capability, datastore_version_id, enabled):
"""
Create a new CapabilityOverride.
:param capability: The capability to be overridden for
this DS Version
:param datastore_version_id: The datastore version to apply the
override to.
:param enabled: Set enabled to True or False
:returns CapabilityOverride:
"""
return CapabilityOverride(
DBCapabilityOverrides.create(
capability_id=capability.id,
datastore_version_id=datastore_version_id,
enabled=enabled)
)
class Capability(BaseCapability):
@property
def name(self):
"""
The Capability name
:returns str:
"""
return self.db_info.name
@property
def description(self):
"""
The Capability description
:returns str:
"""
return self.db_info.description
@classmethod
def load(cls, capability_id_or_name):
"""
Generates a Capability object by looking up the capability first by
ID then by name.
:returns Capability:
"""
try:
return cls(DBCapabilities.find_by(id=capability_id_or_name))
except exception.ModelNotFoundError:
try:
return cls(DBCapabilities.find_by(name=capability_id_or_name))
except exception.ModelNotFoundError:
raise exception.CapabilityNotFound(
capability=capability_id_or_name)
@classmethod
def create(cls, name, description, enabled=False):
"""
Creates a new capability.
:returns Capability:
"""
return Capability(DBCapabilities.create(
name=name, description=description, enabled=enabled))
class Datastore(object):
def __init__(self, db_info):
self.db_info = db_info
@classmethod
def load(cls, id_or_name):
try:
return cls(DBDatastore.find_by(id=id_or_name))
except exception.ModelNotFoundError:
try:
return cls(DBDatastore.find_by(name=id_or_name))
except exception.ModelNotFoundError:
raise exception.DatastoreNotFound(datastore=id_or_name)
@property
def id(self):
return self.db_info.id
@property
def name(self):
return self.db_info.name
@property
def default_version_id(self):
return self.db_info.default_version_id
def delete(self):
self.db_info.delete()
class Datastores(object):
def __init__(self, db_info):
self.db_info = db_info
@classmethod
def load(cls, only_active=True):
datastores = DBDatastore.find_all()
if only_active:
datastores = datastores.join(DBDatastoreVersion).filter(
DBDatastoreVersion.active == 1)
return cls(datastores)
def __iter__(self):
for item in self.db_info:
yield item
class DatastoreVersion(object):
def __init__(self, db_info):
self._capabilities = None
self.db_info = db_info
self._datastore_name = None
@classmethod
def load(cls, datastore, id_or_name):
try:
return cls(DBDatastoreVersion.find_by(datastore_id=datastore.id,
id=id_or_name))
except exception.ModelNotFoundError:
versions = DBDatastoreVersion.find_all(datastore_id=datastore.id,
name=id_or_name)
if versions.count() == 0:
raise exception.DatastoreVersionNotFound(version=id_or_name)
if versions.count() > 1:
raise exception.NoUniqueMatch(name=id_or_name)
return cls(versions.first())
@classmethod
def load_by_uuid(cls, uuid):
try:
return cls(DBDatastoreVersion.find_by(id=uuid))
except exception.ModelNotFoundError:
raise exception.DatastoreVersionNotFound(version=uuid)
def delete(self):
self.db_info.delete()
@property
def id(self):
return self.db_info.id
@property
def datastore_id(self):
return self.db_info.datastore_id
@property
def datastore_name(self):
if self._datastore_name is None:
self._datastore_name = Datastore.load(self.datastore_id).name
return self._datastore_name
# TODO(tim.simpson): This would be less confusing if it was called
# "version" and datastore_name was called "name".
@property
def name(self):
return self.db_info.name
@property
def image_id(self):
return self.db_info.image_id
@property
def packages(self):
return self.db_info.packages
@property
def active(self):
        return bool(self.db_info.active)
@property
def manager(self):
return self.db_info.manager
@property
def default(self):
datastore = Datastore.load(self.datastore_id)
return (datastore.default_version_id == self.db_info.id)
@property
def capabilities(self):
if self._capabilities is None:
self._capabilities = Capabilities.load(self.db_info.id)
return self._capabilities
class DatastoreVersions(object):
def __init__(self, db_info):
self.db_info = db_info
@classmethod
def load(cls, id_or_name, only_active=True):
datastore = Datastore.load(id_or_name)
if only_active:
versions = DBDatastoreVersion.find_all(datastore_id=datastore.id,
active=True)
else:
versions = DBDatastoreVersion.find_all(datastore_id=datastore.id)
return cls(versions)
@classmethod
def load_all(cls, only_active=True):
if only_active:
return cls(DBDatastoreVersion.find_all(active=True))
return cls(DBDatastoreVersion.find_all())
def __iter__(self):
for item in self.db_info:
yield item
def get_datastore_version(type=None, version=None, return_inactive=False):
datastore = type or CONF.default_datastore
if not datastore:
raise exception.DatastoreDefaultDatastoreNotFound()
datastore = Datastore.load(datastore)
version = version or datastore.default_version_id
if not version:
raise exception.DatastoreDefaultVersionNotFound(
datastore=datastore.name)
datastore_version = DatastoreVersion.load(datastore, version)
if datastore_version.datastore_id != datastore.id:
raise exception.DatastoreNoVersion(datastore=datastore.name,
version=datastore_version.name)
if not datastore_version.active and not return_inactive:
raise exception.DatastoreVersionInactive(
version=datastore_version.name)
return (datastore, datastore_version)
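# Illustrative call pattern for the lookup above; 'mysql' and '5.6' are
# hypothetical records, not guaranteed to exist in any deployment.
def _get_datastore_version_sketch():
    datastore, version = get_datastore_version(type='mysql', version='5.6')
    return datastore.name, version.name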
def update_datastore(name, default_version):
db_api.configure_db(CONF)
try:
datastore = DBDatastore.find_by(name=name)
except exception.ModelNotFoundError:
# Create a new one
datastore = DBDatastore()
datastore.id = utils.generate_uuid()
datastore.name = name
if default_version:
version = DatastoreVersion.load(datastore, default_version)
if not version.active:
raise exception.DatastoreVersionInactive(version=version.name)
datastore.default_version_id = version.id
else:
datastore.default_version_id = None
db_api.save(datastore)
def update_datastore_version(datastore, name, manager, image_id, packages,
active):
db_api.configure_db(CONF)
datastore = Datastore.load(datastore)
try:
version = DBDatastoreVersion.find_by(datastore_id=datastore.id,
name=name)
except exception.ModelNotFoundError:
# Create a new one
version = DBDatastoreVersion()
version.id = utils.generate_uuid()
version.name = name
version.datastore_id = datastore.id
version.manager = manager
version.image_id = image_id
version.packages = packages
version.active = active
db_api.save(version)
class DatastoreVersionMetadata(object):
@classmethod
def _datastore_version_metadata_add(cls, datastore_version_id,
key, value, exception_class):
"""Create an entry in the Datastore Version Metadata table."""
        # Do we have a mapping in the db?
        # yes, and it's deleted: un-delete and reuse the association
        # yes, and it's not deleted: error on create
        # no: just create the new association
try:
db_record = DBDatastoreVersionMetadata.find_by(
datastore_version_id=datastore_version_id,
key=key, value=value)
if db_record.deleted == 1:
db_record.deleted = 0
db_record.updated_at = utils.utcnow()
db_record.save()
return
else:
raise exception_class(
datastore_version_id=datastore_version_id,
flavor_id=value)
except exception.NotFound:
pass
DBDatastoreVersionMetadata.create(
datastore_version_id=datastore_version_id,
key=key, value=value)
@classmethod
def _datastore_version_metadata_delete(cls, datastore_version_id,
key, value, exception_class):
try:
db_record = DBDatastoreVersionMetadata.find_by(
datastore_version_id=datastore_version_id,
key=key, value=value)
if db_record.deleted == 0:
db_record.delete()
return
else:
raise exception_class(
datastore_version_id=datastore_version_id,
flavor_id=value)
except exception.ModelNotFoundError:
raise exception_class(datastore_version_id=datastore_version_id,
flavor_id=value)
@classmethod
def add_datastore_version_flavor_association(cls, datastore_name,
datastore_version_name,
flavor_ids):
db_api.configure_db(CONF)
db_ds_record = DBDatastore.find_by(
name=datastore_name
)
db_datastore_id = db_ds_record.id
db_dsv_record = DBDatastoreVersion.find_by(
datastore_id=db_datastore_id,
name=datastore_version_name
)
datastore_version_id = db_dsv_record.id
for flavor_id in flavor_ids:
cls._datastore_version_metadata_add(
datastore_version_id, 'flavor', flavor_id,
exception.DatastoreFlavorAssociationAlreadyExists)
@classmethod
def delete_datastore_version_flavor_association(cls, datastore_name,
datastore_version_name,
flavor_id):
db_api.configure_db(CONF)
db_ds_record = DBDatastore.find_by(
name=datastore_name
)
db_datastore_id = db_ds_record.id
db_dsv_record = DBDatastoreVersion.find_by(
datastore_id=db_datastore_id,
name=datastore_version_name
)
datastore_version_id = db_dsv_record.id
cls._datastore_version_metadata_delete(
datastore_version_id, 'flavor', flavor_id,
exception.DatastoreFlavorAssociationNotFound)
@classmethod
def list_datastore_version_flavor_associations(cls, context,
datastore_type,
datastore_version_id):
if datastore_type and datastore_version_id:
"""
All nova flavors are permitted for a datastore_version unless
one or more entries are found in datastore_version_metadata,
in which case only those are permitted.
"""
(datastore, datastore_version) = get_datastore_version(
type=datastore_type, version=datastore_version_id)
# If datastore_version_id and flavor key exists in the
# metadata table return all the associated flavors for
# that datastore version.
nova_flavors = create_nova_client(context).flavors.list()
bound_flavors = DBDatastoreVersionMetadata.find_all(
datastore_version_id=datastore_version.id,
key='flavor', deleted=False
)
if (bound_flavors.count() != 0):
bound_flavors = tuple(f.value for f in bound_flavors)
# Generate a filtered list of nova flavors
ds_nova_flavors = (f for f in nova_flavors
if f.id in bound_flavors)
associated_flavors = tuple(flavor_model(flavor=item)
for item in ds_nova_flavors)
else:
# Return all nova flavors if no flavor metadata found
# for datastore_version.
associated_flavors = tuple(flavor_model(flavor=item)
for item in nova_flavors)
return associated_flavors
else:
msg = _("Specify both the datastore and datastore_version_id.")
raise exception.BadRequest(msg)
|
{
"content_hash": "c0c010f0f53dce10ba903b6507a86ce5",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 79,
"avg_line_length": 32.70353302611367,
"alnum_prop": 0.5881634570220761,
"repo_name": "fabian4/trove",
"id": "d23771a6f5f4bd1b526ca4b22cfca8df811f5428",
"size": "22020",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trove/datastore/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "88"
},
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60526"
},
{
"name": "Python",
"bytes": "3297002"
},
{
"name": "Shell",
"bytes": "15239"
},
{
"name": "XSLT",
"bytes": "50542"
}
],
"symlink_target": ""
}
|
from __future__ import division, absolute_import, print_function
import numpy as np
import unittest
from svgpathtools.bezier import *
from svgpathtools.path import bpoints2bezier
class HigherOrderBezier:
def __init__(self, bpoints):
self.bpts = bpoints
def bpoints(self):
return self.bpts
def point(self, t):
return bezier_point(self.bpoints(), t)
def __repr__(self):
return str(self.bpts)
def random_polynomial(degree):
return np.poly1d(np.random.rand(degree + 1))
def random_bezier(degree):
if degree <= 3:
return bpoints2bezier(polynomial2bezier(np.random.rand(degree + 1)))
else:
return HigherOrderBezier(np.random.rand(degree + 1))
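# Small illustrative check (not one of the test cases below): a random
# cubic evaluated through its Bernstein form and through its power-basis
# polynomial should agree at any parameter value.
def _roundtrip_sketch():
    b = random_bezier(3)
    p = np.poly1d(bezier2polynomial(b.bpoints()))
    return abs(b.point(0.5) - p(0.5)) < 1e-9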
class TestBezier2Polynomial(unittest.TestCase):
def test_bezier2polynomial(self):
tvals = np.linspace(0, 1, 10)
for d in range(1, 10):
b = random_bezier(d)
p = np.poly1d(bezier2polynomial(b.bpoints()))
for t in tvals:
msg = ("degree {}\nt = {}\nb(t) = {}\n = {}\np(t) = \n{}\n = {}"
"".format(d, t, b, b.point(t), p, p(t)))
self.assertAlmostEqual(b.point(t), p(t), msg=msg)
class TestPolynomial2Bezier(unittest.TestCase):
def test_polynomial2bezier(self):
tvals = np.linspace(0, 1, 10)
for d in range(1, 3):
p = random_polynomial(d)
b = HigherOrderBezier(polynomial2bezier(p))
for t in tvals:
msg = ("degree {}\nt = {}\nb(t) = {}\n = {}\np(t) = \n{}\n = {}"
"".format(d, t, b, b.point(t), p, p(t)))
self.assertAlmostEqual(b.point(t), p(t), msg=msg)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "9a64bd8f481af116024ea66aeb2d1b7b",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 80,
"avg_line_length": 30.103448275862068,
"alnum_prop": 0.56815578465063,
"repo_name": "jpcofr/svgpathtools",
"id": "4c82e93e83447edf4186ee7bbce239abb64a2e78",
"size": "1746",
"binary": false,
"copies": "2",
"ref": "refs/heads/thesis",
"path": "test/test_bezier.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "370367"
}
],
"symlink_target": ""
}
|
import os
import ycm_core
flags = [
'-x',
'c++',
'-D_REENTRANT',
'-I/usr/include/SDL2',
'-Wall',
'-std=c++11',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
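# Hedged manual check, not required by YCM itself (which imports this file
# and calls FlagsForFile directly): print the flags this configuration
# would hand to libclang for a hypothetical translation unit.
if __name__ == '__main__':
  print( FlagsForFile( os.path.join( DirectoryOfThisScript(), 'main.cpp' ) ) )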
|
{
"content_hash": "93a55602c9f1dd7b1f1a1ffd3aa267c5",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 79,
"avg_line_length": 30.10091743119266,
"alnum_prop": 0.6747942700396221,
"repo_name": "gentili/glogin",
"id": "baf666860f72d2ecb47ebcd811f4868ea377798a",
"size": "4741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".ycm_extra_conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "157"
},
{
"name": "C++",
"bytes": "5138"
},
{
"name": "Makefile",
"bytes": "321"
},
{
"name": "Python",
"bytes": "4741"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
setup(name='nlptoolkit',
version='0.1',
description='My minimal NLP tools for Biocreative IV contest',
author='Chia-Jung, Yang',
author_email='jeroyang@gmail.com',
packages=find_packages(),
url = "https://github.com/jeroyang/nlptoolkit",
)
|
{
"content_hash": "1c24db2ae5a8058bac92abb395207537",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 68,
"avg_line_length": 29.272727272727273,
"alnum_prop": 0.6708074534161491,
"repo_name": "jeroyang/nlptoolkit",
"id": "e772d096107323ec502e44a57847251d6d4b1499",
"size": "388",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3649"
}
],
"symlink_target": ""
}
|
"""Implementation of Unix-like stat command for cloud storage providers."""
from __future__ import absolute_import
import logging
import sys
from gslib.bucket_listing_ref import BucketListingObject
from gslib.cloud_api import AccessDeniedException
from gslib.cloud_api import NotFoundException
from gslib.command import Command
from gslib.command_argument import CommandArgument
from gslib.cs_api_map import ApiSelector
from gslib.exception import CommandException
from gslib.exception import InvalidUrlError
from gslib.storage_url import ContainsWildcard
from gslib.storage_url import StorageUrlFromString
from gslib.util import NO_MAX
from gslib.util import PrintFullInfoAboutObject
_SYNOPSIS = """
gsutil stat url...
"""
_DETAILED_HELP_TEXT = ("""
<B>SYNOPSIS</B>
""" + _SYNOPSIS + """
<B>DESCRIPTION</B>
The stat command will output details about the specified object URLs.
It is similar to running:
gsutil ls -L gs://some-bucket/some-object
but is more efficient because it avoids performing bucket listings and gets
the minimum necessary amount of object metadata. Moreover, because it avoids
performing bucket listings (which are eventually consistent) the gsutil stat
command provides a strongly consistent way to check for the existence (and
read the metadata) of an object.
The gsutil stat command will, however, perform bucket listings if you specify
URLs using wildcards.
If run with the gsutil -q option nothing will be printed, e.g.:
gsutil -q stat gs://some-bucket/some-object
This can be useful for writing scripts, because the exit status will be 0 for
an existing object and 1 for a non-existent object.
Note: Unlike the gsutil ls command, the stat command does not support
operations on sub-directories. For example, if you run the command:
gsutil -q stat gs://some-bucket/some-subdir/
gsutil will look for information about an object called "some-subdir/" (with a
trailing slash) inside the bucket "some-bucket", as opposed to operating on
objects nested under gs://some-bucket/some-subdir/. Unless you actually have
an object with that name, the operation will fail. However, you can use the
stat command on objects within subdirectories. For example, this command will
work as expected:
gsutil -q stat gs://some-bucket/some-subdir/file.txt
""")
# TODO: Add ability to stat buckets.
class StatCommand(Command):
"""Implementation of gsutil stat command."""
# Command specification. See base class for documentation.
command_spec = Command.CreateCommandSpec(
'stat',
command_name_aliases=[],
usage_synopsis=_SYNOPSIS,
min_args=1,
max_args=NO_MAX,
supported_sub_args='',
file_url_ok=False,
provider_url_ok=False,
urls_start_arg=0,
gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
gs_default_api=ApiSelector.JSON,
argparse_arguments=[
CommandArgument.MakeZeroOrMoreCloudURLsArgument()
]
)
# Help specification. See help_provider.py for documentation.
help_spec = Command.HelpSpec(
help_name='stat',
help_name_aliases=[],
help_type='command_help',
help_one_line_summary='Display object status',
help_text=_DETAILED_HELP_TEXT,
subcommand_help_text={},
)
def RunCommand(self):
"""Command entry point for stat command."""
# List of fields we'll print for stat objects.
stat_fields = ['updated', 'cacheControl', 'contentDisposition',
'contentEncoding', 'contentLanguage', 'size', 'contentType',
'componentCount', 'metadata', 'crc32c', 'md5Hash', 'etag',
'generation', 'metageneration']
found_nonmatching_arg = False
for url_str in self.args:
arg_matches = 0
url = StorageUrlFromString(url_str)
if not url.IsObject():
raise CommandException('The stat command only works with object URLs')
try:
if ContainsWildcard(url_str):
blr_iter = self.WildcardIterator(url_str).IterObjects(
bucket_listing_fields=stat_fields)
else:
single_obj = self.gsutil_api.GetObjectMetadata(
url.bucket_name, url.object_name, generation=url.generation,
provider=url.scheme, fields=stat_fields)
blr_iter = [BucketListingObject(url, root_object=single_obj)]
for blr in blr_iter:
if blr.IsObject():
arg_matches += 1
if logging.getLogger().isEnabledFor(logging.INFO):
PrintFullInfoAboutObject(blr, incl_acl=False)
except AccessDeniedException:
        sys.stderr.write('You aren\'t authorized to read %s - skipping\n' %
                         url_str)
except InvalidUrlError:
raise
except NotFoundException:
pass
if not arg_matches:
        sys.stderr.write('No URLs matched %s\n' % url_str)
found_nonmatching_arg = True
if found_nonmatching_arg:
return 1
return 0
|
{
"content_hash": "a7c30c53c27cc7681b3106a61baea847",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 80,
"avg_line_length": 35.856115107913666,
"alnum_prop": 0.6936195826645265,
"repo_name": "dtjackson/gsutil",
"id": "4d215f77f79a7c552499aa2aebe69359106e2182",
"size": "5604",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "gslib/commands/stat.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2007547"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TenenciaEntre'
db.create_table(u'monitoreo_tenenciaentre', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=250)),
))
db.send_create_signal(u'monitoreo', ['TenenciaEntre'])
# Adding model 'OpcionesDueno'
db.create_table(u'monitoreo_opcionesdueno', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=250)),
))
db.send_create_signal(u'monitoreo', ['OpcionesDueno'])
# Adding model 'TenenciaFamilia'
db.create_table(u'monitoreo_tenenciafamilia', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nombre', self.gf('django.db.models.fields.CharField')(max_length=250)),
))
db.send_create_signal(u'monitoreo', ['TenenciaFamilia'])
# Deleting field 'TenenciaEntrevistada.dueno'
db.delete_column(u'monitoreo_tenenciaentrevistada', 'dueno')
# Deleting field 'TenenciaEntrevistada.parcela'
db.delete_column(u'monitoreo_tenenciaentrevistada', 'parcela')
# Deleting field 'TenenciaEntrevistada.solar'
db.delete_column(u'monitoreo_tenenciaentrevistada', 'solar')
# Adding M2M table for field parcela on 'TenenciaEntrevistada'
m2m_table_name = db.shorten_name(u'monitoreo_tenenciaentrevistada_parcela')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenenciaentrevistada', models.ForeignKey(orm[u'monitoreo.tenenciaentrevistada'], null=False)),
('tenenciafamilia', models.ForeignKey(orm[u'monitoreo.tenenciafamilia'], null=False))
))
db.create_unique(m2m_table_name, ['tenenciaentrevistada_id', 'tenenciafamilia_id'])
# Adding M2M table for field solar on 'TenenciaEntrevistada'
m2m_table_name = db.shorten_name(u'monitoreo_tenenciaentrevistada_solar')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenenciaentrevistada', models.ForeignKey(orm[u'monitoreo.tenenciaentrevistada'], null=False)),
('tenenciaentre', models.ForeignKey(orm[u'monitoreo.tenenciaentre'], null=False))
))
db.create_unique(m2m_table_name, ['tenenciaentrevistada_id', 'tenenciaentre_id'])
# Adding M2M table for field dueno on 'TenenciaEntrevistada'
m2m_table_name = db.shorten_name(u'monitoreo_tenenciaentrevistada_dueno')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenenciaentrevistada', models.ForeignKey(orm[u'monitoreo.tenenciaentrevistada'], null=False)),
('opcionesdueno', models.ForeignKey(orm[u'monitoreo.opcionesdueno'], null=False))
))
db.create_unique(m2m_table_name, ['tenenciaentrevistada_id', 'opcionesdueno_id'])
# Deleting field 'Tenencia.dueno'
db.delete_column(u'monitoreo_tenencia', 'dueno')
# Deleting field 'Tenencia.parcela'
db.delete_column(u'monitoreo_tenencia', 'parcela')
# Deleting field 'Tenencia.solar'
db.delete_column(u'monitoreo_tenencia', 'solar')
# Adding M2M table for field parcela on 'Tenencia'
m2m_table_name = db.shorten_name(u'monitoreo_tenencia_parcela')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenencia', models.ForeignKey(orm[u'monitoreo.tenencia'], null=False)),
('tenenciafamilia', models.ForeignKey(orm[u'monitoreo.tenenciafamilia'], null=False))
))
db.create_unique(m2m_table_name, ['tenencia_id', 'tenenciafamilia_id'])
# Adding M2M table for field solar on 'Tenencia'
m2m_table_name = db.shorten_name(u'monitoreo_tenencia_solar')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenencia', models.ForeignKey(orm[u'monitoreo.tenencia'], null=False)),
('tenenciaentre', models.ForeignKey(orm[u'monitoreo.tenenciaentre'], null=False))
))
db.create_unique(m2m_table_name, ['tenencia_id', 'tenenciaentre_id'])
# Adding M2M table for field dueno on 'Tenencia'
m2m_table_name = db.shorten_name(u'monitoreo_tenencia_dueno')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('tenencia', models.ForeignKey(orm[u'monitoreo.tenencia'], null=False)),
('opcionesdueno', models.ForeignKey(orm[u'monitoreo.opcionesdueno'], null=False))
))
db.create_unique(m2m_table_name, ['tenencia_id', 'opcionesdueno_id'])
def backwards(self, orm):
# Deleting model 'TenenciaEntre'
db.delete_table(u'monitoreo_tenenciaentre')
# Deleting model 'OpcionesDueno'
db.delete_table(u'monitoreo_opcionesdueno')
# Deleting model 'TenenciaFamilia'
db.delete_table(u'monitoreo_tenenciafamilia')
# Adding field 'TenenciaEntrevistada.dueno'
db.add_column(u'monitoreo_tenenciaentrevistada', 'dueno',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Adding field 'TenenciaEntrevistada.parcela'
db.add_column(u'monitoreo_tenenciaentrevistada', 'parcela',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Adding field 'TenenciaEntrevistada.solar'
db.add_column(u'monitoreo_tenenciaentrevistada', 'solar',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Removing M2M table for field parcela on 'TenenciaEntrevistada'
db.delete_table(db.shorten_name(u'monitoreo_tenenciaentrevistada_parcela'))
# Removing M2M table for field solar on 'TenenciaEntrevistada'
db.delete_table(db.shorten_name(u'monitoreo_tenenciaentrevistada_solar'))
# Removing M2M table for field dueno on 'TenenciaEntrevistada'
db.delete_table(db.shorten_name(u'monitoreo_tenenciaentrevistada_dueno'))
# Adding field 'Tenencia.dueno'
db.add_column(u'monitoreo_tenencia', 'dueno',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Adding field 'Tenencia.parcela'
db.add_column(u'monitoreo_tenencia', 'parcela',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Adding field 'Tenencia.solar'
db.add_column(u'monitoreo_tenencia', 'solar',
self.gf('django.db.models.fields.IntegerField')(default=1),
keep_default=False)
# Removing M2M table for field parcela on 'Tenencia'
db.delete_table(db.shorten_name(u'monitoreo_tenencia_parcela'))
# Removing M2M table for field solar on 'Tenencia'
db.delete_table(db.shorten_name(u'monitoreo_tenencia_solar'))
# Removing M2M table for field dueno on 'Tenencia'
db.delete_table(db.shorten_name(u'monitoreo_tenencia_dueno'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'configuracion.areaaccion': {
'Meta': {'object_name': 'AreaAccion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'configuracion.plataforma': {
'Meta': {'object_name': 'Plataforma'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'sitio_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.SitioAccion']"})
},
u'configuracion.sector': {
'Meta': {'object_name': 'Sector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'configuracion.sitioaccion': {
'Meta': {'object_name': 'SitioAccion'},
'area_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.AreaAccion']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'lugar.comunidad': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Comunidad'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
u'lugar.departamento': {
'Meta': {'ordering': "['nombre']", 'object_name': 'Departamento'},
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'})
},
u'lugar.municipio': {
'Meta': {'ordering': "['departamento__nombre', 'nombre']", 'object_name': 'Municipio'},
'departamento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'extension': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'latitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'longitud': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True'})
},
u'lugar.pais': {
'Meta': {'object_name': 'Pais'},
'codigo': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'mapeo.organizaciones': {
'Meta': {'ordering': "[u'nombre']", 'unique_together': "((u'font_color', u'nombre'),)", 'object_name': 'Organizaciones'},
'area_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.AreaAccion']"}),
'contacto': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'correo_electronico': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'departamento': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'direccion': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'font_color': ('mapeo.models.ColorField', [], {'unique': 'True', 'max_length': '10', 'blank': 'True'}),
'fundacion': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'generalidades': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': (u'sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'municipio': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'plataforma': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.Plataforma']"}),
'rss': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'sector': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.Sector']"}),
'siglas': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'sitio_accion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['configuracion.SitioAccion']"}),
'sitio_web': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'telefono': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'temas': ('ckeditor.fields.RichTextField', [], {'null': 'True', 'blank': 'True'})
},
u'mapeo.persona': {
'Meta': {'object_name': 'Persona'},
'cedula': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'comunidad': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Comunidad']"}),
'departamento': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Departamento']"}),
'edad': ('django.db.models.fields.IntegerField', [], {}),
'finca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipio': ('smart_selects.db_fields.ChainedForeignKey', [], {'to': u"orm['lugar.Municipio']"}),
'nivel_educacion': ('django.db.models.fields.IntegerField', [], {}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizacion': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'org'", 'symmetrical': 'False', 'to': u"orm['mapeo.Organizaciones']"}),
'pais': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lugar.Pais']"}),
'sexo': ('django.db.models.fields.IntegerField', [], {})
},
u'monitoreo.casasolar': {
'Meta': {'object_name': 'CasaSolar'},
'casa': ('django.db.models.fields.IntegerField', [], {}),
'dueno': ('django.db.models.fields.IntegerField', [], {}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Encuesta']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tenencia': ('django.db.models.fields.IntegerField', [], {})
},
u'monitoreo.encuesta': {
'Meta': {'object_name': 'Encuesta'},
'fecha': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jefe': ('django.db.models.fields.IntegerField', [], {}),
'productor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['mapeo.Persona']"}),
'recolector': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Recolector']"}),
'tipo_encuesta': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'monitoreo.opcionesdueno': {
'Meta': {'object_name': 'OpcionesDueno'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'monitoreo.recolector': {
'Meta': {'object_name': 'Recolector'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'monitoreo.tenencia': {
'Meta': {'object_name': 'Tenencia'},
'dueno': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.OpcionesDueno']", 'symmetrical': 'False'}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Encuesta']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parcela': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.TenenciaFamilia']", 'symmetrical': 'False'}),
'solar': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.TenenciaEntre']", 'symmetrical': 'False'})
},
u'monitoreo.tenenciaentre': {
'Meta': {'object_name': 'TenenciaEntre'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'monitoreo.tenenciaentrevistada': {
'Meta': {'object_name': 'TenenciaEntrevistada'},
'dueno': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.OpcionesDueno']", 'symmetrical': 'False'}),
'encuesta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['monitoreo.Encuesta']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parcela': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.TenenciaFamilia']", 'symmetrical': 'False'}),
'solar': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['monitoreo.TenenciaEntre']", 'symmetrical': 'False'})
},
u'monitoreo.tenenciafamilia': {
'Meta': {'object_name': 'TenenciaFamilia'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '250'})
}
}
complete_apps = ['monitoreo']
|
{
"content_hash": "8bd5b0b2bd2424773790d7856f3be559",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 195,
"avg_line_length": 65.28939828080229,
"alnum_prop": 0.580839111735276,
"repo_name": "shiminasai/ciat_plataforma",
"id": "31397010bac945b50fbe48c05eec38cac4d7ca0c",
"size": "22810",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "monitoreo/monitoreo/migrations/0002_auto__add_tenenciaentre__add_opcionesdueno__add_tenenciafamilia__del_f.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "471726"
},
{
"name": "HTML",
"bytes": "1796979"
},
{
"name": "JavaScript",
"bytes": "1492281"
},
{
"name": "Python",
"bytes": "3447075"
}
],
"symlink_target": ""
}
|
from __future__ import division
import numpy as np
import LightFields
# def lf_to_depth(lightfield):
# """
# convert light field data into a 3D volume
# NOT YET IMPLEMENTED
# """
# pass
|
{
"content_hash": "39e74492f328f71d9d35ca0276971e67",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 45,
"avg_line_length": 13.2,
"alnum_prop": 0.6717171717171717,
"repo_name": "thanasi/pylifi",
"id": "ef1acc5f6b24f46b02f20ea8f78e5dd5f1f1ca37",
"size": "384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pylifi/Operators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13988"
}
],
"symlink_target": ""
}
|
import six
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova.objects import instance as obj_instance
from nova.virt import hardware
@base.NovaObjectRegistry.register
class RequestSpec(base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: ImageMeta version 1.6
# Version 1.2: SchedulerRetries version 1.1
# Version 1.3: InstanceGroup version 1.10
# Version 1.4: ImageMeta version 1.7
VERSION = '1.4'
fields = {
'id': fields.IntegerField(),
'image': fields.ObjectField('ImageMeta', nullable=True),
'numa_topology': fields.ObjectField('InstanceNUMATopology',
nullable=True),
'pci_requests': fields.ObjectField('InstancePCIRequests',
nullable=True),
'project_id': fields.StringField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'flavor': fields.ObjectField('Flavor', nullable=False),
'num_instances': fields.IntegerField(default=1),
'ignore_hosts': fields.ListOfStringsField(nullable=True),
'force_hosts': fields.ListOfStringsField(nullable=True),
'force_nodes': fields.ListOfStringsField(nullable=True),
'retry': fields.ObjectField('SchedulerRetries', nullable=True),
'limits': fields.ObjectField('SchedulerLimits', nullable=True),
'instance_group': fields.ObjectField('InstanceGroup', nullable=True),
# NOTE(sbauza): Since hints are depending on running filters, we prefer
# to leave the API correctly validating the hints per the filters and
# just provide to the RequestSpec object a free-form dictionary
'scheduler_hints': fields.DictOfListOfStringsField(nullable=True),
'instance_uuid': fields.UUIDField(),
}
@property
def vcpus(self):
return self.flavor.vcpus
@property
def memory_mb(self):
return self.flavor.memory_mb
@property
def root_gb(self):
return self.flavor.root_gb
@property
def ephemeral_gb(self):
return self.flavor.ephemeral_gb
@property
def swap(self):
return self.flavor.swap
def _image_meta_from_image(self, image):
if isinstance(image, objects.ImageMeta):
self.image = image
elif isinstance(image, dict):
# NOTE(sbauza): Until Nova is fully providing an ImageMeta object
# for getting properties, we still need to hydrate it here
# TODO(sbauza): To be removed once all RequestSpec hydrations are
# done on the conductor side and if the image is an ImageMeta
self.image = objects.ImageMeta.from_dict(image)
else:
self.image = None
def _from_instance(self, instance):
if isinstance(instance, obj_instance.Instance):
# NOTE(sbauza): Instance should normally be a NovaObject...
getter = getattr
elif isinstance(instance, dict):
# NOTE(sbauza): ... but there are some cases where request_spec
# has an instance key as a dictionary, just because
# select_destinations() is getting a request_spec dict made by
# sched_utils.build_request_spec()
# TODO(sbauza): To be removed once all RequestSpec hydrations are
# done on the conductor side
getter = lambda x, y: x.get(y)
else:
# If the instance is None, there is no reason to set the fields
return
instance_fields = ['numa_topology', 'pci_requests', 'uuid',
'project_id', 'availability_zone']
for field in instance_fields:
if field == 'uuid':
setattr(self, 'instance_uuid', getter(instance, field))
elif field == 'pci_requests':
self._from_instance_pci_requests(getter(instance, field))
elif field == 'numa_topology':
self._from_instance_numa_topology(getter(instance, field))
else:
setattr(self, field, getter(instance, field))
def _from_instance_pci_requests(self, pci_requests):
if isinstance(pci_requests, dict):
pci_req_cls = objects.InstancePCIRequests
self.pci_requests = pci_req_cls.from_request_spec_instance_props(
pci_requests)
else:
self.pci_requests = pci_requests
def _from_instance_numa_topology(self, numa_topology):
if isinstance(numa_topology, dict):
self.numa_topology = hardware.instance_topology_from_instance(
dict(numa_topology=numa_topology))
else:
self.numa_topology = numa_topology
def _from_flavor(self, flavor):
if isinstance(flavor, objects.Flavor):
self.flavor = flavor
elif isinstance(flavor, dict):
# NOTE(sbauza): Again, request_spec is primitived by
# sched_utils.build_request_spec() and passed to
# select_destinations() like this
# TODO(sbauza): To be removed once all RequestSpec hydrations are
# done on the conductor side
self.flavor = objects.Flavor(**flavor)
def _from_retry(self, retry_dict):
self.retry = (SchedulerRetries.from_dict(self._context, retry_dict)
if retry_dict else None)
def _populate_group_info(self, filter_properties):
if filter_properties.get('instance_group'):
# New-style group information as a NovaObject, we can directly set
# the field
self.instance_group = filter_properties.get('instance_group')
elif filter_properties.get('group_updated') is True:
# Old-style group information having ugly dict keys containing sets
# NOTE(sbauza): Can be dropped once select_destinations is removed
policies = list(filter_properties.get('group_policies'))
hosts = list(filter_properties.get('group_hosts'))
self.instance_group = objects.InstanceGroup(policies=policies,
hosts=hosts)
            # hosts must not be part of the updates when saving the object
self.instance_group.obj_reset_changes(['hosts'])
else:
# Set the value anyway to avoid any call to obj_attr_is_set for it
self.instance_group = None
def _from_limits(self, limits_dict):
self.limits = SchedulerLimits.from_dict(limits_dict)
def _from_hints(self, hints_dict):
if hints_dict is None:
self.scheduler_hints = None
return
self.scheduler_hints = {
hint: value if isinstance(value, list) else [value]
for hint, value in six.iteritems(hints_dict)}
@classmethod
def from_primitives(cls, context, request_spec, filter_properties):
"""Returns a new RequestSpec object by hydrating it from legacy dicts.
        This helper is not intended to keep the legacy dicts alive in the nova
        codebase; it is just a temporary solution for populating the Spec
        object until we get rid of scheduler_utils' build_request_spec() and
        the filter_properties hydration in the conductor.
:param context: a context object
:param request_spec: An old-style request_spec dictionary
:param filter_properties: An old-style filter_properties dictionary
"""
num_instances = request_spec.get('num_instances', 1)
spec = cls(context, num_instances=num_instances)
# Hydrate from request_spec first
image = request_spec.get('image')
spec._image_meta_from_image(image)
instance = request_spec.get('instance_properties')
spec._from_instance(instance)
flavor = request_spec.get('instance_type')
spec._from_flavor(flavor)
# Hydrate now from filter_properties
spec.ignore_hosts = filter_properties.get('ignore_hosts')
spec.force_hosts = filter_properties.get('force_hosts')
spec.force_nodes = filter_properties.get('force_nodes')
retry = filter_properties.get('retry', {})
spec._from_retry(retry)
limits = filter_properties.get('limits', {})
spec._from_limits(limits)
spec._populate_group_info(filter_properties)
scheduler_hints = filter_properties.get('scheduler_hints', {})
spec._from_hints(scheduler_hints)
return spec
def get_scheduler_hint(self, hint_name, default=None):
"""Convenient helper for accessing a particular scheduler hint since
it is hydrated by putting a single item into a list.
        To reduce complexity, this helper returns the single string if the
        requested hint is a list of exactly one value, and otherwise returns
        the value directly (i.e. the list). If the hint does not exist (or
        scheduler_hints is None), it returns the default value.
:param hint_name: name of the hint
:param default: the default value if the hint is not there
"""
if (not self.obj_attr_is_set('scheduler_hints')
or self.scheduler_hints is None):
return default
hint_val = self.scheduler_hints.get(hint_name, default)
return (hint_val[0] if isinstance(hint_val, list)
and len(hint_val) == 1 else hint_val)
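    # Illustrative behavior (hypothetical hint values):
    #   spec.scheduler_hints = {'group': ['grp1'], 'same_host': ['a', 'b']}
    #   spec.get_scheduler_hint('group')       -> 'grp1'
    #   spec.get_scheduler_hint('same_host')   -> ['a', 'b']
    #   spec.get_scheduler_hint('missing', 0)  -> 0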
def _to_legacy_image(self):
return base.obj_to_primitive(self.image) if (
self.obj_attr_is_set('image') and self.image) else {}
def _to_legacy_instance(self):
# NOTE(sbauza): Since the RequestSpec only persists a few Instance
# fields, we can only return a dict.
instance = {}
instance_fields = ['numa_topology', 'pci_requests',
'project_id', 'availability_zone', 'instance_uuid']
for field in instance_fields:
if not self.obj_attr_is_set(field):
continue
if field == 'instance_uuid':
instance['uuid'] = getattr(self, field)
else:
instance[field] = getattr(self, field)
flavor_fields = ['root_gb', 'ephemeral_gb', 'memory_mb', 'vcpus']
if not self.obj_attr_is_set('flavor'):
return instance
for field in flavor_fields:
instance[field] = getattr(self.flavor, field)
return instance
def _to_legacy_group_info(self):
# NOTE(sbauza): Since this is only needed until the AffinityFilters are
# modified by using directly the RequestSpec object, we need to keep
# the existing dictionary as a primitive.
return {'group_updated': True,
'group_hosts': set(self.instance_group.hosts),
'group_policies': set(self.instance_group.policies)}
def to_legacy_request_spec_dict(self):
"""Returns a legacy request_spec dict from the RequestSpec object.
Since we need to manage backwards compatibility and rolling upgrades
        within our RPC API, we need to provide a helper for converting the
        RequestSpec object into a legacy dict until we
drop support for old Scheduler RPC API versions.
If you don't understand why this method is needed, please don't use it.
"""
req_spec = {}
if not self.obj_attr_is_set('num_instances'):
req_spec['num_instances'] = self.fields['num_instances'].default
else:
req_spec['num_instances'] = self.num_instances
req_spec['image'] = self._to_legacy_image()
req_spec['instance_properties'] = self._to_legacy_instance()
if self.obj_attr_is_set('flavor'):
req_spec['instance_type'] = self.flavor
else:
req_spec['instance_type'] = {}
return req_spec
def to_legacy_filter_properties_dict(self):
"""Returns a legacy filter_properties dict from the RequestSpec object.
Since we need to manage backwards compatibility and rolling upgrades
        within our RPC API, we need to provide a helper for converting the
        RequestSpec object into a legacy dict until we
drop support for old Scheduler RPC API versions.
If you don't understand why this method is needed, please don't use it.
"""
filt_props = {}
if self.obj_attr_is_set('ignore_hosts') and self.ignore_hosts:
filt_props['ignore_hosts'] = self.ignore_hosts
if self.obj_attr_is_set('force_hosts') and self.force_hosts:
filt_props['force_hosts'] = self.force_hosts
if self.obj_attr_is_set('force_nodes') and self.force_nodes:
filt_props['force_nodes'] = self.force_nodes
if self.obj_attr_is_set('retry') and self.retry:
filt_props['retry'] = self.retry.to_dict()
if self.obj_attr_is_set('limits') and self.limits:
filt_props['limits'] = self.limits.to_dict()
if self.obj_attr_is_set('instance_group') and self.instance_group:
filt_props.update(self._to_legacy_group_info())
if self.obj_attr_is_set('scheduler_hints') and self.scheduler_hints:
# NOTE(sbauza): We need to backport all the hints correctly since
# we had to hydrate the field by putting a single item into a list.
filt_props['scheduler_hints'] = {hint: self.get_scheduler_hint(
hint) for hint in self.scheduler_hints}
return filt_props
@base.NovaObjectRegistry.register
class SchedulerRetries(base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: ComputeNodeList version 1.14
VERSION = '1.1'
fields = {
'num_attempts': fields.IntegerField(),
# NOTE(sbauza): Even if we are only using host/node strings, we need to
# know which compute nodes were tried
'hosts': fields.ObjectField('ComputeNodeList'),
}
@classmethod
def from_dict(cls, context, retry_dict):
# NOTE(sbauza): We are not persisting the user context since it's only
# needed for hydrating the Retry object
retry_obj = cls()
        if 'num_attempts' not in retry_dict or 'hosts' not in retry_dict:
# NOTE(sbauza): We prefer to return an empty object if the
# primitive is not good enough
return retry_obj
retry_obj.num_attempts = retry_dict.get('num_attempts')
# NOTE(sbauza): each retry_dict['hosts'] item is a list of [host, node]
computes = [objects.ComputeNode(context=context, host=host,
hypervisor_hostname=node)
for host, node in retry_dict.get('hosts')]
retry_obj.hosts = objects.ComputeNodeList(objects=computes)
return retry_obj
def to_dict(self):
legacy_hosts = [[cn.host, cn.hypervisor_hostname] for cn in self.hosts]
return {'num_attempts': self.num_attempts,
'hosts': legacy_hosts}
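# Illustrative round trip for SchedulerRetries (hypothetical context and
# host/node names):
#   retry = SchedulerRetries.from_dict(ctx, {'num_attempts': 1,
#                                            'hosts': [['host1', 'node1']]})
#   retry.to_dict() -> {'num_attempts': 1, 'hosts': [['host1', 'node1']]}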
@base.NovaObjectRegistry.register
class SchedulerLimits(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'numa_topology': fields.ObjectField('NUMATopologyLimits',
nullable=True,
default=None),
'vcpu': fields.IntegerField(nullable=True, default=None),
'disk_gb': fields.IntegerField(nullable=True, default=None),
'memory_mb': fields.IntegerField(nullable=True, default=None),
}
@classmethod
def from_dict(cls, limits_dict):
limits = cls(**limits_dict)
# NOTE(sbauza): Since the limits can be set for each field or not, we
# prefer to have the fields nullable, but default the value to None.
# Here we accept that the object is always generated from a primitive
# hence the use of obj_set_defaults exceptionally.
limits.obj_set_defaults()
return limits
def to_dict(self):
limits = {}
for field in self.fields:
if getattr(self, field) is not None:
limits[field] = getattr(self, field)
return limits
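# Illustrative round trip for SchedulerLimits: unset fields default to None
# and are dropped again by to_dict(), so only explicitly set keys survive:
#   SchedulerLimits.from_dict({'vcpu': 4}).to_dict() -> {'vcpu': 4}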
|
{
"content_hash": "0d85cb946ef7549a49546ad1d0880965",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 79,
"avg_line_length": 44.50273224043716,
"alnum_prop": 0.6218688605108055,
"repo_name": "devendermishrajio/nova",
"id": "12c6554d68f4121e669e7ad61fca7231870944ac",
"size": "16897",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/objects/request_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16836881"
},
{
"name": "Shell",
"bytes": "24210"
},
{
"name": "Smarty",
"bytes": "351433"
}
],
"symlink_target": ""
}
|
from insights.combiners.krb5 import AllKrb5Conf
from insights.parsers.krb5 import Krb5Configuration
from insights.tests import context_wrap
KRB5CONFIG = """
# Configuration snippets may be placed in this directory as well
includedir /etc/krb5.conf.d/
include /etc/krb5test.conf
module /etc/krb5test.conf:residual
[logging]
default = FILE:/var/log/krb5libs.log
kdc = FILE:/var/log/krb5kdc.log
[realms]
dns_lookup_realm = false
default_ccache_name = KEYRING:persistent:%{uid}
default_ccache_name2 = KEYRING:%{uid}:persistent
kdc_default_options = default.example.com
kdc_default_options = default2.example.com
EXAMPLE.COM = {
kdc = kerberos.example.com
admin_server = kerberos.example.com
auth_to_local = RULE:[1:$1@$0](.*@.*EXAMPLE.ORG)s/@.*//
}
EXAMPLE4.COM = {
kdc = kerberos.example4.com
admin_server = kerberos.example4.com
}
ticket_lifetime = 24h
# renew_lifetime = 7d
# forwardable = true
# rdns = false
""".strip()
KRB5CONFIG2 = """
# Configuration snippets may be placed in this directory as well
[realms]
dns_lookup_realm = false
ticket_lifetime = 24h
# default_ccache_name = KEYRING:persistent:%{uid}
EXAMPLE.COM = {
kdc = kerberos.example.com
kdc = test2.example.com
kdc = test3.example.com
admin_server = kerberos.example.com
}
[logging]
default = FILE:/var/log/krb5libs2.log
kdc = FILE:/var/log/krb5kdc2.log
admin_server = FILE:/var/log/kadmind2.log
[libdefaults]
dnsdsd = false
tilnvs = 24h
default_ccache_name = KEYRING:%{uid}:persistent
EXAMPLE2.COM = {
kdc = kerberos.example2.com
admin_server = kerberos.example2.com
}
EXAMPLE3.COM = {
kdc = kerberos.example3.com
admin_server = kerberos.example3.com *
}
""".strip()
def test_active_krb5_nest():
krb51 = Krb5Configuration(context_wrap(KRB5CONFIG, path='/etc/krb5.conf'))
krb52 = Krb5Configuration(context_wrap(KRB5CONFIG2, path='/etc/krb5.conf.d/test.conf'))
result = AllKrb5Conf([krb51, krb52])
assert result["logging"]["kdc"] == "FILE:/var/log/krb5kdc.log"
assert result.has_option("logging", "admin_server")
assert result["libdefaults"]["EXAMPLE2.COM"]["kdc"] == "kerberos.example2.com"
assert result["libdefaults"]["default_ccache_name"] == "KEYRING:%{uid}:persistent"
assert "realms" in result.sections()
assert "realmstest" not in result.sections()
assert result.has_section("realms")
assert not result.has_option("realms", "nosuchoption")
assert not result.has_option("nosucsection", "nosuchoption")
assert not result.options("realmsno")
assert result.options("logging") == ['default', 'admin_server', 'kdc']
assert result.include == ["/etc/krb5test.conf"]
assert result.includedir == ["/etc/krb5.conf.d/"]
assert result.module == ["/etc/krb5test.conf:residual"]
|
{
"content_hash": "2e620f1e1233ea295b3a44d95f09afff",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 91,
"avg_line_length": 31.10112359550562,
"alnum_prop": 0.7102601156069365,
"repo_name": "wcmitchell/insights-core",
"id": "f3a22315faacf33ce95e1049204cf2f08c4d70a9",
"size": "2768",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "insights/combiners/tests/test_krb5.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "19339"
},
{
"name": "Jupyter Notebook",
"bytes": "91793"
},
{
"name": "Python",
"bytes": "3414025"
},
{
"name": "Shell",
"bytes": "2274"
}
],
"symlink_target": ""
}
|
import os
import django
def populate():
alltags = {
'python': add_tag('python'),
'tutorial': add_tag('tutorial'),
'django': add_tag('django'),
'bottlepy': add_tag('bottlepy'),
'bottle': add_tag('bottle'),
'flask': add_tag('flask')
}
add_link(tags=[alltags['python'], alltags['tutorial']],
title="Official Python Tutorial",
url="http://docs.python.org/2/tutorial/")
add_link(tags=[alltags['python']],
title="How to Think like a Computer Scientist",
url="http://www.greenteapress.com/thinkpython/")
add_link(tags=[alltags['python'], alltags['tutorial']],
title="Learn Python in 10 Minutes",
url="http://www.korokithakis.net/tutorials/python/")
add_link(tags=[alltags['django'], alltags['tutorial']],
title="Official Django Tutorial",
url="https://docs.djangoproject.com/en/1.5/intro/tutorial01/")
add_link(tags=[alltags['django']],
title="Django Rocks",
url="http://www.djangorocks.com/")
add_link(tags=[alltags['django'], alltags['tutorial']],
title="How to Tango with Django",
url="http://www.tangowithdjango.com/")
add_link(tags=[alltags['bottlepy'], alltags['bottle']],
title="Bottle",
url="http://bottlepy.org/docs/dev/")
add_link(tags=[alltags['flask'], alltags['python']],
title="Flask",
url="http://flask.pocoo.org")
# Print out what we have added to the user.
for l in Link.objects.all():
print "Link: {0}".format(str(l))
print l.tags.all()
def add_link(tags, title, url):
l = Link.objects.get_or_create(title=title, url=url)[0]
for tag in tags:
l.tags.add(tag)
return l
def add_tag(name):
t = Tag.objects.get_or_create(name=name)[0]
return t
# Start execution here!
if __name__ == '__main__':
print "Starting Rango population script..."
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bookmarks.settings')
django.setup()
from main.models import Link, Tag
populate()
|
{
"content_hash": "2be875c7453fcbf8b5987da52441531a",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 73,
"avg_line_length": 30.865671641791046,
"alnum_prop": 0.6058994197292069,
"repo_name": "gmeyer1/CMPUT410lab7",
"id": "0fd77a35f5ebab5a7aae136540d2b017664219c7",
"size": "2068",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "bookmarks/populate_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4668"
},
{
"name": "HTML",
"bytes": "8148"
},
{
"name": "JavaScript",
"bytes": "386"
},
{
"name": "Python",
"bytes": "10603"
}
],
"symlink_target": ""
}
|
from dnutils import logs
from .infer import Inference
from .exact import EnumerationAsk
logger = logs.getlogger(__name__)
class IPFPM(Inference):
""" the iterative proportional fitting procedure applied at the model level (IPFP-M) """
def __init__(self, mrf):
# check if there's any soft evidence to actually work on
if len(mrf.getSoftEvidence()) == 0:
            raise Exception("Application of IPFP-M is inappropriate! IPFP-M is a "
                            "wrapper method for other inference algorithms that "
                            "allows fitting probability constraints; applying it "
                            "makes no sense if the model contains no such "
                            "constraints.")
Inference.__init__(self, mrf)
def _infer(self, verbose=True, details=False, fittingMethod=EnumerationAsk, fittingThreshold=1e-3,
fittingSteps=100, fittingParams=None, maxThreshold=None, greedy=False, **args):
# add formulas to the model whose weights we can then fit
if verbose: logger.info("extending model with %d formulas whose weights will be fit..." % len(self.mrf.getSoftEvidence()))
for req in self.mrf.getSoftEvidence():
formula = self.mln.logic.parseFormula(req["expr"])
idxFormula = self.mrf._addFormula(formula, 0.0)
gndFormula = formula.ground(self.mrf, {})
self.mrf._addGroundFormula(gndFormula, idxFormula)
req["gndExpr"] = req["expr"]
req["gndFormula"] = gndFormula
req["idxFormula"] = idxFormula
# do fitting
if fittingParams is None: fittingParams = {}
fittingParams.update(args)
results, self.data = self.mrf._fitProbabilityConstraints(self.mrf.getSoftEvidence(), fittingMethod=fittingMethod,
fittingThreshold=fittingThreshold, fittingSteps=fittingSteps,
given=self.given, queries=self.queries, verbose=details,
fittingParams=fittingParams, maxThreshold=maxThreshold, greedy=greedy)
return results
|
{
"content_hash": "6a59c0dcd7d06167616d904328800047",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 243,
"avg_line_length": 54.425,
"alnum_prop": 0.6132292145153881,
"repo_name": "danielnyga/pracmln",
"id": "74278cf56105d8b2935c1d85b1eb55b831f3e5bd",
"size": "3339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python3/pracmln/mln/inference/ipfpm.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "16327"
},
{
"name": "CMake",
"bytes": "9841"
},
{
"name": "Java",
"bytes": "101"
},
{
"name": "Makefile",
"bytes": "42"
},
{
"name": "Python",
"bytes": "1659815"
},
{
"name": "Shell",
"bytes": "188"
},
{
"name": "TeX",
"bytes": "243"
}
],
"symlink_target": ""
}
|
from msrest.serialization import Model
class SecurityGroupViewResult(Model):
"""The information about security rules applied to the specified VM.
:param network_interfaces: List of network interfaces on the specified VM.
:type network_interfaces:
list[~azure.mgmt.network.v2017_09_01.models.SecurityGroupNetworkInterface]
"""
_attribute_map = {
'network_interfaces': {'key': 'networkInterfaces', 'type': '[SecurityGroupNetworkInterface]'},
}
def __init__(self, *, network_interfaces=None, **kwargs) -> None:
super(SecurityGroupViewResult, self).__init__(**kwargs)
self.network_interfaces = network_interfaces
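    # Illustrative construction (assuming ``nics`` is a list of
    # SecurityGroupNetworkInterface instances):
    #   view = SecurityGroupViewResult(network_interfaces=nics)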
|
{
"content_hash": "05dd70df4c5983cc2f48a5ffdfef8cc3",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 102,
"avg_line_length": 37.27777777777778,
"alnum_prop": 0.7034277198211625,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "f577cd5e908bc9436c5f3a33bfb87e6ae8f87bd4",
"size": "1145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/security_group_view_result_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
}
|
'''
Exception types raised by PyRcon.
'''
class NoResponseError(Exception):
pass
class UsageError(Exception):
pass
class PlayerNotFound(Exception):
pass
class ServerPasswordNotSet(Exception):
pass
|
{
"content_hash": "e7b338e68d4102d3ba74225ca569ebc4",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 38,
"avg_line_length": 12.235294117647058,
"alnum_prop": 0.7211538461538461,
"repo_name": "JnyJny/PyRcon",
"id": "2eecb2e4600e0f1adf92f42491ee209602d43645",
"size": "208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyRcon/Exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28380"
}
],
"symlink_target": ""
}
|
from pysnmp.proto import errind, error
class AbstractAuthenticationService:
serviceID = None
def hashPassphrase(self, authKey):
raise error.ProtocolError(errind.noAuthentication)
def localizeKey(self, authKey, snmpEngineID):
raise error.ProtocolError(errind.noAuthentication)
# 7.2.4.1
def authenticateOutgoingMsg(self, authKey, wholeMsg):
raise error.ProtocolError(errind.noAuthentication)
# 7.2.4.2
def authenticateIncomingMsg(self, authKey, authParameters, wholeMsg):
raise error.ProtocolError(errind.noAuthentication)
|
{
"content_hash": "2ad6c1964ddf27e5ef9ea922a20c97c6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 73,
"avg_line_length": 32.611111111111114,
"alnum_prop": 0.7427597955706985,
"repo_name": "mith1979/ansible_automation",
"id": "4f57559d90288027328bc98fe029f43f4e117793",
"size": "734",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "applied_python/applied_python/lib/python2.7/site-packages/pysnmp/proto/secmod/rfc3414/auth/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1005"
},
{
"name": "C",
"bytes": "84868"
},
{
"name": "CSS",
"bytes": "50289"
},
{
"name": "HTML",
"bytes": "70428"
},
{
"name": "JavaScript",
"bytes": "105262"
},
{
"name": "PowerShell",
"bytes": "51840"
},
{
"name": "Python",
"bytes": "19073705"
},
{
"name": "Shell",
"bytes": "3747"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
}
|
import concurrent
import copy
import functools
import importlib
import itertools
import logging
import os
import sys
logger = logging.getLogger(__name__)
def update(dictionary, other = (), **kwargs):
'''Update ``dictionary`` and return result.
    Uses dict.update, so those semantics apply. We extend that behavior by
returning the resulting dictionary rather than simply relying on in-place
updates.
Parameters
----------
    :``other``: other dictionary to integrate with ``dictionary``; **optional**
:``kwargs``: keys and values to add to ``dictionary``
Return Value(s)
---------------
Updated dictionary with all keys from ``dictionary`` and ``other`` and
``kwargs``.
'''
dictionary.update(other, **kwargs)
return dictionary
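# Example: the updated mapping is returned, so the call can be used inline:
#   update({'a': 1}, {'b': 2}, c = 3) -> {'a': 1, 'b': 2, 'c': 3}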
def fixtures_from_classes(fixture_classes, context = None):
'''Retrieve fixtures for the given classes.
Parameters
----------
:``fixture_classes``: classes whose children are instantiated as fixtures
:``context``: testing context
Return Value(s)
---------------
Instantiated fixture objects.
'''
classes = list(copy.copy(fixture_classes))
fixtures = []
while len(classes):
current = classes.pop()
logger.debug('current: %s', current)
logger.debug('current.__subclasses__(): %s', current.__subclasses__())
if len(current.__subclasses__()):
classes.extend(current.__subclasses__())
elif current.__name__.startswith('f_'):
fixtures.append(current(context))
else:
logger.info('non-fixture leaf class: %s', current)
return fixtures
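# Illustrative class tree (hypothetical names): given class Base with leaf
# subclasses f_db(Base) and f_api(Base), fixtures_from_classes([Base], ctx)
# instantiates f_db and f_api with the given context and returns them.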
def import_directory(module_basename, directory, update_path = False, sort_key = None):
'''Recursively import all modules in a directory.
Parameters
----------
:``module_basename``: module name prefix for loaded modules
:``directory``: directory to recursively import modules from
:``update_path``: if true, system path for modules is updated to include
directory; otherwise, system path is unmodified
:``sort_key``: function to order imports of modules in this directory
'''
    if update_path:
        # only add (and later remove) the directory if it is not already on
        # the path; otherwise the cleanup below would drop a pre-existing entry
        update_path = directory not in sys.path
        if update_path:
            sys.path.append(directory)
logger.info('loading submodules of %s', module_basename)
logger.info('loading modules from %s', directory)
file_names = itertools.chain(*[ [ os.path.join(directory_path, file_name) for file_name in file_names ] for directory_path, directory_names, file_names in os.walk(directory) if len(file_names) ])
module_names = []
for file_name in file_names:
if file_name.endswith('.py'):
name = file_name
name = name.replace(directory, '')
name = name.replace('__init__', '')
name = name.replace('.py', '')
name = name.replace('/', '.')
name = name.strip('.')
if not len(name):
continue
name = module_basename + '.' + name
known_parts = set()
            name = '.'.join([ part for part in name.split('.') if part not in known_parts and not known_parts.add(part) ])
if len(name):
module_names.append(name)
for module_name in sorted(module_names, key = sort_key):
try:
importlib.import_module(module_name)
except ImportError:
logger.exception('NOT loaded %s', module_name)
else:
logger.info('loaded %s', module_name)
if update_path:
sys.path.remove(directory)
def mock(name):
    '''Decorator factory marking a method as the mock for ``name``.
    The wrapped method is skipped when ``name`` appears in the instance's
    ``mocks_mask`` or has already been mocked; otherwise it runs and the
    outcome is recorded as an ``is_mocked_*`` attribute on the instance.
    '''
def _(function):
@functools.wraps(function)
def wrapper(self, *args, **kwargs):
logger.info('STARTING: mock ' + name)
result = False
if name in self.mocks_mask:
logger.info('STOPPING: mock ' + name + '—MASKED')
elif getattr(self, 'is_mocked_' + name.replace('.', '_').strip('_'), False):
result = True
logger.info('STOPPING: mock ' + name + '—EXISTS')
else:
function(self, *args, **kwargs)
result = True
logger.info('STOPPING: mock ' + name)
setattr(self, 'is_mocked_' + name.replace('.', '_').strip('_'), result)
return result
return wrapper
return _
def wrap_in_future(thing):
'''Wrap thing in a future.
Arguments
---------
:``thing``: thing to be wrapped in future
Return Value(s)
---------------
future whose result is thing.
'''
future = concurrent.futures.Future()
future.set_result(thing)
return future
|
{
"content_hash": "1d17bf7789dcae75df47b15029ba8c19",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 199,
"avg_line_length": 26.331491712707184,
"alnum_prop": 0.5778430549727235,
"repo_name": "alunduil/muniments",
"id": "29b898705a178600fd351f4335ed4bc1487b5cb9",
"size": "4979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_muniments/test_helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60386"
}
],
"symlink_target": ""
}
|
import sys
import os
__all__ = [u'project_configuration']
class LazyConfiguration(object):
def __init__(self):
self._configuration = None
    def _get_configuration_class(self):
from jetee.runtime.app import dispatcher
sys.path.insert(0, os.getcwd())
try:
configuration_module = __import__(dispatcher.args.configuration_module)
except ImportError, e:
if e.message == u'No module named {}'.format(dispatcher.args.configuration_module):
print(u'Configuration module "{}" not found, make sure it is in sys.path.'.format(
dispatcher.args.configuration_module))
exit()
raise
try:
project_configuration_class = getattr(configuration_module, dispatcher.args.configuration_name)
except AttributeError:
print(u'Cannot find configuration class "{}" in "{}" module.'.format(dispatcher.args.configuration_name,
dispatcher.args.configuration_module))
exit()
else:
return project_configuration_class
def set_configuration(self, configuration_class):
self._configuration = configuration_class()
def __getattr__(self, item):
if self._configuration is None:
            self.set_configuration(self._get_configuration_class())
return getattr(self._configuration, item)
project_configuration = LazyConfiguration()
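# Illustrative access (the attribute name is hypothetical): the configuration
# class is resolved from the dispatcher arguments on first attribute lookup:
#   project_configuration.project_name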
|
{
"content_hash": "2a85e4093c3d48db24a29571abe39fba",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 119,
"avg_line_length": 37.073170731707314,
"alnum_prop": 0.6059210526315789,
"repo_name": "WhackoJacko/Jetee",
"id": "cd7cc014f03cf617089dd168fdc09663a296a001",
"size": "1520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jetee/runtime/configuration.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "82508"
}
],
"symlink_target": ""
}
|
__author__ = 'satish'
from xml.dom import minidom
import pickle
'''
Feature List:
title
gd:feedLink --- countHint
media:description
yt:duration --- seconds
gd:rating --- average --- max --- numRaters
yt:statistics (--- favoriteCount) --- viewCount
yt:rating --- numDislikes --- numLikes
Feature Tuple:
(title,mediaDescription,
(ratingAverage,ratingMax,ratingnNumRaters),(statisticsViewCount,ratingNumDislikes,ratingNumLikes),
countHint,durationSeconds)
And comments from a different location
'''
path = "./"
def save_obj(obj, name ):
with open( name + '.pkl', 'wb') as f:
pickle.dump(obj, f, protocol=2)
def load_obj(name ):
with open( name + '.pkl', 'rb') as f:
return pickle.load(f)
RankedIDs = load_obj("rankedVidIds")
DesignMatrix = dict()
title = ""
mediaDescription = ""
comment = []
for ID in RankedIDs:
    comment = []  # reset per ID so a failed comment parse cannot reuse stale data
try:
doc = minidom.parse(path + "Data/Meta/"+ID.strip()+".txt")
# Title from Meta
title = doc.getElementsByTagName("title")[0].firstChild.nodeValue
# Description
try:
mediaDescription = doc.getElementsByTagName("media:description")[0].firstChild.nodeValue
except:
mediaDescription = "NONE"
except:
print ('No Title :(')
print("Trying Comments ! ")
title = "NONE"
mediaDescription = "NONE"
try:
com = minidom.parse(path + "Data/Comments/"+ID.strip()+".txt")
# Comments
comment = [c.firstChild.nodeValue for c in com.getElementsByTagName("content")]
except:
print("No Comments :(")
if title == "NONE" and mediaDescription == "NONE":
print("Nothing :O, SKIP")
print()
continue
DesignMatrix[ID] = [title,mediaDescription,comment]
print("Got !")
print()
print(len(DesignMatrix))
save_obj(DesignMatrix,"DesignMatrix")
|
{
"content_hash": "2ef822a942fa1a650199a1612f54e064",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 100,
"avg_line_length": 23.974683544303797,
"alnum_prop": 0.6246040126715945,
"repo_name": "tpsatish95/Youtube-Comedy-Comparison",
"id": "8ad3e6a92825c275930e38e8dc15c3215d4b0766",
"size": "1894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Project/getFeatures.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "112935"
}
],
"symlink_target": ""
}
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def ClusterConfigSpec(vim, *args, **kwargs):
'''A complete cluster configuration. All fields are defined as optional. In case
of a reconfiguration, unset fields are unchanged.'''
obj = vim.client.factory.create('ns0:ClusterConfigSpec')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'dasConfig', 'dasVmConfigSpec', 'drsConfig', 'drsVmConfigSpec', 'rulesSpec',
'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
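# Illustrative call (assuming a connected pyvisdk ``vim`` instance and a
# previously built ``das_config`` data object):
#   spec = ClusterConfigSpec(vim, dasConfig=das_config)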
|
{
"content_hash": "f4ec2f43bee4d1f0340111cd74542752",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 124,
"avg_line_length": 33.88235294117647,
"alnum_prop": 0.609375,
"repo_name": "xuru/pyvisdk",
"id": "6b20f8126884936df9f05ab161770766fbb0a60d",
"size": "1153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyvisdk/do/cluster_config_spec.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "369"
},
{
"name": "Python",
"bytes": "3037849"
},
{
"name": "Shell",
"bytes": "4517"
}
],
"symlink_target": ""
}
|
import json, requests, hashlib
from requests.auth import HTTPBasicAuth
config = json.load(open("config.json"))
philds = {
"email_addr":"emails",
"phone_num":"phones",
"bitcoin_addr":"bitcoins",
"pgp_key_ls":"pgps",
"pgp_key_hash": "pgp hashes",
"org": "organizations",
"person_name":"people",
"gpe": "gpes",
"pgp_email_addr":"pgp emails",
"ssn_num":"ssns",
"onion_appearance":"onions"
}
def star_search(data):
try:
headers = {'content-type': 'application/json'}
resp = requests.post(config["star_search_url"], auth=HTTPBasicAuth(config["star_search_auth_u"], config["star_search_auth_p"]),data = json.dumps(data), verify=False, headers=headers)
results = json.loads(json.JSONDecoder().decode(resp.text))
ret_results = {}
for field in philds:
temp_res = results.get(field,{}).keys()
temp_res = list(map(lambda x: str(x.replace("u'","").replace("'","")),temp_res))
ret_results[philds[field]] = list(map(lambda x:{"id":hashlib.md5(x).hexdigest(),"value":x},temp_res))
return ret_results
except:
return {}
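# Illustrative result shape (hypothetical values): each matched field is keyed
# by its human-readable name from ``philds``, e.g.
#   {"emails": [{"id": "<md5 of value>", "value": "jdoe@example.com"}],
#    "phones": [], ...}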
|
{
"content_hash": "76b6436c264b798bbb0a54a1bbfb21b4",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 188,
"avg_line_length": 34.375,
"alnum_prop": 0.6318181818181818,
"repo_name": "jgawrilo/butler_server",
"id": "9bfc55936af293eeee588f7960de830bbea314d6",
"size": "1100",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sri_service.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "76798"
},
{
"name": "Shell",
"bytes": "29"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
try: # new import added in Django 1.7
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.fields import GenericRelation
except ImportError:
from django.contrib.contenttypes import generic
GenericForeignKey = generic.GenericForeignKey
GenericRelation = generic.GenericRelation
import dumper
class LoggingModel(models.Model):
text = models.CharField(max_length=200)
def __unicode__(self):
return self.text
class SimpleModel(models.Model):
slug = models.CharField(max_length=200, default='slug')
def get_absolute_url(self):
return reverse('simple-detail', kwargs={'slug': self.slug})
def dependent_paths(self):
yield self.get_absolute_url()
for model in self.related_set.all():
yield model.get_absolute_url()
class RelatedModel(models.Model):
slug = models.CharField(max_length=200, default='slug')
related = models.ManyToManyField(SimpleModel, related_name='related_set')
def dependent_paths(self):
yield self.get_absolute_url()
def get_absolute_url(self):
return reverse('related-detail', kwargs={'slug': self.slug})
class GenericRelationModel(models.Model):
slug = models.CharField(max_length=200, default='slug')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def dependent_paths(self):
yield self.content_object.get_absolute_url()
class RelatedToGenericModel(models.Model):
slug = models.CharField(max_length=200, default='slug')
generic_related = GenericRelation(GenericRelationModel)
def get_absolute_url(self):
return reverse('related-to-generic-detail', kwargs={'slug': self.slug})
class GenericRelationNotRegisteredModel(models.Model):
slug = models.CharField(max_length=200, default='slug')
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def dependent_paths(self):
pass
dumper.register(SimpleModel)
dumper.register(RelatedModel)
dumper.register(GenericRelationModel)
|
{
"content_hash": "9025fe198e4288f27c3fb9ae20365cb6",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 31.32894736842105,
"alnum_prop": 0.7303653926921462,
"repo_name": "saulshanabrook/django-dumper",
"id": "8d5580d54e3396f66169806cc2638644edb49513",
"size": "2381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24004"
}
],
"symlink_target": ""
}
|
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def check_array_result(object_array, to_match, expected):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
class ListTransactionsTest(BitcoinTestFramework):
def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
check_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
check_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
check_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = { self.nodes[0].getnewaddress() : 0.11,
self.nodes[1].getnewaddress() : 0.22,
self.nodes[0].getaccountaddress("") : 0.33,
self.nodes[1].getaccountaddress("") : 0.44 }
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
check_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : ""} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
check_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : ""} )
if __name__ == '__main__':
ListTransactionsTest().main()
|
{
"content_hash": "25bf9bed75513c1bdbcf8b5125c00af8",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 105,
"avg_line_length": 48.37362637362637,
"alnum_prop": 0.5070422535211268,
"repo_name": "bitcoinsSG/zcash",
"id": "4df8d795d051004b5ee32a245b9a1379f9464e50",
"size": "4649",
"binary": false,
"copies": "2",
"ref": "refs/heads/zc.v0.11.2.latest",
"path": "qa/rpc-tests/listtransactions.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "432675"
},
{
"name": "C++",
"bytes": "4662462"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "137046"
},
{
"name": "Makefile",
"bytes": "85469"
},
{
"name": "Objective-C",
"bytes": "3277"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "454390"
},
{
"name": "QMake",
"bytes": "2019"
},
{
"name": "Shell",
"bytes": "82747"
}
],
"symlink_target": ""
}
|
from django.db import transaction
from django.dispatch import Signal
try:
import thread
except ImportError:
import dummy_thread as thread
class BadlyBehavedTransactionSignalHandlerError(Exception):
'''
Exception raised when a post_commit or post_rollback handler updates the
current transaction and doesn't perform its own commit/rollback. This is
usually easily mitigated by using a wrapper like @commit_on_success.
Also see the defer() function in this module for another approach that
avoids this error.
'''
pass
class ThreadSignals(object):
def __init__(self):
self.post_commit = Signal()
self.post_rollback = Signal()
class TransactionSignals(object):
signals = {}
def _has_signals(self):
thread_ident = thread.get_ident()
return thread_ident in self.signals
def _init_signals(self):
thread_ident = thread.get_ident()
assert thread_ident not in self.signals
self.signals[thread_ident] = ThreadSignals()
return self.signals[thread_ident]
def _remove_signals(self):
thread_ident = thread.get_ident()
assert thread_ident in self.signals
del self.signals[thread_ident]
def _get_signals(self):
thread_ident = thread.get_ident()
assert thread_ident in self.signals
return self.signals[thread_ident]
def _get_or_init_signals(self):
if self._has_signals():
return self._get_signals()
else:
return self._init_signals()
def _send_post_commit(self):
if self._has_signals():
_signals = self._get_signals()
self._remove_signals()
_signals.post_commit.send(sender=transaction)
# Take care of badly behaved signal handlers that have
# dirtied the transaction without committing properly
if transaction.is_dirty():
raise BadlyBehavedTransactionSignalHandlerError
def _send_post_rollback(self):
if self._has_signals():
_signals = self._get_signals()
self._remove_signals()
_signals.post_rollback.send(sender=transaction)
# Take care of badly behaved signal handlers that have
# dirtied the transaction without committing properly
if transaction.is_dirty():
raise BadlyBehavedTransactionSignalHandlerError
def _on_exit_without_update(self):
'''
Clear signals on transaction exit, even if neither commit nor rollback
happened.
'''
if self._has_signals():
self._remove_signals()
@property
def post_commit(self):
return self._get_or_init_signals().post_commit
@property
def post_rollback(self):
return self._get_or_init_signals().post_rollback
transaction.signals = TransactionSignals()
def managed(*args, **kwargs):
to_commit = False
flag = kwargs.get('flag', True)
if not flag and transaction.is_dirty():
to_commit = True
old_managed(*args, **kwargs)
if to_commit:
transaction.signals._send_post_commit()
else:
transaction.signals._on_exit_without_update()
old_managed = transaction.managed
transaction.managed = managed
def commit_unless_managed(*args, **kwargs):
old_commit_unless_managed(*args, **kwargs)
if not transaction.is_managed():
transaction.signals._send_post_commit()
old_commit_unless_managed = transaction.commit_unless_managed
transaction.commit_unless_managed = commit_unless_managed
def rollback_unless_managed(*args, **kwargs):
old_rollback_unless_managed(*args, **kwargs)
if not transaction.is_managed():
transaction.signals._send_post_rollback()
old_rollback_unless_managed = transaction.rollback_unless_managed
transaction.rollback_unless_managed = rollback_unless_managed
# If post_commit or post_rollback signal handlers put the transaction in a
# dirty state, they must handle their own commits/rollbacks.
def commit(*args, **kwargs):
old_commit(*args, **kwargs)
transaction.signals._send_post_commit()
old_commit = transaction.commit
transaction.commit = commit
def rollback(*args, **kwargs):
old_rollback(*args, **kwargs)
transaction.signals._send_post_rollback()
old_rollback = transaction.rollback
transaction.rollback = rollback
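# Illustrative handler registration (the handler name is hypothetical):
#   def log_commit(sender, **kwargs):
#       print 'transaction committed'
#   transaction.signals.post_commit.connect(log_commit, weak=False)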
def defer(f, *args, **kwargs):
'''
Wrapper that defers a function's execution until the current transaction
commits, if a transaction is active. Otherwise, executes as usual. Note
that a deferred function will NOT be called if the transaction completes
without committing (e.g. when transaction.is_dirty() is False upon exiting
the transaction).
An implicit assumption is that a deferred function does not return an
important value, since there is no way to retrieve the return value in
the normal execution order.
Before being connected to the 'post_commit' signal of an existing managed
transaction, the deferred function is wrapped by the @commit_on_success
decorator to ensure that it behaves properly by committing or rolling back
any updates it makes to a current transaction.
>>> def log_success(msg):
>>> print 'logging success'
>>> LOG.info(msg)
>>>
>>> @transaction.commit_on_success
>>> def transactional_update(value)
>>> print 'starting transaction'
>>> ... perform update ...
>>> defer(log_success, 'The transaction was successful')
>>> print 'finishing transaction'
>>>
>>> transactional_update('foo')
... starting transaction
... finishing transaction
... logging success
'''
if transaction.is_managed():
@transaction.commit_on_success
def f_deferred(*a, **kw):
f(*args, **kwargs)
transaction.signals.post_commit.connect(f_deferred, weak=False)
else:
f(*args, **kwargs)
|
{
"content_hash": "b7da04c7b8f7b9716aac07e95a5f6ee2",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 78,
"avg_line_length": 32.41847826086956,
"alnum_prop": 0.667393126571668,
"repo_name": "davehughes/django-transaction-signals",
"id": "68921ace0aac8988c1cc0cf36a5bf91f81e2ed29",
"size": "6191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_transaction_signals/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6558"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# Create your models here.
class StorageLocation(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Category(models.Model):
class Meta:
verbose_name_plural = "categories"
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Unit(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Conversion(models.Model):
firstUnit = models.ForeignKey(Unit, related_name='first_unit')
secondUnit = models.ForeignKey(Unit, related_name='second_unit')
ratio = models.FloatField()
class Food(models.Model):
name = models.CharField(max_length=50)
category = models.ManyToManyField(Category)
# If we break food up, so you can have multiple inventories of a food type
# this is a natural breaking point
location = models.ForeignKey(StorageLocation)
quantity = models.IntegerField(default=1)
unit = models.ForeignKey(Unit)
best_by = models.DateField(blank=True, null=True)
def __str__(self):
return self.name
|
{
"content_hash": "4067fd7ea82d08c5753c7e55ec493ff9",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 78,
"avg_line_length": 27.795454545454547,
"alnum_prop": 0.6925592804578904,
"repo_name": "Feasoron/red-squirrel",
"id": "3b7efab4eea6df9b2fe465b0ea498660605a2e1f",
"size": "1223",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "red_squirrel/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "9784"
}
],
"symlink_target": ""
}
|
"""
Code for computing edit distances.
"""
import sys
import operator
from typing import Optional, Sequence
INSERT: str = "insert"
DELETE: str = "delete"
EQUAL: str = "equal"
REPLACE: str = "replace"
# The 'cost' argument is 0 when the two elements match and 1 when they do not.
# The other numbers are cumulative costs and match counts.
def lowest_cost_action(ic, dc, sc, im, dm, sm, cost) -> str:
"""Given the following values, choose the action (insertion, deletion,
    or substitution) that results in the lowest cost (ties are broken using
the 'match' score). This is used within the dynamic programming algorithm.
* ic - insertion cost
* dc - deletion cost
* sc - substitution cost
* im - insertion match (score)
* dm - deletion match (score)
* sm - substitution match (score)
"""
best_action = None
best_match_count = -1
min_cost = min(ic, dc, sc)
if min_cost == sc and cost == 0:
best_action = EQUAL
best_match_count = sm
elif min_cost == sc and cost == 1:
best_action = REPLACE
best_match_count = sm
elif min_cost == ic and im > best_match_count:
best_action = INSERT
best_match_count = im
elif min_cost == dc and dm > best_match_count:
best_action = DELETE
best_match_count = dm
else:
raise Exception("internal error: invalid lowest cost action")
return best_action
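# Illustrative call: substitution is the cheapest action and the elements
# match (cost == 0), so the chosen action is EQUAL:
#   lowest_cost_action(2, 2, 1, 0, 0, 1, 0) -> 'equal'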
def highest_match_action(ic, dc, sc, im, dm, sm, cost) -> str:
"""Given the following values, choose the action (insertion, deletion, or
    substitution) that results in the highest match score (ties are broken
using the distance values). This is used within the dynamic programming
algorithm.
* ic - insertion cost
* dc - deletion cost
* sc - substitution cost
* im - insertion match (score)
* dm - deletion match (score)
* sm - substitution match (score)
"""
best_action = None
lowest_cost = float("inf")
max_match = max(im, dm, sm)
if max_match == sm and cost == 0:
best_action = EQUAL
lowest_cost = sm
elif max_match == sm and cost == 1:
best_action = REPLACE
lowest_cost = sm
elif max_match == im and ic < lowest_cost:
best_action = INSERT
lowest_cost = ic
elif max_match == dm and dc < lowest_cost:
best_action = DELETE
lowest_cost = dc
else:
raise Exception("internal error: invalid highest match action")
return best_action
class SequenceMatcher(object):
"""
Similar to the :py:mod:`difflib` :py:class:`~difflib.SequenceMatcher`, but
uses Levenshtein/edit distance.
"""
def __init__(
self,
a: Optional[Sequence] = None,
b: Optional[Sequence] = None,
test=operator.eq,
action_function=lowest_cost_action,
):
"""
Initialize the object with sequences a and b. Optionally, one can
specify a test function that is used to compare sequence elements. This
defaults to the built in ``eq`` operator (i.e. :py:func:`operator.eq`).
"""
if a is None:
a = []
if b is None:
b = []
self.seq1 = a
self.seq2 = b
self._reset_object()
self.action_function = action_function
self.test = test
self.dist = None
self._matches = None
self.opcodes = None
def set_seqs(self, a: Sequence, b: Sequence) -> None:
"""Specify two alternative sequences -- reset any cached values."""
self.set_seq1(a)
self.set_seq2(b)
self._reset_object()
def _reset_object(self) -> None:
"""Clear out the cached values for distance, matches, and opcodes."""
self.opcodes = None
self.dist = None
self._matches = None
def set_seq1(self, a: Sequence) -> None:
"""Specify a new sequence for sequence 1, resetting cached values."""
self._reset_object()
self.seq1 = a
def set_seq2(self, b: Sequence) -> None:
"""Specify a new sequence for sequence 2, resetting cached values."""
self._reset_object()
self.seq2 = b
def find_longest_match(self, alo, ahi, blo, bhi) -> None:
"""Not implemented!"""
raise NotImplementedError()
def get_matching_blocks(self):
"""Similar to :py:meth:`get_opcodes`, but returns only the opcodes that are
equal and returns them in a somewhat different format
(i.e. ``(i, j, n)`` )."""
opcodes = self.get_opcodes()
match_opcodes = filter(lambda x: x[0] == EQUAL, opcodes)
return map(
lambda opcode: [opcode[1], opcode[3], opcode[2] - opcode[1]], match_opcodes
)
def get_opcodes(self):
"""Returns a list of opcodes. Opcodes are the same as defined by
:py:mod:`difflib`."""
if not self.opcodes:
d, m, opcodes = edit_distance_backpointer(
self.seq1,
self.seq2,
action_function=self.action_function,
test=self.test,
)
if self.dist:
assert d == self.dist
if self._matches:
assert m == self._matches
self.dist = d
self._matches = m
self.opcodes = opcodes
return self.opcodes
def get_grouped_opcodes(self, n=None):
"""Not implemented!"""
raise NotImplementedError()
def ratio(self) -> float:
"""Ratio of matches to the average sequence length."""
return 2.0 * self.matches() / (len(self.seq1) + len(self.seq2))
def quick_ratio(self) -> float:
"""Same as :py:meth:`ratio`."""
return self.ratio()
def real_quick_ratio(self) -> float:
"""Same as :py:meth:`ratio`."""
return self.ratio()
def _compute_distance_fast(self) -> None:
"""Calls edit_distance, and asserts that if we already have values for
matches and distance, that they match."""
d, m = edit_distance(
self.seq1, self.seq2, action_function=self.action_function, test=self.test
)
if self.dist:
assert d == self.dist
if self._matches:
assert m == self._matches
self.dist = d
self._matches = m
def distance(self):
"""Returns the edit distance of the two loaded sequences. This should
be a little faster than getting the same information from
:py:meth:`get_opcodes`."""
if not self.dist:
self._compute_distance_fast()
return self.dist
def matches(self):
"""Returns the number of matches in the alignment of the two sequences.
This should be a little faster than getting the same information from
:py:meth:`get_opcodes`."""
if not self._matches:
self._compute_distance_fast()
return self._matches
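# A minimal usage sketch (illustrative values, not part of the original API):
# SequenceMatcher mirrors the difflib interface but measures Levenshtein
# distance. One substitution ('b' -> 'x') gives distance 1 and two matches.
def _sequence_matcher_example():
    sm = SequenceMatcher(a=["a", "b", "c"], b=["a", "x", "c"])
    assert sm.distance() == 1
    assert sm.matches() == 2
    # Opcodes use the difflib convention: [tag, i1, i2, j1, j2].
    return sm.get_opcodes()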
def edit_distance(
seq1: Sequence, seq2: Sequence, action_function=lowest_cost_action, test=operator.eq
):
"""
Computes the edit distance between the two given sequences. This uses the
relatively fast method that only constructs two columns of the 2d array
for edits. This function actually uses four columns because we track the
number of matches too.
"""
m = len(seq1)
n = len(seq2)
# Special, easy cases:
if seq1 == seq2:
return 0, n
if m == 0:
return n, 0
if n == 0:
return m, 0
v0 = [0] * (n + 1) # The two 'error' columns
v1 = [0] * (n + 1)
m0 = [0] * (n + 1) # The two 'match' columns
m1 = [0] * (n + 1)
for i in range(1, n + 1):
v0[i] = i
for i in range(1, m + 1):
v1[0] = i
for j in range(1, n + 1):
cost = 0 if test(seq1[i - 1], seq2[j - 1]) else 1
# The costs
ins_cost = v1[j - 1] + 1
del_cost = v0[j] + 1
sub_cost = v0[j - 1] + cost
# Match counts
ins_match = m1[j - 1]
del_match = m0[j]
sub_match = m0[j - 1] + int(not cost)
action = action_function(
ins_cost, del_cost, sub_cost, ins_match, del_match, sub_match, cost
)
if action in [EQUAL, REPLACE]:
v1[j] = sub_cost
m1[j] = sub_match
elif action == INSERT:
v1[j] = ins_cost
m1[j] = ins_match
elif action == DELETE:
v1[j] = del_cost
m1[j] = del_match
else:
raise Exception("Invalid dynamic programming option returned!")
# Copy the columns over
for k in range(n + 1):
v0[k] = v1[k]
m0[k] = m1[k]
return v1[n], m1[n]
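# A hedged check of the two-column routine above, using the classic
# "kitten" -> "sitting" example: distance 3, with 4 aligned matches
# ('i', 't', 't', 'n') along the optimal path.
def _edit_distance_example():
    dist, matches = edit_distance(list("kitten"), list("sitting"))
    assert (dist, matches) == (3, 4)
    return dist, matches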
def edit_distance_backpointer(
seq1, seq2, action_function=lowest_cost_action, test=operator.eq
):
"""
Similar to :py:func:`~edit_distance.edit_distance` except that this
function keeps backpointers during the search. This allows us to return
the opcodes (i.e. the specific edits that were used to change from one
    string to another). This function constructs the full 2d array for the
backpointers only.
"""
m: int = len(seq1)
n: int = len(seq2)
# backpointer array:
bp = [[None for _ in range(n + 1)] for _ in range(m + 1)]
# Two columns of the distance and match arrays
d0 = [0] * (n + 1) # The two 'distance' columns
d1 = [0] * (n + 1)
m0 = [0] * (n + 1) # The two 'match' columns
m1 = [0] * (n + 1)
# Fill in the first column
for i in range(1, n + 1):
d0[i] = i
bp[0][i] = INSERT
for i in range(1, m + 1):
d1[0] = i
bp[i][0] = DELETE
for j in range(1, n + 1):
cost = 0 if test(seq1[i - 1], seq2[j - 1]) else 1
# The costs of each action...
ins_cost = d1[j - 1] + 1 # insertion
del_cost = d0[j] + 1 # deletion
sub_cost = d0[j - 1] + cost # substitution/match
# The match scores of each action
ins_match = m1[j - 1]
del_match = m0[j]
sub_match = m0[j - 1] + int(not cost)
action = action_function(
ins_cost, del_cost, sub_cost, ins_match, del_match, sub_match, cost
)
if action == EQUAL:
d1[j] = sub_cost
m1[j] = sub_match
bp[i][j] = EQUAL
elif action == REPLACE:
d1[j] = sub_cost
m1[j] = sub_match
bp[i][j] = REPLACE
elif action == INSERT:
d1[j] = ins_cost
m1[j] = ins_match
bp[i][j] = INSERT
elif action == DELETE:
d1[j] = del_cost
m1[j] = del_match
bp[i][j] = DELETE
else:
raise Exception("Invalid dynamic programming action returned!")
# copy over the columns
for k in range(n + 1):
d0[k] = d1[k]
m0[k] = m1[k]
opcodes = get_opcodes_from_bp_table(bp)
return d1[n], m1[n], opcodes
def get_opcodes_from_bp_table(bp):
"""Given a 2d list structure, create opcodes from the best path."""
x = len(bp) - 1
y = len(bp[0]) - 1
opcodes = []
while x != 0 or y != 0:
this_bp = bp[x][y]
if this_bp in [EQUAL, REPLACE]:
opcodes.append([this_bp, max(x - 1, 0), x, max(y - 1, 0), y])
x = x - 1
y = y - 1
elif this_bp == INSERT:
opcodes.append([INSERT, x, x, max(y - 1, 0), y])
y = y - 1
elif this_bp == DELETE:
opcodes.append([DELETE, max(x - 1, 0), x, max(y - 1, 0), max(y - 1, 0)])
x = x - 1
else:
raise Exception("Invalid dynamic programming action in BP table!")
opcodes.reverse()
return opcodes
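# An illustrative walk-through (assumed inputs) of the backpointer variant:
# the opcodes spell out the individual edits in order.
def _backpointer_example():
    dist, matches, opcodes = edit_distance_backpointer(list("abc"), list("axc"))
    assert dist == 1 and matches == 2
    # Expected shape: equal 'a', replace 'b' -> 'x', equal 'c'.
    assert [op[0] for op in opcodes] == [EQUAL, REPLACE, EQUAL]
    return opcodes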
def main() -> int:
"""Read two files line-by-line and print edit distances between each pair
of lines. Will terminate at the end of the shorter of the two files."""
if len(sys.argv) != 3:
print(f"Usage: {sys.argv[0]} <file1> <file2>")
        sys.exit(-1)
file1 = sys.argv[1]
file2 = sys.argv[2]
with open(file1) as f1, open(file2) as f2:
for line1, line2 in zip(f1, f2):
print(f"Line 1: {line1.strip()}")
print(f"Line 2: {line2.strip()}")
dist, _, _ = edit_distance_backpointer(line1.split(), line2.split())
print(f"Distance: {dist}")
print("=" * 80)
return 0
if __name__ == "__main__":
main()
|
{
"content_hash": "1ebfe3e77e3987e99a66cbfc5af6068c",
"timestamp": "",
"source": "github",
"line_count": 401,
"max_line_length": 88,
"avg_line_length": 31.600997506234414,
"alnum_prop": 0.5420612373737373,
"repo_name": "belambert/edit-distance",
"id": "6119bc1ea1e8a93c930b0667d9198ef903d02a37",
"size": "13274",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "edit_distance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "99023"
}
],
"symlink_target": ""
}
|
from nailgun.db.sqlalchemy.models.cluster_plugin_link import ClusterPluginLink
from nailgun.test.base import BaseIntegrationTest
from nailgun.utils import reverse
from oslo_serialization import jsonutils
class TestHandlers(BaseIntegrationTest):
def setUp(self):
super(TestHandlers, self).setUp()
self.cluster = self.env.create_cluster(api=False)
self.cluster_plugin_link = self.env \
.create_cluster_plugin_link(cluster_id=self.cluster.id)
def test_cluster_plugin_link_update(self):
cluster_plugin_link_update = {
'title': 'new title 2',
'description': 'new description 2'
}
resp = self.app.put(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': self.cluster_plugin_link.id}
),
jsonutils.dumps(cluster_plugin_link_update),
headers=self.default_headers
)
self.assertEqual(self.cluster_plugin_link.id, resp.json_body['id'])
self.assertEqual('new title 2', resp.json_body['title'])
self.assertEqual('new description 2', resp.json_body['description'])
self.assertEqual(self.cluster_plugin_link.url, resp.json_body['url'])
def test_cluster_plugin_link_get_with_cluster(self):
resp = self.app.get(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': self.cluster_plugin_link.id}
),
headers=self.default_headers
)
self.assertEqual(200, resp.status_code)
self.assertEqual(self.cluster_plugin_link.id, resp.json_body['id'])
self.assertEqual(self.cluster_plugin_link.title,
resp.json_body['title'])
self.assertEqual(self.cluster_plugin_link.url, resp.json_body['url'])
self.assertEqual(self.cluster_plugin_link.description,
resp.json_body['description'])
self.assertEqual(self.cluster_plugin_link.hidden,
resp.json_body['hidden'])
def test_cluster_plugin_link_not_found(self):
resp = self.app.get(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': self.cluster_plugin_link.id + 1}
),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(404, resp.status_code)
def test_cluster_plugin_link_delete(self):
resp = self.app.delete(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': self.cluster_plugin_link.id}
),
headers=self.default_headers,
)
self.assertEqual(204, resp.status_code)
d_e_query = self.db.query(ClusterPluginLink) \
.filter_by(cluster_id=self.cluster.id)
        self.assertEqual(d_e_query.count(), 0)
def test_cluster_plugin_link_patch(self):
cluster_plugin_link_update = {
'title': 'new title 3',
'description': 'new description 3',
'hidden': True
}
resp = self.app.patch(
reverse(
'ClusterPluginLinkHandler',
kwargs={'cluster_id': self.cluster['id'],
'obj_id': self.cluster_plugin_link.id}
),
jsonutils.dumps(cluster_plugin_link_update),
headers=self.default_headers
)
self.assertEqual(self.cluster_plugin_link.id, resp.json_body['id'])
self.assertEqual('new title 3', resp.json_body['title'])
self.assertEqual('new description 3', resp.json_body['description'])
self.assertEqual(self.cluster_plugin_link.url, resp.json_body['url'])
self.assertEqual(True, resp.json_body['hidden'])
|
{
"content_hash": "2cbd9ac9db01f3f3053bc6b0f312d85f",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 78,
"avg_line_length": 39.87128712871287,
"alnum_prop": 0.5776011919543084,
"repo_name": "huntxu/fuel-web",
"id": "33be3b3ba07e689884221a9cc0b85f1e446df9c1",
"size": "4662",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nailgun/nailgun/test/integration/test_cluster_plugin_links_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "97543"
},
{
"name": "HTML",
"bytes": "2844"
},
{
"name": "JavaScript",
"bytes": "815534"
},
{
"name": "Mako",
"bytes": "1943"
},
{
"name": "Python",
"bytes": "3710735"
},
{
"name": "Ruby",
"bytes": "13649"
},
{
"name": "Shell",
"bytes": "22527"
}
],
"symlink_target": ""
}
|
from __future__ import division
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
import time, re, datetime
import jinja2
import os
import json
import math
import numpy
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
def fraction(value):
return value - math.trunc(value)
def stem_char(value, range_minimum, range_maximum, interval, power):
factor = pow(10.0, power)
delta = factor / 1000.0
range_size = range_maximum - range_minimum
range_midpoint = (range_maximum + range_minimum) / 2.0
value_less_than_midpoint = False
if value > 0:
if range_midpoint - value - delta > 0.0:
value_less_than_midpoint = True
else:
if range_midpoint - value + delta < 0.0:
value_less_than_midpoint = True
format = '%0' + str(power) + 'd'
if value < 0:
x1 = math.fabs(math.fabs(value) - math.fabs(range_maximum))
else:
x1 = value - range_minimum
if math.fabs(value) < 1.0:
x2 = int(x1 / factor * 10.0 + delta)
else:
x2 = int(x1)
x3 = format % x2
if interval == 1:
retval = x3[0]
else:
if value_less_than_midpoint:
retval = x3[0]
else:
retval = '@'
return retval
def compute_group_info(maximum_value):
power = 1
if maximum_value < 1:
while 10.0 ** power > maximum_value:
power -= 1
power += 1
interval = 5
step_size = (10.0 ** power) * interval
while (maximum_value / step_size) < 5:
if interval == 5:
interval = 2
else:
if interval == 2:
interval = 1
else:
interval = 5
power -= 1
step_size = (10.0 ** power) * interval
else:
interval = 1
step_size = int((10.0 ** power) * interval)
while (maximum_value / step_size) > 20:
if interval == 1:
interval = 2
else:
if interval == 2:
interval = 5
else:
interval = 1
power += 1
step_size = int((10.0 ** power) * interval)
return (step_size, interval, power)
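# Illustrative, assumed check of the 1-2-5 step progression above: a data
# range of 470 walks step sizes 10 -> 20 -> 50 before the group count drops
# to 20 or fewer, returning (step_size, interval, power) == (50, 5, 1).
def _compute_group_info_example():
    assert compute_group_info(470) == (50, 5, 1)
    return compute_group_info(470)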
def condense(points):
points.sort()
count = len(points)
    # Clamp the index so rounding can't step past the last element on small samples.
    upper_quartile = points[min(int(round(count * 0.75)), count - 1)]
    lower_quartile = points[int(round(count * 0.25))]
    iq = upper_quartile - lower_quartile
    upper_outlier_bound = upper_quartile + iq
    lower_outlier_bound = lower_quartile - iq
    mean = sum(points) / count
result = {
'sum': sum(points),
'count': count,
'minimum': min(points),
'maximum': max(points),
'mean': mean,
'mean_deviation': sum([abs(x - mean) for x in points])/count,
'median': numpy.median(points),
'standard_deviation': pow(sum([pow(x - mean, 2) for x in points])/count, 0.5),
'upper_quartile': upper_quartile,
'lower_quartile': lower_quartile,
'upper_outliers': [x for x in points if x > upper_outlier_bound],
'lower_outliers': [x for x in points if x < lower_outlier_bound]
}
return result
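# A minimal sketch (sample values assumed) of the summary dict condense()
# produces: 100 lies above upper_quartile + IQ, so it is flagged an outlier.
def _condense_example():
    stats = condense([1, 2, 3, 4, 5, 6, 7, 8, 9, 100])
    assert stats['count'] == 10
    assert 100 in stats['upper_outliers']
    assert stats['lower_outliers'] == []
    return stats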
def outlier_character(value):
if value == 0:
return ' '
if value < 10:
return str(value)
return '+'
def box_plot_text_horiz(lower_outlier_pos, scaled_lower_outliers, lower_quartile_pos, median_pos, upper_quartile_pos, upper_outlier_pos, scaled_upper_outliers, scale):
lines = []
line = ''
# lower outliers
for i in range(0, lower_outlier_pos - 1):
line += outlier_character(scaled_lower_outliers.get(i, 0))
line += '['
# lower quartile
line += '-' * (lower_quartile_pos - lower_outlier_pos - 2)
line += 'L'
# points around median
line += '-' * (median_pos - lower_quartile_pos - 2)
line += 'M'
line += '-' * (upper_quartile_pos - median_pos - 2)
# upper quartile
line += 'U'
line += '-' * (upper_outlier_pos - upper_quartile_pos - 2)
# upper outliers
line += ']'
for i in range(upper_outlier_pos + 1, int(scale)):
line += outlier_character(scaled_upper_outliers.get(i, 0))
lines.append(line)
return lines
def box_plot_text(condensed_points, orientation):
data_range = condensed_points['maximum'] - condensed_points['minimum']
scale = 100.0
min_pos = 0
max_pos = int(scale)
median_pos = int((condensed_points['median'] - condensed_points['minimum']) / data_range * scale)
upper_quartile_pos = int((condensed_points['upper_quartile'] - condensed_points['minimum']) / data_range * scale)
lower_quartile_pos = int((condensed_points['lower_quartile'] - condensed_points['minimum']) / data_range * scale)
iq = upper_quartile_pos - lower_quartile_pos
upper_outlier_pos = min(upper_quartile_pos + iq, int(scale))
lower_outlier_pos = max(lower_quartile_pos - iq, 0)
scaled_lower_outliers = {}
for point in condensed_points['lower_outliers']:
scaled_point = int((point - condensed_points['minimum']) / data_range * scale)
scaled_lower_outliers[scaled_point] = scaled_lower_outliers.get(scaled_point, 0) + 1
scaled_upper_outliers = {}
for point in condensed_points['upper_outliers']:
scaled_point = int((point - condensed_points['minimum']) / data_range * scale)
scaled_upper_outliers[scaled_point] = scaled_upper_outliers.get(scaled_point, 0) + 1
lines = []
if orientation == 'horizontal':
lines = box_plot_text_horiz(lower_outlier_pos, scaled_lower_outliers, lower_quartile_pos, median_pos, upper_quartile_pos, upper_outlier_pos, scaled_upper_outliers, scale)
return lines
def box_plot_processing(condensed_values, orientation):
if orientation == 'vertical':
template = jinja_environment.get_template('templates/vertical_boxplot.processing.jinja')
else:
template = jinja_environment.get_template('templates/horizontal_boxplot.processing.jinja')
return template.render(condensed_values)
def group_values(values):
groups = []
maximum = max(values)
minimum = min(values)
data_range = maximum - minimum
step_size, interval, power = compute_group_info(data_range)
i = 0
while i > minimum:
i -= step_size
while i < 0:
range_minimum = i
range_maximum = range_minimum + step_size
if math.fabs(range_maximum) < 1e-16:
range_maximum = 0
group_values = []
for value in values:
if value >= range_minimum and value < range_maximum:
group_values.append(value)
group_values.sort()
groups.append( { 'minimum': range_minimum, 'maximum': range_maximum, 'values': group_values } )
i += step_size
if math.fabs(i) < 1e-16:
i = 0
i = 0
while i < maximum:
range_minimum = i
range_maximum = range_minimum + step_size
group_values = []
for value in values:
if value >= range_minimum and value < range_maximum:
group_values.append(value)
group_values.sort()
groups.append( { 'minimum': range_minimum, 'maximum': range_maximum, 'values': group_values } )
i += step_size
return groups
def stem_graph(groups):
stem_tuples = []
range_maximum = 0
range_minimum = 0
for group in groups:
range_maximum = max(group["maximum"], range_maximum)
range_minimum = min(group["minimum"], range_minimum)
step_size, interval, leader_power = compute_group_info(range_maximum - range_minimum)
for group in groups:
leader = int(group['minimum'] / pow(10, leader_power))
value_chars = []
for value in group['values']:
char = stem_char(value, group["minimum"], group["maximum"], interval, leader_power)
value_chars.append(char)
value_chars.sort()
stem_tuples.append( {'leader': leader, 'values': ''.join(value_chars)} )
return stem_tuples
def stem_tuples_to_text(stem_tuples):
text_lines = []
leader_length = 2 if (len(stem_tuples) > 10) else 1
for stem_tuple in stem_tuples:
leader = stem_tuple['leader']
this_leader_length = leader_length
if leader < 0:
this_leader_length += 1
leader_format = '%0' + str(this_leader_length) + 'd'
leader_string = leader_format % leader
text_lines.append(leader_string + '|' + stem_tuple['values'])
return text_lines
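# Assumed illustration of the stem-and-leaf pipeline end to end: group the
# raw values, reduce each group to a (leader, leaf-characters) tuple, then
# render text lines shaped like "1|234" (leader, bar, sorted leaf digits).
def _stem_graph_example(values):
    groups = group_values(values)
    tuples = stem_graph(groups)
    return stem_tuples_to_text(tuples)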
def transform_power(values, power):
transformed_values = []
for value in values:
transformed_values.append(value ** power)
return transformed_values
def transform_log(values):
transformed_values = []
for value in values:
transformed_values.append(math.log10(value))
return transformed_values
def transform_normalize(values):
bias = min(values)
scale = max(values) - bias
transformed_values = []
for value in values:
transformed_values.append((value - bias) / scale)
return transformed_values
def transform_zerobase(values):
transformed_values = []
bias = min(values)
for value in values:
transformed_values.append(value - bias)
return transformed_values
class MainPage(webapp.RequestHandler):
def get(self):
template_values = { }
template = jinja_environment.get_template('templates/index.html')
self.response.out.write(template.render(template_values))
class CondenseValues(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'application/json'
self.response.out.write( json.dumps( condense( json.loads(self.request.body) ) ) )
class BoxPlot(webapp.RequestHandler):
def post(self):
body = self.request.body
format = self.request.get('format', 'text')
orientation = self.request.get('orientation', 'horizontal')
values = json.loads(body)
box_plot_lines = []
if format == 'text':
box_plot_lines = box_plot_text(values, orientation)
for line in box_plot_lines:
self.response.out.write(line)
self.response.out.write("\n")
if format == 'processing':
box_plot_lines = box_plot_processing(values, orientation)
self.response.out.write(box_plot_lines)
class GroupValues(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'application/json'
values = json.loads(self.request.body)
stem = group_values(values)
self.response.out.write( json.dumps(stem) )
class StemGraph(webapp.RequestHandler):
def post(self):
body = self.request.body
stem = json.loads(body)
stem_tuples = stem_graph(stem)
stem_texts = stem_tuples_to_text(stem_tuples)
for text_line in stem_texts:
self.response.out.write(text_line)
self.response.out.write("\n")
class TransformPower(webapp.RequestHandler):
def post(self):
body = self.request.body
values = json.loads(body)
power = float(self.request.get('power', '1.0'))
transformed_values = transform_power(values, power)
self.response.out.write( json.dumps( transformed_values ) )
class TransformLog(webapp.RequestHandler):
def post(self):
body = self.request.body
values = json.loads(body)
transformed_values = transform_log(values)
self.response.out.write( json.dumps( transformed_values ) )
class TransformNormalize(webapp.RequestHandler):
def post(self):
body = self.request.body
values = json.loads(body)
transformed_values = transform_normalize(values)
self.response.out.write( json.dumps( transformed_values ) )
class TransformZeroBase(webapp.RequestHandler):
def post(self):
body = self.request.body
values = json.loads(body)
transformed_values = transform_zerobase(values)
self.response.out.write( json.dumps( transformed_values ) )
application = webapp.WSGIApplication(
[
('/', MainPage),
('/condense', CondenseValues),
('/boxplot', BoxPlot),
('/group', GroupValues),
('/stemgraph', StemGraph),
('/transform/power', TransformPower),
('/transform/log', TransformLog),
('/transform/normalize', TransformNormalize),
('/transform/zerobase', TransformZeroBase)
],
debug=False)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
|
{
"content_hash": "a26230af55d48904be63295149f54e2a",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 172,
"avg_line_length": 30.437837837837836,
"alnum_prop": 0.6924169774462795,
"repo_name": "jfitz/interact-data-analysis",
"id": "fb7b2d11d649f8240efcba94a049304120456dd8",
"size": "11262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_analysis.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15666"
},
{
"name": "Shell",
"bytes": "2738"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
import datetime
import sys
if sys.version_info[0] == 3:
unicode_str = '\u2603'
else:
unicode_str = unicode('snowman')
import validictory
class TestType(TestCase):
def test_schema(self):
schema = {
"type": [
{"type": "array", "minItems": 10},
{"type": "string", "pattern": "^0+$"}
]
}
data1 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
data2 = "0"
data3 = 1203
for x in [data1, data2]:
try:
validictory.validate(x, schema)
except ValueError as e:
self.fail("Unexpected failure: %s" % e)
self.assertRaises(ValueError, validictory.validate, data3, schema)
def _test_type(self, typename, valids, invalids):
for x in valids:
try:
validictory.validate(x, {"type": typename})
except ValueError as e:
self.fail("Unexpected failure: %s" % e)
for x in invalids:
self.assertRaises(ValueError, validictory.validate, x,
{"type": typename})
def test_integer(self):
valid_ints = [1, -89, 420000]
invalid_ints = [1.2, "bad", {"test":"blah"}, [32, 49], None, True]
self._test_type('integer', valid_ints, invalid_ints)
def test_string(self):
valids = ["abc", unicode_str]
invalids = [1.2, 1, {"test":"blah"}, [32, 49], None, True]
self._test_type('string', valids, invalids)
def test_number(self):
valids = [1.2, -89.42, 48, -32]
invalids = ["bad", {"test":"blah"}, [32.42, 494242], None, True]
self._test_type('number', valids, invalids)
def test_boolean(self):
valids = [True, False]
invalids = [1.2, "False", {"test":"blah"}, [32, 49], None, 1, 0]
self._test_type('boolean', valids, invalids)
def test_object(self):
valids = [{"blah": "test"}, {"this":{"blah":"test"}}, {1:2, 10:20}]
invalids = [1.2, "bad", 123, [32, 49], None, True]
self._test_type('object', valids, invalids)
def test_array(self):
valids = [[1, 89], [48, {"test":"blah"}, "49", 42], (47, 11)]
invalids = [1.2, "bad", {"test":"blah"}, 1234, None, True]
self._test_type('array', valids, invalids)
def test_null(self):
valids = [None]
invalids = [1.2, "bad", {"test":"blah"}, [32, 49], 1284, True]
self._test_type('null', valids, invalids)
def test_any(self):
valids = [1.2, "bad", {"test":"blah"}, [32, 49], None, 1284, True]
self._test_type('any', valids, [])
def test_default(self):
# test default value (same as any really)
valids = [1.2, "bad", {"test":"blah"}, [32, 49], None, 1284, True]
for x in valids:
try:
validictory.validate(x, {})
except ValueError as e:
self.fail("Unexpected failure: %s" % e)
def test_multi(self):
types = ["null", "integer", "string"]
valids = [None, 42, "string"]
invalids = [1.2, {"test":"blah"}, [32, 49], True]
self._test_type(types, valids, invalids)
self._test_type(tuple(types), valids, invalids)
class TestDisallow(TestType):
def _test_type(self, typename, valids, invalids):
for x in invalids:
try:
validictory.validate(x, {"disallow": typename})
except ValueError as e:
self.fail("Unexpected failure: %s" % e)
for x in valids:
self.assertRaises(ValueError, validictory.validate, x,
{"disallow": typename})
class DateValidator(validictory.validator.SchemaValidator):
def validate_type_date(self, value):
return isinstance(value, datetime.date)
def validate_type_datetime(self, value):
return isinstance(value, datetime.datetime)
class TestCustomType(TestCase):
def test_date(self):
self._test_type('date', [datetime.date.today()],
[2010, '2010'])
def test_datetime(self):
self._test_type('datetime', [datetime.datetime.now()],
[2010, '2010', datetime.date.today()])
def test_either(self):
self._test_type(['datetime', 'date'],
[datetime.date.today(), datetime.datetime.now()],
[2010, '2010'])
def _test_type(self, typename, valids, invalids):
validator = DateValidator()
for x in valids:
try:
validator.validate(x, {"type": typename})
except ValueError as e:
self.fail("Unexpected failure: %s" % e)
for x in invalids:
self.assertRaises(ValueError, validator.validate, x,
{"type": typename})
|
{
"content_hash": "21ea92a8fd2330ffabdf7e058f79f5a5",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 75,
"avg_line_length": 32.972972972972975,
"alnum_prop": 0.5286885245901639,
"repo_name": "ahassany/validictory",
"id": "e65381a8d0ca474bdc9a64180ec3a6de643c314e",
"size": "4880",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "validictory/tests/test_type.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "71338"
}
],
"symlink_target": ""
}
|
__author__ = 'abdul'
import os
from mongoctl.processes import create_subprocess
from mongoctl.mongoctl_logging import *
from mongoctl import repository
from mongoctl.utils import execute_command
###############################################################################
# CONSTS
###############################################################################
DEFAULT_TAIL_LINES = 15
###############################################################################
# tail log command
###############################################################################
def tail_log_command(parsed_options):
server = repository.lookup_server(parsed_options.server)
server.validate_local_op("tail-log")
log_path = server.get_log_file_path()
# check if log file exists
if os.path.exists(log_path):
log_tailer = tail_server_log(server)
log_tailer.communicate()
else:
log_info("Log file '%s' does not exist." % log_path)
###############################################################################
def tail_server_log(server):
try:
logpath = server.get_log_file_path()
# touch log file to make sure it exists
log_verbose("Touching log file '%s'" % logpath)
execute_command(["touch", logpath])
tail_cmd = ["tail", "-f", "-n", str(DEFAULT_TAIL_LINES), logpath]
log_verbose("Executing command: %s" % (" ".join(tail_cmd)))
return create_subprocess(tail_cmd)
    except Exception as e:
log_exception(e)
log_error("Unable to tail server log file. Cause: %s" % e)
return None
###############################################################################
def stop_tailing(log_tailer):
try:
if log_tailer:
log_verbose("-- Killing tail log path subprocess")
log_tailer.terminate()
    except Exception as e:
log_exception(e)
log_verbose("Failed to kill tail subprocess. Cause: %s" % e)
|
{
"content_hash": "27954d3fc8d96fee8d127756007b2149",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 79,
"avg_line_length": 36.333333333333336,
"alnum_prop": 0.4831804281345566,
"repo_name": "mongolab/mongoctl",
"id": "c4460f267ad91b788d98ab92e3debb62041fd3d4",
"size": "1962",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mongoctl/commands/server/tail_log.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "470598"
}
],
"symlink_target": ""
}
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('private_sharing', '0015_auto_20190124_1849'),
]
operations = [
migrations.AddField(
model_name='datarequestprojectmember',
name='last_authorized',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), size=2), default=list, editable=False, size=None),
),
migrations.AddField(
model_name='datarequestprojectmember',
name='last_joined',
field=django.contrib.postgres.fields.ArrayField(base_field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=32), size=2), default=list, editable=False, size=None),
),
]
|
{
"content_hash": "e75001bf87c43722912e1d7cb222b7f1",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 207,
"avg_line_length": 40.72727272727273,
"alnum_prop": 0.6808035714285714,
"repo_name": "OpenHumans/open-humans",
"id": "5b5cf5d15e111c761b4de72a59e52182c10f052c",
"size": "945",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "private_sharing/migrations/0016_auto_20190128_2111.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23921"
},
{
"name": "HTML",
"bytes": "372870"
},
{
"name": "JavaScript",
"bytes": "38991"
},
{
"name": "Python",
"bytes": "519685"
},
{
"name": "SCSS",
"bytes": "7823"
},
{
"name": "Shell",
"bytes": "721"
}
],
"symlink_target": ""
}
|
from django import forms
from django.core.exceptions import PermissionDenied
from django.db import router
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ungettext
from django.utils.text import capfirst
from xadmin.sites import site
from xadmin.util import model_format_dict, get_deleted_objects, model_ngettext
from xadmin.views import BaseAdminPlugin, ListAdminView
from xadmin.views.base import filter_hook, ModelAdminView
from collections import OrderedDict
ACTION_CHECKBOX_NAME = '_selected_action'
checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
def action_checkbox(obj):
return checkbox.render(ACTION_CHECKBOX_NAME, force_str(obj.pk))
action_checkbox.short_description = mark_safe(
'<input type="checkbox" id="action-toggle" />')
action_checkbox.allow_tags = True
action_checkbox.allow_export = False
action_checkbox.is_column = False
class BaseActionView(ModelAdminView):
action_name = None
description = None
icon = 'fa fa-tasks'
model_perm = 'change'
@classmethod
def has_perm(cls, list_view):
return list_view.get_model_perms()[cls.model_perm]
def init_action(self, list_view):
self.list_view = list_view
self.admin_site = list_view.admin_site
@filter_hook
def do_action(self, queryset):
pass
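# A minimal sketch (hypothetical names, not shipped with xadmin) of a custom
# bulk action: subclass BaseActionView, set the metadata attributes, and put
# the work in do_action, which receives the selected queryset.
class _ExampleMarkReviewedAction(BaseActionView):
    action_name = "mark_reviewed"
    description = _(u'Mark selected %(verbose_name_plural)s as reviewed')
    model_perm = 'change'
    icon = 'fa fa-check'
    def do_action(self, queryset):
        # 'reviewed' is an assumed model field, used for illustration only.
        rows = queryset.update(reviewed=True)
        self.message_user(_("Marked %(count)d items as reviewed.") % {"count": rows}, 'success')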
class DeleteSelectedAction(BaseActionView):
action_name = "delete_selected"
description = _(u'Delete selected %(verbose_name_plural)s')
delete_confirmation_template = None
delete_selected_confirmation_template = None
delete_models_batch = True
model_perm = 'delete'
icon = 'fa fa-times'
@filter_hook
def delete_models(self, queryset):
n = queryset.count()
if n:
if self.delete_models_batch:
queryset.delete()
else:
for obj in queryset:
obj.delete()
self.message_user(_("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
}, 'success')
@filter_hook
def do_action(self, queryset):
# Check that the user has delete permission for the actual model
if not self.has_delete_permission():
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, model_count, perms_needed, protected = get_deleted_objects(
queryset, self.opts, self.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if self.request.POST.get('post'):
if perms_needed:
raise PermissionDenied
self.delete_models(queryset)
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_str(self.opts.verbose_name)
else:
objects_name = force_str(self.opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = self.get_context()
context.update({
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
"model_count": dict(model_count).items(),
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": self.opts,
"app_label": self.app_label,
'action_checkbox_name': ACTION_CHECKBOX_NAME,
})
# Display the confirmation page
return TemplateResponse(self.request, self.delete_selected_confirmation_template or
self.get_template_list('views/model_delete_selected_confirm.html'), context, current_app=self.admin_site.name)
class ActionPlugin(BaseAdminPlugin):
# Actions
actions = []
actions_selection_counter = True
global_actions = [DeleteSelectedAction]
def init_request(self, *args, **kwargs):
self.actions = self.get_actions()
return bool(self.actions)
def get_list_display(self, list_display):
if self.actions:
list_display.insert(0, 'action_checkbox')
self.admin_view.action_checkbox = action_checkbox
return list_display
def get_list_display_links(self, list_display_links):
if self.actions:
if len(list_display_links) == 1 and list_display_links[0] == 'action_checkbox':
return list(self.admin_view.list_display[1:2])
return list_display_links
def get_context(self, context):
if self.actions and self.admin_view.result_count:
av = self.admin_view
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', av.result_count)
new_context = {
'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(av.result_list)},
'selection_note_all': selection_note_all % {'total_count': av.result_count},
'action_choices': self.get_action_choices(),
'actions_selection_counter': self.actions_selection_counter,
}
context.update(new_context)
return context
def post_response(self, response, *args, **kwargs):
request = self.admin_view.request
av = self.admin_view
# Actions with no confirmation
if self.actions and 'action' in request.POST:
action = request.POST['action']
if action not in self.actions:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
av.message_user(msg)
else:
ac, name, description, icon = self.actions[action]
select_across = request.POST.get('select_across', False) == '1'
selected = request.POST.getlist(ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
av.message_user(msg)
else:
queryset = av.list_queryset._clone()
if not select_across:
# Perform the action only on the selected objects
queryset = av.list_queryset.filter(pk__in=selected)
response = self.response_action(ac, queryset)
# Actions may return an HttpResponse, which will be used as the
# response from the POST. If not, we'll be a good little HTTP
# citizen and redirect back to the changelist page.
if isinstance(response, HttpResponse):
return response
else:
return HttpResponseRedirect(request.get_full_path())
return response
def response_action(self, ac, queryset):
if isinstance(ac, type) and issubclass(ac, BaseActionView):
action_view = self.get_model_view(ac, self.admin_view.model)
action_view.init_action(self.admin_view)
return action_view.do_action(queryset)
else:
return ac(self.admin_view, self.request, queryset)
def get_actions(self):
if self.actions is None:
return OrderedDict()
actions = [self.get_action(action) for action in self.global_actions]
for klass in self.admin_view.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
if not class_actions:
continue
actions.extend(
[self.get_action(action) for action in class_actions])
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
        # Convert the actions into an OrderedDict keyed by name.
actions = OrderedDict([
(name, (ac, name, desc, icon))
for ac, name, desc, icon in actions
])
return actions
def get_action_choices(self):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = []
for ac, name, description, icon in six.itervalues(self.actions):
choice = (name, description % model_format_dict(self.opts), icon)
choices.append(choice)
return choices
def get_action(self, action):
if isinstance(action, type) and issubclass(action, BaseActionView):
if not action.has_perm(self.admin_view):
return None
return action, getattr(action, 'action_name'), getattr(action, 'description'), getattr(action, 'icon')
elif callable(action):
func = action
action = action.__name__
elif hasattr(self.admin_view.__class__, action):
func = getattr(self.admin_view.__class__, action)
else:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description, getattr(func, 'icon', 'tasks')
# View Methods
def result_header(self, item, field_name, row):
if item.attr and field_name == 'action_checkbox':
item.classes.append("action-checkbox-column")
return item
def result_item(self, item, obj, field_name, row):
if item.field is None and field_name == u'action_checkbox':
item.classes.append("action-checkbox")
return item
# Media
def get_media(self, media):
if self.actions and self.admin_view.result_count:
media = media + self.vendor('xadmin.plugin.actions.js', 'xadmin.plugins.css')
return media
# Block Views
def block_results_bottom(self, context, nodes):
if self.actions and self.admin_view.result_count:
nodes.append(loader.render_to_string('xadmin/blocks/model_list.results_bottom.actions.html', context_instance=context))
site.register_plugin(ActionPlugin, ListAdminView)
|
{
"content_hash": "92cfdfa94d86b080383742eceea8d0f6",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 142,
"avg_line_length": 37.69178082191781,
"alnum_prop": 0.6073051063056515,
"repo_name": "cupen/django-xadmin",
"id": "1d6248cf6028200feec3c86e096d7b7a3bfa5858",
"size": "11020",
"binary": false,
"copies": "1",
"ref": "refs/heads/django1.8",
"path": "xadmin/plugins/actions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "23733"
},
{
"name": "HTML",
"bytes": "95746"
},
{
"name": "JavaScript",
"bytes": "66338"
},
{
"name": "Python",
"bytes": "419197"
},
{
"name": "Shell",
"bytes": "713"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('front', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='lien',
name='disabled',
field=models.BooleanField(default=False),
),
]
|
{
"content_hash": "e17427111df713f7aa6e272e020f44a5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 53,
"avg_line_length": 20.333333333333332,
"alnum_prop": 0.5846994535519126,
"repo_name": "ahemery/ezreports",
"id": "9bb72730c20c5ee76a1ed49900f2a9ee374a0f92",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front/migrations/0002_lien_disabled.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "162994"
},
{
"name": "HTML",
"bytes": "43457"
},
{
"name": "JavaScript",
"bytes": "453261"
},
{
"name": "Python",
"bytes": "45850"
}
],
"symlink_target": ""
}
|
from flask import Flask
from flask import render_template
app = Flask(__name__)
app.secret_key = 'ThisIsSecret'
@app.route('/')
def nothing():
return render_template('noninja.html')
@app.route('/ninja')
def ninja():
x = 'tmnt'
return render_template('ninja.html', x=x)
@app.route('/ninja/<color>')
def ninjas_colors_any(color):
    # Look up the ninja for the requested colour, falling back for unknown values.
    ninja_dict = {'blue': 'leonardo', 'red': 'raphael', 'purple': 'donatello', 'orange': 'michelangelo'}
    if color in ninja_dict:
        x = ninja_dict[color]
    else:
        x = 'notapril'
    return render_template('ninja.html', x=x)
app.run(debug=True)
|
{
"content_hash": "2eabc47f0d7a26830d96177c4124af03",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 103,
"avg_line_length": 22.266666666666666,
"alnum_prop": 0.6482035928143712,
"repo_name": "jiobert/python",
"id": "238d58b83fbc0922eb45a09c3b9552b66f8865d0",
"size": "668",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Smith_Ben/Assignments/disappearing_ninja copy/ninja.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25381"
},
{
"name": "HTML",
"bytes": "256675"
},
{
"name": "JavaScript",
"bytes": "528"
},
{
"name": "Python",
"bytes": "399336"
}
],
"symlink_target": ""
}
|
import os.path
import shutil
def ensure_file_exists(file_path):
if not file_exists(file_path):
raise IOError('File does not exist: %s' % file_path)
def ensure_directory_exists(directory_path):
if not directory_exists(directory_path):
raise IOError('Directory does not exist: %s' % directory_path)
def ensure_file_not_exists(file_path):
if file_exists(file_path):
        raise IOError('File already exists: %s' % file_path)
def ensure_directory_not_exists(directory_path):
if directory_exists(directory_path):
        raise IOError('Directory already exists: %s' % directory_path)
def file_exists(file_path):
return os.path.exists(file_path) and os.path.isfile(file_path)
def directory_exists(directory_path):
return os.path.exists(directory_path) and os.path.isdir(directory_path)
def create_directory_if_not_exists(directory_path):
if not directory_exists(directory_path):
os.makedirs(directory_path)
return
def delete_directory(directory_path):
if directory_exists(directory_path):
shutil.rmtree(directory_path, ignore_errors=True)
def delete_file(file_path):
if file_exists(file_path):
os.remove(file_path)
def last_component_of(file_path):
return os.path.basename(file_path)
def directory_of(file_path):
return os.path.dirname(file_path)
def list_relative_files_with_extension(directory, extension):
    # Return a concrete list (not a lazy iterator) so callers can iterate it more than once.
    return [filename for filename in os.listdir(directory) if filename.endswith(extension)]
def list_absolute_files_with_extension(directory, extension):
    relative_paths = list_relative_files_with_extension(directory, extension)
    return [join(directory, relative_path) for relative_path in relative_paths]
def join(baseurl, path_to_append):
ensure_directory_exists(baseurl)
return os.path.join(baseurl, path_to_append)
|
{
"content_hash": "f13cc53170e893a6691650e7e7a985f9",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 87,
"avg_line_length": 34.018181818181816,
"alnum_prop": 0.7306253340459647,
"repo_name": "FranDepascuali/wikiquotes-python-api",
"id": "2b367a7e44efaad58f4273f4338e7e680285c52f",
"size": "1871",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wikiquotes/managers/file_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26798"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/structure/general/shared_poi_corl_corral_half_64x64_s03.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "8c41b9c73ef0bb323f221c8f7397aad8",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 94,
"avg_line_length": 24.846153846153847,
"alnum_prop": 0.6996904024767802,
"repo_name": "anhstudios/swganh",
"id": "23538664e69898f7c35eaba9133183dbf907f7bb",
"size": "468",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/static/structure/general/shared_poi_corl_corral_half_64x64_s03.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
from base64 import urlsafe_b64encode
from datetime import datetime, timedelta
from hashlib import sha512
from botocore.exceptions import ClientError as BotoClientError
from flask import current_app
from notifications_utils.letter_timings import LETTER_PROCESSING_DEADLINE
from notifications_utils.postal_address import PostalAddress
from notifications_utils.timezones import convert_utc_to_bst
from app import encryption, notify_celery
from app.aws import s3
from app.config import QueueNames, TaskNames
from app.cronitor import cronitor
from app.dao.notifications_dao import (
dao_get_letters_and_sheets_volume_by_postage,
dao_get_letters_to_be_printed,
dao_get_notification_by_reference,
dao_update_notification,
dao_update_notifications_by_reference,
get_notification_by_id,
update_notification_status_by_id,
)
from app.dao.templates_dao import dao_get_template_by_id
from app.errors import VirusScanError
from app.exceptions import NotificationTechnicalFailureException
from app.letters.utils import (
LetterPDFNotFound,
ScanErrorType,
find_letter_pdf_in_s3,
generate_letter_pdf_filename,
get_billable_units_for_letter_page_count,
get_file_names_from_error_bucket,
get_folder_name,
get_reference_from_filename,
move_error_pdf_to_scan_bucket,
move_failed_pdf,
move_sanitised_letter_to_test_or_live_pdf_bucket,
move_scan_to_invalid_pdf_bucket,
)
from app.models import (
INTERNATIONAL_LETTERS,
INTERNATIONAL_POSTAGE_TYPES,
KEY_TYPE_NORMAL,
KEY_TYPE_TEST,
NOTIFICATION_CREATED,
NOTIFICATION_DELIVERED,
NOTIFICATION_PENDING_VIRUS_CHECK,
NOTIFICATION_TECHNICAL_FAILURE,
NOTIFICATION_VALIDATION_FAILED,
NOTIFICATION_VIRUS_SCAN_FAILED,
POSTAGE_TYPES,
RESOLVE_POSTAGE_FOR_FILE_NAME,
Service,
)
@notify_celery.task(bind=True, name="get-pdf-for-templated-letter", max_retries=15, default_retry_delay=300)
def get_pdf_for_templated_letter(self, notification_id):
try:
notification = get_notification_by_id(notification_id, _raise=True)
letter_filename = generate_letter_pdf_filename(
reference=notification.reference,
created_at=notification.created_at,
ignore_folder=notification.key_type == KEY_TYPE_TEST,
postage=notification.postage,
)
letter_data = {
"letter_contact_block": notification.reply_to_text,
"template": {
"subject": notification.template.subject,
"content": notification.template.content,
"template_type": notification.template.template_type,
},
"values": notification.personalisation,
"logo_filename": notification.service.letter_branding and notification.service.letter_branding.filename,
"letter_filename": letter_filename,
"notification_id": str(notification_id),
"key_type": notification.key_type,
}
encrypted_data = encryption.encrypt(letter_data)
notify_celery.send_task(
name=TaskNames.CREATE_PDF_FOR_TEMPLATED_LETTER, args=(encrypted_data,), queue=QueueNames.SANITISE_LETTERS
)
except Exception as e:
try:
current_app.logger.exception(
f"RETRY: calling create-letter-pdf task for notification {notification_id} failed"
)
self.retry(exc=e, queue=QueueNames.RETRY)
except self.MaxRetriesExceededError:
message = (
f"RETRY FAILED: Max retries reached. "
f"The task create-letter-pdf failed for notification id {notification_id}. "
f"Notification has been updated to technical-failure"
)
update_notification_status_by_id(notification_id, NOTIFICATION_TECHNICAL_FAILURE)
raise NotificationTechnicalFailureException(message)
@notify_celery.task(bind=True, name="update-billable-units-for-letter", max_retries=15, default_retry_delay=300)
def update_billable_units_for_letter(self, notification_id, page_count):
notification = get_notification_by_id(notification_id, _raise=True)
billable_units = get_billable_units_for_letter_page_count(page_count)
if notification.key_type != KEY_TYPE_TEST:
notification.billable_units = billable_units
dao_update_notification(notification)
current_app.logger.info(
f"Letter notification id: {notification_id} reference {notification.reference}: "
f"billable units set to {billable_units}"
)
@notify_celery.task(
bind=True, name="update-validation-failed-for-templated-letter", max_retries=15, default_retry_delay=300
)
def update_validation_failed_for_templated_letter(self, notification_id, page_count):
notification = get_notification_by_id(notification_id, _raise=True)
notification.status = NOTIFICATION_VALIDATION_FAILED
dao_update_notification(notification)
current_app.logger.info(f"Validation failed: letter is too long {page_count} for letter with id: {notification_id}")
@notify_celery.task(name="collate-letter-pdfs-to-be-sent")
@cronitor("collate-letter-pdfs-to-be-sent")
def collate_letter_pdfs_to_be_sent():
"""
Finds all letters which are still waiting to be sent to DVLA for printing
    This would usually run at 5.50pm and collect up letters created before 5:30pm today
that have not yet been sent.
If run after midnight, it will collect up letters created before 5:30pm the day before.
"""
print_run_date = convert_utc_to_bst(datetime.utcnow())
if print_run_date.time() < LETTER_PROCESSING_DEADLINE:
print_run_date = print_run_date - timedelta(days=1)
print_run_deadline = print_run_date.replace(hour=17, minute=30, second=0, microsecond=0)
_get_letters_and_sheets_volumes_and_send_to_dvla(print_run_deadline)
for postage in POSTAGE_TYPES:
current_app.logger.info(f"starting collate-letter-pdfs-to-be-sent processing for postage class {postage}")
letters_to_print = get_key_and_size_of_letters_to_be_sent_to_print(print_run_deadline, postage)
for i, letters in enumerate(group_letters(letters_to_print)):
filenames = [letter["Key"] for letter in letters]
service_id = letters[0]["ServiceId"]
organisation_id = letters[0]["OrganisationId"]
hash = urlsafe_b64encode(sha512("".join(filenames).encode()).digest())[:20].decode()
# eg NOTIFY.2018-12-31.001.Wjrui5nAvObjPd-3GEL-.ZIP
dvla_filename = "NOTIFY.{date}.{postage}.{num:03}.{hash}.{service_id}.{organisation_id}.ZIP".format(
date=print_run_deadline.strftime("%Y-%m-%d"),
postage=RESOLVE_POSTAGE_FOR_FILE_NAME[postage],
num=i + 1,
hash=hash,
service_id=service_id,
organisation_id=organisation_id,
)
current_app.logger.info(
"Calling task zip-and-send-letter-pdfs for {} pdfs to upload {} with total size {:,} bytes".format(
len(filenames), dvla_filename, sum(letter["Size"] for letter in letters)
)
)
notify_celery.send_task(
name=TaskNames.ZIP_AND_SEND_LETTER_PDFS,
kwargs={"filenames_to_zip": filenames, "upload_filename": dvla_filename},
queue=QueueNames.PROCESS_FTP,
compression="zlib",
)
current_app.logger.info(f"finished collate-letter-pdfs-to-be-sent processing for postage class {postage}")
current_app.logger.info("finished collate-letter-pdfs-to-be-sent")
def _get_letters_and_sheets_volumes_and_send_to_dvla(print_run_deadline):
letters_volumes = dao_get_letters_and_sheets_volume_by_postage(print_run_deadline)
send_letters_volume_email_to_dvla(letters_volumes, print_run_deadline.date())
def send_letters_volume_email_to_dvla(letters_volumes, date):
personalisation = {
"total_volume": 0,
"first_class_volume": 0,
"second_class_volume": 0,
"international_volume": 0,
"total_sheets": 0,
"first_class_sheets": 0,
"second_class_sheets": 0,
"international_sheets": 0,
"date": date.strftime("%d %B %Y"),
}
for item in letters_volumes:
personalisation["total_volume"] += item.letters_count
personalisation["total_sheets"] += item.sheets_count
if f"{item.postage}_class_volume" in personalisation:
personalisation[f"{item.postage}_class_volume"] = item.letters_count
personalisation[f"{item.postage}_class_sheets"] = item.sheets_count
else:
personalisation["international_volume"] += item.letters_count
personalisation["international_sheets"] += item.sheets_count
template = dao_get_template_by_id(current_app.config["LETTERS_VOLUME_EMAIL_TEMPLATE_ID"])
recipients = current_app.config["DVLA_EMAIL_ADDRESSES"]
reply_to = template.service.get_default_reply_to_email_address()
service = Service.query.get(current_app.config["NOTIFY_SERVICE_ID"])
# avoid circular imports:
from app.notifications.process_notifications import (
persist_notification,
send_notification_to_queue,
)
for recipient in recipients:
saved_notification = persist_notification(
template_id=template.id,
template_version=template.version,
recipient=recipient,
service=service,
personalisation=personalisation,
notification_type=template.template_type,
api_key_id=None,
key_type=KEY_TYPE_NORMAL,
reply_to_text=reply_to,
)
send_notification_to_queue(saved_notification, False, queue=QueueNames.NOTIFY)
def get_key_and_size_of_letters_to_be_sent_to_print(print_run_deadline, postage):
letters_awaiting_sending = dao_get_letters_to_be_printed(print_run_deadline, postage)
for letter in letters_awaiting_sending:
try:
letter_pdf = find_letter_pdf_in_s3(letter)
yield {
"Key": letter_pdf.key,
"Size": letter_pdf.size,
"ServiceId": str(letter.service_id),
"OrganisationId": str(letter.service.organisation_id),
}
except (BotoClientError, LetterPDFNotFound) as e:
current_app.logger.exception(
f"Error getting letter from bucket for notification: {letter.id} with reference: {letter.reference}", e
)
def group_letters(letter_pdfs):
"""
Group letters in chunks of MAX_LETTER_PDF_ZIP_FILESIZE. Will add files to lists, never going over that size.
    If a single file is (somehow) larger than MAX_LETTER_PDF_ZIP_FILESIZE that'll be in a list on its own.
If there are no files, will just exit (rather than yielding an empty list).
"""
running_filesize = 0
list_of_files = []
service_id = None
for letter in letter_pdfs:
if letter["Key"].lower().endswith(".pdf"):
if not service_id:
service_id = letter["ServiceId"]
if (
running_filesize + letter["Size"] > current_app.config["MAX_LETTER_PDF_ZIP_FILESIZE"]
or len(list_of_files) >= current_app.config["MAX_LETTER_PDF_COUNT_PER_ZIP"]
or letter["ServiceId"] != service_id
):
yield list_of_files
running_filesize = 0
list_of_files = []
service_id = None
if not service_id:
service_id = letter["ServiceId"]
running_filesize += letter["Size"]
list_of_files.append(letter)
if list_of_files:
yield list_of_files
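# A rough, assumed sketch of how group_letters chunks its input. Because the
# generator reads MAX_LETTER_PDF_ZIP_FILESIZE and MAX_LETTER_PDF_COUNT_PER_ZIP
# from current_app.config, it must run inside an application context.
def _group_letters_example():
    letters = [
        {"Key": "ref1.pdf", "Size": 100, "ServiceId": "service-a"},
        {"Key": "ref2.pdf", "Size": 100, "ServiceId": "service-a"},
        {"Key": "ref3.pdf", "Size": 100, "ServiceId": "service-b"},
    ]
    # The first two letters share a service and fit in one zip; the change of
    # ServiceId forces the third into a zip of its own.
    return list(group_letters(letters))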
@notify_celery.task(bind=True, name="sanitise-letter", max_retries=15, default_retry_delay=300)
def sanitise_letter(self, filename):
try:
reference = get_reference_from_filename(filename)
notification = dao_get_notification_by_reference(reference)
current_app.logger.info("Notification ID {} Virus scan passed: {}".format(notification.id, filename))
if notification.status != NOTIFICATION_PENDING_VIRUS_CHECK:
current_app.logger.info(
"Sanitise letter called for notification {} which is in {} state".format(
notification.id, notification.status
)
)
return
notify_celery.send_task(
name=TaskNames.SANITISE_LETTER,
kwargs={
"notification_id": str(notification.id),
"filename": filename,
"allow_international_letters": notification.service.has_permission(INTERNATIONAL_LETTERS),
},
queue=QueueNames.SANITISE_LETTERS,
)
except Exception:
try:
current_app.logger.exception(
"RETRY: calling sanitise_letter task for notification {} failed".format(notification.id)
)
self.retry(queue=QueueNames.RETRY)
except self.MaxRetriesExceededError:
message = (
"RETRY FAILED: Max retries reached. "
"The task sanitise_letter failed for notification {}. "
"Notification has been updated to technical-failure".format(notification.id)
)
update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
raise NotificationTechnicalFailureException(message)
@notify_celery.task(bind=True, name="process-sanitised-letter", max_retries=15, default_retry_delay=300)
def process_sanitised_letter(self, sanitise_data):
letter_details = encryption.decrypt(sanitise_data)
filename = letter_details["filename"]
notification_id = letter_details["notification_id"]
current_app.logger.info("Processing sanitised letter with id {}".format(notification_id))
notification = get_notification_by_id(notification_id, _raise=True)
if notification.status != NOTIFICATION_PENDING_VIRUS_CHECK:
current_app.logger.info(
"process-sanitised-letter task called for notification {} which is in {} state".format(
notification.id, notification.status
)
)
return
try:
original_pdf_object = s3.get_s3_object(current_app.config["LETTERS_SCAN_BUCKET_NAME"], filename)
if letter_details["validation_status"] == "failed":
current_app.logger.info(
"Processing invalid precompiled pdf with id {} (file {})".format(notification_id, filename)
)
_move_invalid_letter_and_update_status(
notification=notification,
filename=filename,
scan_pdf_object=original_pdf_object,
message=letter_details["message"],
invalid_pages=letter_details["invalid_pages"],
page_count=letter_details["page_count"],
)
return
current_app.logger.info(
"Processing valid precompiled pdf with id {} (file {})".format(notification_id, filename)
)
billable_units = get_billable_units_for_letter_page_count(letter_details["page_count"])
is_test_key = notification.key_type == KEY_TYPE_TEST
# Updating the notification needs to happen before the file is moved. This is so that if updating the
# notification fails, the task can retry because the file is in the same place.
update_letter_pdf_status(
reference=notification.reference,
status=NOTIFICATION_DELIVERED if is_test_key else NOTIFICATION_CREATED,
billable_units=billable_units,
recipient_address=letter_details["address"],
)
# The original filename could be wrong because we didn't know the postage.
# Now we know if the letter is international, we can check what the filename should be.
upload_file_name = generate_letter_pdf_filename(
reference=notification.reference,
created_at=notification.created_at,
ignore_folder=True,
postage=notification.postage,
)
move_sanitised_letter_to_test_or_live_pdf_bucket(
filename,
is_test_key,
notification.created_at,
upload_file_name,
)
# We've moved the sanitised PDF from the sanitise bucket, but still need to delete the original file:
original_pdf_object.delete()
except BotoClientError:
# Boto exceptions are likely to be caused by the file(s) being in the wrong place, so retrying won't help -
# we'll need to manually investigate
current_app.logger.exception(
f"Boto error when processing sanitised letter for notification {notification.id} (file {filename})"
)
update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
raise NotificationTechnicalFailureException
except Exception:
try:
current_app.logger.exception(
"RETRY: calling process_sanitised_letter task for notification {} failed".format(notification.id)
)
self.retry(queue=QueueNames.RETRY)
except self.MaxRetriesExceededError:
message = (
"RETRY FAILED: Max retries reached. "
"The task process_sanitised_letter failed for notification {}. "
"Notification has been updated to technical-failure".format(notification.id)
)
update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
raise NotificationTechnicalFailureException(message)
def _move_invalid_letter_and_update_status(
*, notification, filename, scan_pdf_object, message=None, invalid_pages=None, page_count=None
):
try:
move_scan_to_invalid_pdf_bucket(
source_filename=filename, message=message, invalid_pages=invalid_pages, page_count=page_count
)
scan_pdf_object.delete()
update_letter_pdf_status(
reference=notification.reference, status=NOTIFICATION_VALIDATION_FAILED, billable_units=0
)
except BotoClientError:
current_app.logger.exception(
"Error when moving letter with id {} to invalid PDF bucket".format(notification.id)
)
update_notification_status_by_id(notification.id, NOTIFICATION_TECHNICAL_FAILURE)
raise NotificationTechnicalFailureException
@notify_celery.task(name="process-virus-scan-failed")
def process_virus_scan_failed(filename):
move_failed_pdf(filename, ScanErrorType.FAILURE)
reference = get_reference_from_filename(filename)
notification = dao_get_notification_by_reference(reference)
updated_count = update_letter_pdf_status(reference, NOTIFICATION_VIRUS_SCAN_FAILED, billable_units=0)
if updated_count != 1:
raise Exception(
"There should only be one letter notification for each reference. Found {} notifications".format(
updated_count
)
)
error = VirusScanError("notification id {} Virus scan failed: {}".format(notification.id, filename))
current_app.logger.exception(error)
raise error
@notify_celery.task(name="process-virus-scan-error")
def process_virus_scan_error(filename):
move_failed_pdf(filename, ScanErrorType.ERROR)
reference = get_reference_from_filename(filename)
notification = dao_get_notification_by_reference(reference)
updated_count = update_letter_pdf_status(reference, NOTIFICATION_TECHNICAL_FAILURE, billable_units=0)
if updated_count != 1:
raise Exception(
"There should only be one letter notification for each reference. Found {} notifications".format(
updated_count
)
)
error = VirusScanError("notification id {} Virus scan error: {}".format(notification.id, filename))
current_app.logger.exception(error)
raise error
def update_letter_pdf_status(reference, status, billable_units, recipient_address=None):
postage = None
if recipient_address:
        # parse with allow_international_letters=True so international postage can
        # be detected; the result is filtered against INTERNATIONAL_POSTAGE_TYPES below
postage = PostalAddress(
raw_address=recipient_address.replace(",", "\n"), allow_international_letters=True
).postage
postage = postage if postage in INTERNATIONAL_POSTAGE_TYPES else None
update_dict = {"status": status, "billable_units": billable_units, "updated_at": datetime.utcnow()}
if postage:
update_dict.update({"postage": postage, "international": True})
if recipient_address:
update_dict["to"] = recipient_address
update_dict["normalised_to"] = "".join(recipient_address.split()).lower()
return dao_update_notifications_by_reference(references=[reference], update_dict=update_dict)[0]
def replay_letters_in_error(filename=None):
# This method can be used to replay letters that end up in the ERROR directory.
# We had an incident where clamAV was not processing the virus scan.
if filename:
move_error_pdf_to_scan_bucket(filename)
# call task to add the filename to anti virus queue
current_app.logger.info("Calling scan_file for: {}".format(filename))
if current_app.config["ANTIVIRUS_ENABLED"]:
notify_celery.send_task(
name=TaskNames.SCAN_FILE,
kwargs={"filename": filename},
queue=QueueNames.ANTIVIRUS,
)
else:
# stub out antivirus in dev
sanitise_letter.apply_async([filename], queue=QueueNames.LETTERS)
else:
error_files = get_file_names_from_error_bucket()
for item in error_files:
moved_file_name = item.key.split("/")[1]
current_app.logger.info("Calling scan_file for: {}".format(moved_file_name))
move_error_pdf_to_scan_bucket(moved_file_name)
# call task to add the filename to anti virus queue
if current_app.config["ANTIVIRUS_ENABLED"]:
notify_celery.send_task(
name=TaskNames.SCAN_FILE,
kwargs={"filename": moved_file_name},
queue=QueueNames.ANTIVIRUS,
)
else:
# stub out antivirus in dev
sanitise_letter.apply_async([filename], queue=QueueNames.LETTERS)
@notify_celery.task(name="resanitise-pdf")
def resanitise_pdf(notification_id):
"""
`notification_id` is the notification id for a PDF letter which was either uploaded or sent using the API.
This task calls the `recreate_pdf_for_precompiled_letter` template preview task which recreates the
PDF for a letter which is already sanitised and in the letters-pdf bucket. The new file that is generated
will then overwrite the existing letter in the letters-pdf bucket.
"""
notification = get_notification_by_id(notification_id)
# folder_name is the folder that the letter is in the letters-pdf bucket e.g. '2021-10-10/'
folder_name = get_folder_name(notification.created_at)
filename = generate_letter_pdf_filename(
reference=notification.reference,
created_at=notification.created_at,
ignore_folder=True,
postage=notification.postage,
)
notify_celery.send_task(
name=TaskNames.RECREATE_PDF_FOR_PRECOMPILED_LETTER,
kwargs={
"notification_id": str(notification.id),
"file_location": f"{folder_name}{filename}",
"allow_international_letters": notification.service.has_permission(INTERNATIONAL_LETTERS),
},
queue=QueueNames.SANITISE_LETTERS,
)
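# Example invocation (a sketch; the queue choice here is an assumption, not
# dictated by this module):
#     resanitise_pdf.apply_async([str(notification_id)], queue=QueueNames.LETTERS)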
|
{
"content_hash": "682c324169cdda520dc5597624489379",
"timestamp": "",
"source": "github",
"line_count": 559,
"max_line_length": 120,
"avg_line_length": 42.658318425760285,
"alnum_prop": 0.6532751824205317,
"repo_name": "alphagov/notifications-api",
"id": "6851b9f6b814d3bcd3afb2286ac86d4dc5f50a3a",
"size": "23846",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "app/celery/letters_pdf_tasks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "719"
},
{
"name": "Jinja",
"bytes": "5543"
},
{
"name": "Makefile",
"bytes": "6627"
},
{
"name": "Mako",
"bytes": "361"
},
{
"name": "Procfile",
"bytes": "35"
},
{
"name": "Python",
"bytes": "3506225"
},
{
"name": "Shell",
"bytes": "13179"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from homes_to_let.factories.letting_factory import LettingFactory
from homes_to_let.factories.letting_feature_factory import LettingFeatureFactory
from homes_to_let.factories.letting_picture_factory import LettingPictureFactory
from homes_to_let.factories.letting_media_factory import LettingMediaFactory
from homes_to_let.factories.letting_contact_factory import LettingContactFactory
from homes_to_let.factories.letting_note_factory import LettingNoteFactory
from homes_to_let.factories.letting_favourite_factory import LettingFavouriteFactory
from homes_to_let.models import *
class LettingModelTestCase(TestCase):
def test_string_representation(self):
        letting = LettingFactory()
        self.assertEqual(str(letting), letting.title)
class LettingFeatureModelTestCase(TestCase):
def test_string_representation(self):
feature = LettingFeatureFactory()
        self.assertEqual(str(feature), feature.text)
class LettingPictureModelTestCase(TestCase):
def test_string_representation(self):
picture = LettingPictureFactory()
        self.assertEqual(str(picture), picture.caption)
class LettingMediaModelTestCase(TestCase):
def test_string_representation(self):
media = LettingMediaFactory()
        self.assertEqual(str(media), media.description)
class LettingContactModelTestCase(TestCase):
def test_string_representation(self):
contact = LettingContactFactory()
        self.assertEqual(str(contact), "%s %s (%s)" % (contact.forename, contact.surname, contact.email))
class LettingNoteModelTestCase(TestCase):
def test_string_representation(self):
note = LettingNoteFactory()
        self.assertEqual(str(note), note.text)
class LettingFavouriteModelTestCase(TestCase):
def test_string_representation(self):
favourite = LettingFavouriteFactory()
        self.assertEqual(str(favourite), favourite.user.username)
|
{
"content_hash": "1473bd86c1bec56ff30d7b777cea8e46",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 106,
"avg_line_length": 32.53333333333333,
"alnum_prop": 0.766905737704918,
"repo_name": "signalfire/django-property",
"id": "b86d1e66e6e4a744b1ab1d5b0363f829f24af85c",
"size": "1952",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "homes_to_let/tests/test_models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10580"
},
{
"name": "HTML",
"bytes": "65468"
},
{
"name": "JavaScript",
"bytes": "43866"
},
{
"name": "Python",
"bytes": "177600"
}
],
"symlink_target": ""
}
|
import angr
class tzset(angr.SimProcedure):
# emulate as a no-op
# important because on my libc this contains inlined iolib ops and thus can't be executed when simprocs are enabled.
def run(self): # pylint: disable=arguments-differ
pass
|
{
"content_hash": "fc6edc2a8894c2674788c423c2b6bc05",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 120,
"avg_line_length": 37,
"alnum_prop": 0.7181467181467182,
"repo_name": "angr/angr",
"id": "788d22ea8c9f25f24024d8cb058cbb9afcbb0bcc",
"size": "259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "angr/procedures/posix/tz.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6694"
},
{
"name": "C++",
"bytes": "146292"
},
{
"name": "Makefile",
"bytes": "946"
},
{
"name": "Python",
"bytes": "27717304"
}
],
"symlink_target": ""
}
|
from dataclasses import dataclass
class MissingSecretError(RuntimeError):
pass
def strip_keys_not_in_dataclass(data_dict: dict, dc: type) -> dict:
"""Return a copy of `data_dict` removing any keys that are not
properties of the dataclass dc.
"""
    return {k: v for k, v in data_dict.items() if k in dc.__dataclass_fields__}
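# Example usage (a sketch with a throwaway dataclass):
#
#     @dataclass
#     class Config:
#         host: str
#         port: int
#
#     strip_keys_not_in_dataclass({"host": "a", "port": 1, "debug": True}, Config)
#     # -> {"host": "a", "port": 1}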
|
{
"content_hash": "10b44edf14f6f7ef2820fb2030f95a9e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 86,
"avg_line_length": 29.833333333333332,
"alnum_prop": 0.6927374301675978,
"repo_name": "potatolondon/djangae",
"id": "23dd47d8cf318c8f2ba5a3c0c36e1e11f01519fe",
"size": "358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djangae/contrib/secrets/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1620"
},
{
"name": "Python",
"bytes": "414429"
}
],
"symlink_target": ""
}
|
import datetime
import io
from os import linesep
import re
import sys
from pip._vendor.toml.tz import TomlTz
if sys.version_info < (3,):
_range = xrange # noqa: F821
else:
unicode = str
_range = range
basestring = str
unichr = chr
def _detect_pathlib_path(p):
if (3, 4) <= sys.version_info:
import pathlib
if isinstance(p, pathlib.PurePath):
return True
return False
def _ispath(p):
if isinstance(p, basestring):
return True
return _detect_pathlib_path(p)
def _getpath(p):
if (3, 6) <= sys.version_info:
import os
return os.fspath(p)
if _detect_pathlib_path(p):
return str(p)
return p
try:
FNFError = FileNotFoundError
except NameError:
FNFError = IOError
TIME_RE = re.compile("([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?")
class TomlDecodeError(ValueError):
"""Base toml Exception / Error."""
def __init__(self, msg, doc, pos):
lineno = doc.count('\n', 0, pos) + 1
colno = pos - doc.rfind('\n', 0, pos)
emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos)
ValueError.__init__(self, emsg)
self.msg = msg
self.doc = doc
self.pos = pos
self.lineno = lineno
self.colno = colno
# Matches a TOML number, which allows underscores for readability
_number_with_underscores = re.compile('([0-9])(_([0-9]))*')
def _strictly_valid_num(n):
n = n.strip()
if not n:
return False
if n[0] == '_':
return False
if n[-1] == '_':
return False
if "_." in n or "._" in n:
return False
if len(n) == 1:
return True
if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']:
return False
if n[0] == '+' or n[0] == '-':
n = n[1:]
if len(n) > 1 and n[0] == '0' and n[1] != '.':
return False
if '__' in n:
return False
return True
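# Illustrative inputs for _strictly_valid_num (a sketch of the rules above,
# not an exhaustive specification):
#     _strictly_valid_num("1_000")  -> True   (underscores between digits)
#     _strictly_valid_num("_100")   -> False  (leading underscore)
#     _strictly_valid_num("1__0")   -> False  (doubled underscore)
#     _strictly_valid_num("01")     -> False  (leading zero without ., o, b or x)
#     _strictly_valid_num("0x1f")   -> True   (hex prefix is allowed)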
def load(f, _dict=dict, decoder=None):
"""Parses named file or files as toml and returns a dictionary
Args:
f: Path to the file to open, array of files to read into single dict
or a file descriptor
_dict: (optional) Specifies the class of the returned toml dictionary
Returns:
Parsed toml file represented as a dictionary
Raises:
TypeError -- When f is invalid type
TomlDecodeError: Error while decoding toml
IOError / FileNotFoundError -- When an array with no valid (existing)
(Python 2 / Python 3) file paths is passed
"""
if _ispath(f):
with io.open(_getpath(f), encoding='utf-8') as ffile:
return loads(ffile.read(), _dict, decoder)
elif isinstance(f, list):
from os import path as op
from warnings import warn
if not [path for path in f if op.exists(path)]:
error_msg = "Load expects a list to contain filenames only."
error_msg += linesep
error_msg += ("The list needs to contain the path of at least one "
"existing file.")
raise FNFError(error_msg)
if decoder is None:
decoder = TomlDecoder()
d = decoder.get_empty_table()
for l in f:
if op.exists(l):
d.update(load(l, _dict, decoder))
else:
warn("Non-existent filename in list with at least one valid "
"filename")
return d
else:
try:
return loads(f.read(), _dict, decoder)
except AttributeError:
raise TypeError("You can only load a file descriptor, filename or "
"list")
_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$')
def loads(s, _dict=dict, decoder=None):
"""Parses string as toml
Args:
s: String to be parsed
_dict: (optional) Specifies the class of the returned toml dictionary
Returns:
Parsed toml file represented as a dictionary
Raises:
TypeError: When a non-string is passed
TomlDecodeError: Error while decoding toml
"""
implicitgroups = []
if decoder is None:
decoder = TomlDecoder(_dict)
retval = decoder.get_empty_table()
currentlevel = retval
if not isinstance(s, basestring):
raise TypeError("Expecting something like a string")
if not isinstance(s, unicode):
s = s.decode('utf8')
original = s
sl = list(s)
openarr = 0
openstring = False
openstrchar = ""
multilinestr = False
arrayoftables = False
beginline = True
keygroup = False
dottedkey = False
keyname = 0
for i, item in enumerate(sl):
if item == '\r' and sl[i + 1] == '\n':
sl[i] = ' '
continue
if keyname:
if item == '\n':
raise TomlDecodeError("Key name found without value."
" Reached end of line.", original, i)
if openstring:
if item == openstrchar:
keyname = 2
openstring = False
openstrchar = ""
continue
elif keyname == 1:
if item.isspace():
keyname = 2
continue
elif item == '.':
dottedkey = True
continue
elif item.isalnum() or item == '_' or item == '-':
continue
elif (dottedkey and sl[i - 1] == '.' and
(item == '"' or item == "'")):
openstring = True
openstrchar = item
continue
elif keyname == 2:
if item.isspace():
if dottedkey:
nextitem = sl[i + 1]
if not nextitem.isspace() and nextitem != '.':
keyname = 1
continue
if item == '.':
dottedkey = True
nextitem = sl[i + 1]
if not nextitem.isspace() and nextitem != '.':
keyname = 1
continue
if item == '=':
keyname = 0
dottedkey = False
else:
raise TomlDecodeError("Found invalid character in key name: '" +
item + "'. Try quoting the key name.",
original, i)
if item == "'" and openstrchar != '"':
k = 1
try:
while sl[i - k] == "'":
k += 1
if k == 3:
break
except IndexError:
pass
if k == 3:
multilinestr = not multilinestr
openstring = multilinestr
else:
openstring = not openstring
if openstring:
openstrchar = "'"
else:
openstrchar = ""
if item == '"' and openstrchar != "'":
oddbackslash = False
k = 1
tripquote = False
try:
while sl[i - k] == '"':
k += 1
if k == 3:
tripquote = True
break
if k == 1 or (k == 3 and tripquote):
while sl[i - k] == '\\':
oddbackslash = not oddbackslash
k += 1
except IndexError:
pass
if not oddbackslash:
if tripquote:
multilinestr = not multilinestr
openstring = multilinestr
else:
openstring = not openstring
if openstring:
openstrchar = '"'
else:
openstrchar = ""
if item == '#' and (not openstring and not keygroup and
not arrayoftables):
j = i
try:
while sl[j] != '\n':
sl[j] = ' '
j += 1
except IndexError:
break
if item == '[' and (not openstring and not keygroup and
not arrayoftables):
if beginline:
if len(sl) > i + 1 and sl[i + 1] == '[':
arrayoftables = True
else:
keygroup = True
else:
openarr += 1
if item == ']' and not openstring:
if keygroup:
keygroup = False
elif arrayoftables:
if sl[i - 1] == ']':
arrayoftables = False
else:
openarr -= 1
if item == '\n':
if openstring or multilinestr:
if not multilinestr:
raise TomlDecodeError("Unbalanced quotes", original, i)
if ((sl[i - 1] == "'" or sl[i - 1] == '"') and (
sl[i - 2] == sl[i - 1])):
sl[i] = sl[i - 1]
if sl[i - 3] == sl[i - 1]:
sl[i - 3] = ' '
elif openarr:
sl[i] = ' '
else:
beginline = True
elif beginline and sl[i] != ' ' and sl[i] != '\t':
beginline = False
if not keygroup and not arrayoftables:
if sl[i] == '=':
raise TomlDecodeError("Found empty keyname. ", original, i)
keyname = 1
s = ''.join(sl)
s = s.split('\n')
multikey = None
multilinestr = ""
multibackslash = False
pos = 0
for idx, line in enumerate(s):
if idx > 0:
pos += len(s[idx - 1]) + 1
if not multilinestr or multibackslash or '\n' not in multilinestr:
line = line.strip()
if line == "" and (not multikey or multibackslash):
continue
if multikey:
if multibackslash:
multilinestr += line
else:
multilinestr += line
multibackslash = False
if len(line) > 2 and (line[-1] == multilinestr[0] and
line[-2] == multilinestr[0] and
line[-3] == multilinestr[0]):
try:
value, vtype = decoder.load_value(multilinestr)
except ValueError as err:
raise TomlDecodeError(str(err), original, pos)
currentlevel[multikey] = value
multikey = None
multilinestr = ""
else:
k = len(multilinestr) - 1
while k > -1 and multilinestr[k] == '\\':
multibackslash = not multibackslash
k -= 1
if multibackslash:
multilinestr = multilinestr[:-1]
else:
multilinestr += "\n"
continue
if line[0] == '[':
arrayoftables = False
if len(line) == 1:
raise TomlDecodeError("Opening key group bracket on line by "
"itself.", original, pos)
if line[1] == '[':
arrayoftables = True
line = line[2:]
splitstr = ']]'
else:
line = line[1:]
splitstr = ']'
i = 1
quotesplits = decoder._get_split_on_quotes(line)
quoted = False
for quotesplit in quotesplits:
if not quoted and splitstr in quotesplit:
break
i += quotesplit.count(splitstr)
quoted = not quoted
line = line.split(splitstr, i)
if len(line) < i + 1 or line[-1].strip() != "":
raise TomlDecodeError("Key group not on a line by itself.",
original, pos)
groups = splitstr.join(line[:-1]).split('.')
i = 0
while i < len(groups):
groups[i] = groups[i].strip()
if len(groups[i]) > 0 and (groups[i][0] == '"' or
groups[i][0] == "'"):
groupstr = groups[i]
j = i + 1
while not groupstr[0] == groupstr[-1]:
j += 1
if j > len(groups) + 2:
raise TomlDecodeError("Invalid group name '" +
groupstr + "' Something " +
"went wrong.", original, pos)
groupstr = '.'.join(groups[i:j]).strip()
groups[i] = groupstr[1:-1]
groups[i + 1:j] = []
else:
if not _groupname_re.match(groups[i]):
raise TomlDecodeError("Invalid group name '" +
groups[i] + "'. Try quoting it.",
original, pos)
i += 1
currentlevel = retval
for i in _range(len(groups)):
group = groups[i]
if group == "":
raise TomlDecodeError("Can't have a keygroup with an empty "
"name", original, pos)
try:
currentlevel[group]
if i == len(groups) - 1:
if group in implicitgroups:
implicitgroups.remove(group)
if arrayoftables:
raise TomlDecodeError("An implicitly defined "
"table can't be an array",
original, pos)
elif arrayoftables:
currentlevel[group].append(decoder.get_empty_table()
)
else:
raise TomlDecodeError("What? " + group +
" already exists?" +
str(currentlevel),
original, pos)
except TypeError:
currentlevel = currentlevel[-1]
if group not in currentlevel:
currentlevel[group] = decoder.get_empty_table()
if i == len(groups) - 1 and arrayoftables:
currentlevel[group] = [decoder.get_empty_table()]
except KeyError:
if i != len(groups) - 1:
implicitgroups.append(group)
currentlevel[group] = decoder.get_empty_table()
if i == len(groups) - 1 and arrayoftables:
currentlevel[group] = [decoder.get_empty_table()]
currentlevel = currentlevel[group]
if arrayoftables:
try:
currentlevel = currentlevel[-1]
except KeyError:
pass
elif line[0] == "{":
if line[-1] != "}":
raise TomlDecodeError("Line breaks are not allowed in inline"
"objects", original, pos)
try:
decoder.load_inline_object(line, currentlevel, multikey,
multibackslash)
except ValueError as err:
raise TomlDecodeError(str(err), original, pos)
elif "=" in line:
try:
ret = decoder.load_line(line, currentlevel, multikey,
multibackslash)
except ValueError as err:
raise TomlDecodeError(str(err), original, pos)
if ret is not None:
multikey, multilinestr, multibackslash = ret
return retval
def _load_date(val):
microsecond = 0
tz = None
try:
if len(val) > 19:
if val[19] == '.':
if val[-1].upper() == 'Z':
subsecondval = val[20:-1]
tzval = "Z"
else:
subsecondvalandtz = val[20:]
if '+' in subsecondvalandtz:
splitpoint = subsecondvalandtz.index('+')
subsecondval = subsecondvalandtz[:splitpoint]
tzval = subsecondvalandtz[splitpoint:]
elif '-' in subsecondvalandtz:
splitpoint = subsecondvalandtz.index('-')
subsecondval = subsecondvalandtz[:splitpoint]
tzval = subsecondvalandtz[splitpoint:]
else:
tzval = None
subsecondval = subsecondvalandtz
if tzval is not None:
tz = TomlTz(tzval)
microsecond = int(int(subsecondval) *
(10 ** (6 - len(subsecondval))))
else:
tz = TomlTz(val[19:])
except ValueError:
tz = None
if "-" not in val[1:]:
return None
try:
if len(val) == 10:
d = datetime.date(
int(val[:4]), int(val[5:7]),
int(val[8:10]))
else:
d = datetime.datetime(
int(val[:4]), int(val[5:7]),
int(val[8:10]), int(val[11:13]),
int(val[14:16]), int(val[17:19]), microsecond, tz)
except ValueError:
return None
return d
def _load_unicode_escapes(v, hexbytes, prefix):
skip = False
i = len(v) - 1
while i > -1 and v[i] == '\\':
skip = not skip
i -= 1
for hx in hexbytes:
if skip:
skip = False
i = len(hx) - 1
while i > -1 and hx[i] == '\\':
skip = not skip
i -= 1
v += prefix
v += hx
continue
hxb = ""
i = 0
hxblen = 4
if prefix == "\\U":
hxblen = 8
hxb = ''.join(hx[i:i + hxblen]).lower()
if hxb.strip('0123456789abcdef'):
raise ValueError("Invalid escape sequence: " + hxb)
if hxb[0] == "d" and hxb[1].strip('01234567'):
raise ValueError("Invalid escape sequence: " + hxb +
". Only scalar unicode points are allowed.")
v += unichr(int(hxb, 16))
v += unicode(hx[len(hxb):])
return v
# Unescape TOML string values.
# content after the \
_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"']
# What it should be replaced by
_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"']
# Used for substitution
_escape_to_escapedchars = dict(zip(_escapes, _escapedchars))
def _unescape(v):
"""Unescape characters in a TOML string."""
i = 0
backslash = False
while i < len(v):
if backslash:
backslash = False
if v[i] in _escapes:
v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:]
elif v[i] == '\\':
v = v[:i - 1] + v[i:]
elif v[i] == 'u' or v[i] == 'U':
i += 1
else:
raise ValueError("Reserved escape sequence used")
continue
elif v[i] == '\\':
backslash = True
i += 1
return v
class InlineTableDict(object):
"""Sentinel subclass of dict for inline tables."""
class TomlDecoder(object):
def __init__(self, _dict=dict):
self._dict = _dict
def get_empty_table(self):
return self._dict()
def get_empty_inline_table(self):
class DynamicInlineTableDict(self._dict, InlineTableDict):
"""Concrete sentinel subclass for inline tables.
It is a subclass of _dict which is passed in dynamically at load
time
It is also a subclass of InlineTableDict
"""
return DynamicInlineTableDict()
def load_inline_object(self, line, currentlevel, multikey=False,
multibackslash=False):
candidate_groups = line[1:-1].split(",")
groups = []
if len(candidate_groups) == 1 and not candidate_groups[0].strip():
candidate_groups.pop()
while len(candidate_groups) > 0:
candidate_group = candidate_groups.pop(0)
try:
_, value = candidate_group.split('=', 1)
except ValueError:
raise ValueError("Invalid inline table encountered")
value = value.strip()
if ((value[0] == value[-1] and value[0] in ('"', "'")) or (
value[0] in '-0123456789' or
value in ('true', 'false') or
(value[0] == "[" and value[-1] == "]") or
(value[0] == '{' and value[-1] == '}'))):
groups.append(candidate_group)
elif len(candidate_groups) > 0:
candidate_groups[0] = (candidate_group + "," +
candidate_groups[0])
else:
raise ValueError("Invalid inline table value encountered")
for group in groups:
status = self.load_line(group, currentlevel, multikey,
multibackslash)
if status is not None:
break
def _get_split_on_quotes(self, line):
doublequotesplits = line.split('"')
quoted = False
quotesplits = []
if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]:
singlequotesplits = doublequotesplits[0].split("'")
doublequotesplits = doublequotesplits[1:]
while len(singlequotesplits) % 2 == 0 and len(doublequotesplits):
singlequotesplits[-1] += '"' + doublequotesplits[0]
doublequotesplits = doublequotesplits[1:]
if "'" in singlequotesplits[-1]:
singlequotesplits = (singlequotesplits[:-1] +
singlequotesplits[-1].split("'"))
quotesplits += singlequotesplits
for doublequotesplit in doublequotesplits:
if quoted:
quotesplits.append(doublequotesplit)
else:
quotesplits += doublequotesplit.split("'")
quoted = not quoted
return quotesplits
def load_line(self, line, currentlevel, multikey, multibackslash):
i = 1
quotesplits = self._get_split_on_quotes(line)
quoted = False
for quotesplit in quotesplits:
if not quoted and '=' in quotesplit:
break
i += quotesplit.count('=')
quoted = not quoted
pair = line.split('=', i)
strictly_valid = _strictly_valid_num(pair[-1])
if _number_with_underscores.match(pair[-1]):
pair[-1] = pair[-1].replace('_', '')
while len(pair[-1]) and (pair[-1][0] != ' ' and pair[-1][0] != '\t' and
pair[-1][0] != "'" and pair[-1][0] != '"' and
pair[-1][0] != '[' and pair[-1][0] != '{' and
pair[-1] != 'true' and pair[-1] != 'false'):
try:
float(pair[-1])
break
except ValueError:
pass
if _load_date(pair[-1]) is not None:
break
i += 1
prev_val = pair[-1]
pair = line.split('=', i)
if prev_val == pair[-1]:
raise ValueError("Invalid date or number")
if strictly_valid:
strictly_valid = _strictly_valid_num(pair[-1])
pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()]
if '.' in pair[0]:
if '"' in pair[0] or "'" in pair[0]:
quotesplits = self._get_split_on_quotes(pair[0])
quoted = False
levels = []
for quotesplit in quotesplits:
if quoted:
levels.append(quotesplit)
else:
levels += [level.strip() for level in
quotesplit.split('.')]
quoted = not quoted
else:
levels = pair[0].split('.')
while levels[-1] == "":
levels = levels[:-1]
for level in levels[:-1]:
if level == "":
continue
if level not in currentlevel:
currentlevel[level] = self.get_empty_table()
currentlevel = currentlevel[level]
pair[0] = levels[-1].strip()
elif (pair[0][0] == '"' or pair[0][0] == "'") and \
(pair[0][-1] == pair[0][0]):
pair[0] = pair[0][1:-1]
if len(pair[1]) > 2 and ((pair[1][0] == '"' or pair[1][0] == "'") and
pair[1][1] == pair[1][0] and
pair[1][2] == pair[1][0] and
not (len(pair[1]) > 5 and
pair[1][-1] == pair[1][0] and
pair[1][-2] == pair[1][0] and
pair[1][-3] == pair[1][0])):
k = len(pair[1]) - 1
while k > -1 and pair[1][k] == '\\':
multibackslash = not multibackslash
k -= 1
if multibackslash:
multilinestr = pair[1][:-1]
else:
multilinestr = pair[1] + "\n"
multikey = pair[0]
else:
value, vtype = self.load_value(pair[1], strictly_valid)
try:
currentlevel[pair[0]]
raise ValueError("Duplicate keys!")
except TypeError:
raise ValueError("Duplicate keys!")
except KeyError:
if multikey:
return multikey, multilinestr, multibackslash
else:
currentlevel[pair[0]] = value
def load_value(self, v, strictly_valid=True):
if not v:
raise ValueError("Empty value is invalid")
if v == 'true':
return (True, "bool")
elif v == 'false':
return (False, "bool")
elif v[0] == '"' or v[0] == "'":
quotechar = v[0]
testv = v[1:].split(quotechar)
triplequote = False
triplequotecount = 0
if len(testv) > 1 and testv[0] == '' and testv[1] == '':
testv = testv[2:]
triplequote = True
closed = False
for tv in testv:
if tv == '':
if triplequote:
triplequotecount += 1
else:
closed = True
else:
oddbackslash = False
try:
i = -1
j = tv[i]
while j == '\\':
oddbackslash = not oddbackslash
i -= 1
j = tv[i]
except IndexError:
pass
if not oddbackslash:
if closed:
raise ValueError("Stuff after closed string. WTF?")
else:
if not triplequote or triplequotecount > 1:
closed = True
else:
triplequotecount = 0
if quotechar == '"':
escapeseqs = v.split('\\')[1:]
backslash = False
for i in escapeseqs:
if i == '':
backslash = not backslash
else:
if i[0] not in _escapes and (i[0] != 'u' and
i[0] != 'U' and
not backslash):
raise ValueError("Reserved escape sequence used")
if backslash:
backslash = False
for prefix in ["\\u", "\\U"]:
if prefix in v:
hexbytes = v.split(prefix)
v = _load_unicode_escapes(hexbytes[0], hexbytes[1:],
prefix)
v = _unescape(v)
if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or
v[1] == v[2]):
v = v[2:-2]
return (v[1:-1], "str")
elif v[0] == '[':
return (self.load_array(v), "array")
elif v[0] == '{':
inline_object = self.get_empty_inline_table()
self.load_inline_object(v, inline_object)
return (inline_object, "inline_object")
elif TIME_RE.match(v):
h, m, s, _, ms = TIME_RE.match(v).groups()
time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0)
return (time, "time")
else:
parsed_date = _load_date(v)
if parsed_date is not None:
return (parsed_date, "date")
if not strictly_valid:
raise ValueError("Weirdness with leading zeroes or "
"underscores in your number.")
itype = "int"
neg = False
if v[0] == '-':
neg = True
v = v[1:]
elif v[0] == '+':
v = v[1:]
v = v.replace('_', '')
lowerv = v.lower()
if '.' in v or ('x' not in v and ('e' in v or 'E' in v)):
if '.' in v and v.split('.', 1)[1] == '':
raise ValueError("This float is missing digits after "
"the point")
if v[0] not in '0123456789':
raise ValueError("This float doesn't have a leading "
"digit")
v = float(v)
itype = "float"
elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'):
v = float(v)
itype = "float"
if itype == "int":
v = int(v, 0)
if neg:
return (0 - v, itype)
return (v, itype)
def bounded_string(self, s):
if len(s) == 0:
return True
if s[-1] != s[0]:
return False
i = -2
backslash = False
while len(s) + i > 0:
if s[i] == "\\":
backslash = not backslash
i -= 1
else:
break
return not backslash
def load_array(self, a):
atype = None
retval = []
a = a.strip()
if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip():
strarray = False
tmpa = a[1:-1].strip()
if tmpa != '' and (tmpa[0] == '"' or tmpa[0] == "'"):
strarray = True
if not a[1:-1].strip().startswith('{'):
a = a[1:-1].split(',')
else:
# a is an inline object, we must find the matching parenthesis
# to define groups
new_a = []
start_group_index = 1
end_group_index = 2
in_str = False
while end_group_index < len(a[1:]):
if a[end_group_index] == '"' or a[end_group_index] == "'":
if in_str:
backslash_index = end_group_index - 1
while (backslash_index > -1 and
a[backslash_index] == '\\'):
in_str = not in_str
backslash_index -= 1
in_str = not in_str
if in_str or a[end_group_index] != '}':
end_group_index += 1
continue
# Increase end_group_index by 1 to get the closing bracket
end_group_index += 1
new_a.append(a[start_group_index:end_group_index])
# The next start index is at least after the closing
# bracket, a closing bracket can be followed by a comma
# since we are in an array.
start_group_index = end_group_index + 1
while (start_group_index < len(a[1:]) and
a[start_group_index] != '{'):
start_group_index += 1
end_group_index = start_group_index + 1
a = new_a
b = 0
if strarray:
while b < len(a) - 1:
ab = a[b].strip()
while (not self.bounded_string(ab) or
(len(ab) > 2 and
ab[0] == ab[1] == ab[2] and
ab[-2] != ab[0] and
ab[-3] != ab[0])):
a[b] = a[b] + ',' + a[b + 1]
ab = a[b].strip()
if b < len(a) - 2:
a = a[:b + 1] + a[b + 2:]
else:
a = a[:b + 1]
b += 1
else:
al = list(a[1:-1])
a = []
openarr = 0
j = 0
for i in _range(len(al)):
if al[i] == '[':
openarr += 1
elif al[i] == ']':
openarr -= 1
elif al[i] == ',' and not openarr:
a.append(''.join(al[j:i]))
j = i + 1
a.append(''.join(al[j:]))
for i in _range(len(a)):
a[i] = a[i].strip()
if a[i] != '':
nval, ntype = self.load_value(a[i])
if atype:
if ntype != atype:
raise ValueError("Not a homogeneous array")
else:
atype = ntype
retval.append(nval)
return retval
|
{
"content_hash": "71058cc794ba4a9eabd16a9b594de23a",
"timestamp": "",
"source": "github",
"line_count": 945,
"max_line_length": 80,
"avg_line_length": 37.10793650793651,
"alnum_prop": 0.4189694014315453,
"repo_name": "davidharvey1986/pyRRG",
"id": "20be459122dfc93ab8d844fd6dc964b1931a33c9",
"size": "35067",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "unittests/bugFixPyRRG/lib/python3.7/site-packages/pip/_vendor/toml/decoder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PowerShell",
"bytes": "8321"
},
{
"name": "Python",
"bytes": "5803472"
},
{
"name": "Shell",
"bytes": "3862"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import numpy as np
import sqaod as sq
N0 = 8
N1 = 8
# 1. set the problem. As an example, vectors and a matrix filled with ones are used.
b0 = np.ones(N0)
b1 = np.ones(N1)
W = np.ones((N1, N0))
# 2. choose a solver.
sol = sq.cpu # use CPU annealer
# If you want to use CUDA, check CUDA availability with sq.is_cuda_available().
if sq.is_cuda_available() :
import sqaod.cuda
sol = sqaod.cuda
# 3. instantiate the solver
ann = sol.bipartite_graph_annealer()
# 4. (optional) set random seed.
ann.seed(13255)
# 5. set the problem
# Set b0, b1, W and the optimize direction (minimize or maximize).
# n_trotters is implicitly set to N/4 by default.
ann.set_qubo(b0, b1, W, sq.maximize)
# 6. set preferences.
# The following line sets n_trotters to the dimension of W (N0 + N1).
ann.set_preferences(n_trotters = N0 + N1)
# alternative to 5. and 6.
# W, the optimize direction and n_trotters can also be set at instantiation.
# ann = sol.bipartite_graph_annealer(b0, b1, W, sq.maximize, n_trotters = N0 + N1)
# 7. get Ising model parameters (optional)
# Once W and the optimize direction are set, the Ising hamiltonian terms h0, h1, J and c are calculated.
# Use get_hamiltonian() to retrieve these values.
h0, h1, J, c = ann.get_hamiltonian()
print('h0=', h0)
print('h1=', h1)
print('J=', J)
print('c=', c)
# 8. show preferences (optional)
# Solver preferences are obtained by calling get_preferences().
# Preferences are always represented as a python dictionary object.
print(ann.get_preferences())
# 9. prepare to run anneal. Annealers must be prepared
# before calling randomize_spin() and anneal_one_step().
ann.prepare()
# 10. randomize or set x (0 or 1) to set the initial state (mandatory)
ann.randomize_spin()
# 11. annealing
Ginit = 5.
Gfin = 0.01
beta = 1. / 0.02
tau = 0.99
# annealing loop
G = Ginit
while Gfin <= G :
    # 11. call anneal_one_step to try flipping spins (n_bits x n_trotters) times.
ann.anneal_one_step(G, beta)
G *= tau
# 12. you may call get_E() to get the current E value.
# ann.get_E()
# 13. some methods to get results
# - Get the list of E values for every trotter.
E = ann.get_E()
# - Get annealed q. get_q() returns q matrix as (n_trotters, N)
qlist = ann.get_q()
# - Get annealed x. get_x() returns x matrix as (n_trotters, N)
xlist = ann.get_x()
# 14. creating summary object
summary = sq.make_summary(ann)
# 15. get the best energy (min E for a minimizing problem, max E for a maximizing problem)
print('E {}'.format(summary.E))
# 16. show the number of solutions that have the same energy as the best E.
print('Number of solutions : {}'.format(len(summary.xlist)))
# 17. show solutions. Max number of x is limited to 4.
nToShow = min(len(summary.xlist), 4)
for idx in range(nToShow) :
print(summary.xlist[idx])
|
{
"content_hash": "e1316e2455865c846a09b402393cfc36",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 93,
"avg_line_length": 28.151515151515152,
"alnum_prop": 0.6903480444922856,
"repo_name": "shinmorino/quant_sandbox",
"id": "a1dc04d6d10639314ed30d57de33830a9b7b3f29",
"size": "2787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqaodpy/example/bipartite_graph_annealer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "120151"
},
{
"name": "M4",
"bytes": "909"
},
{
"name": "Makefile",
"bytes": "1892"
},
{
"name": "Python",
"bytes": "61808"
},
{
"name": "Shell",
"bytes": "490"
}
],
"symlink_target": ""
}
|
class Solution(object):
    def isPowerOfThree(self, n):
        """
        :type n: int
        :rtype: bool
        """
        from math import log
        if n <= 0:
            # log() is undefined here, and no power of three is <= 0
            return False
        # round the (possibly imprecise) float logarithm, then verify exactly
        exponent = int(round(log(n, 3)))
        return n == 3 ** exponent
if __name__ == '__main__':
print(Solution().isPowerOfThree(28))
print(Solution().isPowerOfThree(27))
print(Solution().isPowerOfThree(81))
print(Solution().isPowerOfThree(243))
|
{
"content_hash": "65342dab252b8cb2cad665f5e9c270ef",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 41,
"avg_line_length": 25.125,
"alnum_prop": 0.5522388059701493,
"repo_name": "moonfruit/leetcode",
"id": "6dbd08d196544c11f1a578c38266648843be6d8b",
"size": "446",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "p326_power_of_three.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "126687"
},
{
"name": "Shell",
"bytes": "1077"
}
],
"symlink_target": ""
}
|
"""CVなど。"""
from __future__ import annotations
import logging
import typing
import numpy as np
import sklearn.model_selection
import pytoolkit as tk
FoldsType = typing.Sequence[
typing.Tuple[
typing.Union[typing.Sequence[int], np.ndarray],
typing.Union[typing.Sequence[int], np.ndarray],
]
]
logger = logging.getLogger(__name__)
def split(
dataset: tk.data.Dataset,
nfold: int,
split_seed: int | None = 1,
stratify: bool | np.ndarray | None = None,
) -> list[tuple[np.ndarray, np.ndarray]]:
"""nfold CV。
Args:
dataset: 分割する元のデータセット
nfold: 分割数。ただし1の場合は特別に5foldの最初の1個しか実行しないバージョンということにする。
(もうちょっと分かりやすいインターフェースにしたいが利便性と両立する案が無いのでとりあえず…)
split_seed: シード値。Noneならシャッフルしない。
stratify: Trueの場合か、Noneでかつdataset.labelsがndarrayかつndim == 1ならStratifiedKFold。
FalseならKFold。ndarrayの場合はそれを使ってStratifiedKFold。
"""
if nfold == 1:
return split(dataset, nfold=5, split_seed=split_seed, stratify=stratify)[:1]
logger.info(f"split: {len(dataset)=} {nfold=} {split_seed=} {stratify=}")
shuffle = split_seed is not None
if dataset.groups is not None:
groups = dataset.groups
if groups.dtype == object:
            # for string groups, in1d seems slow, so assign sequential integer ids
groups_map = {g: i for i, g in enumerate(np.unique(groups))}
assert len(groups_map) < 2**31
groups = np.frompyfunc(groups_map.__getitem__, nin=1, nout=1)(
groups
).astype(np.int32)
g = np.unique(groups)
cv = sklearn.model_selection.KFold(
n_splits=nfold, shuffle=shuffle, random_state=split_seed
)
folds = [
(
np.where(np.in1d(groups, g[train_indices]))[0],
np.where(np.in1d(groups, g[val_indices]))[0],
)
for train_indices, val_indices in cv.split(g, g)
]
else:
if stratify is None:
stratify = (
isinstance(dataset.labels, np.ndarray) and dataset.labels.ndim == 1
)
if isinstance(stratify, np.ndarray):
X = dataset.data
y: typing.Any = stratify
stratify = True
elif stratify:
X = dataset.data
y = dataset.labels
else:
X = list(range(len(dataset)))
y = None
cv = (
sklearn.model_selection.StratifiedKFold
if stratify
else sklearn.model_selection.KFold
)
cv = cv(nfold, shuffle=shuffle, random_state=split_seed)
folds = list(cv.split(X, y))
return folds
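# Example usage (a sketch; Dataset.slice is assumed to exist in tk.data for
# indexing, and dataset.labels is assumed to be a 1-d ndarray so that
# StratifiedKFold is chosen by default):
#     folds = split(dataset, nfold=5, split_seed=1)
#     for train_indices, val_indices in folds:
#         train_fold = dataset.slice(train_indices)
#         val_fold = dataset.slice(val_indices)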
def get_dummy_folds(train_set: tk.data.Dataset):
"""全データで学習して全データで検証するときのfoldsを返す。"""
indices = list(range(len(train_set)))
return [(indices, indices)]
def pseudo_labeling(
train_set: tk.data.Dataset,
folds1: FoldsType,
test_set: tk.data.Dataset,
folds2: FoldsType,
test_weights: float = 0.5,
):
"""pseudo labelなdataset, foldsを作って返す。
Args:
train_set: 訓練データ
folds1: 訓練データのfolds
test_set: テストデータ
folds2: テストデータのfolds
test_weights: 訓練データに対するテストデータの重み
"""
dataset = tk.data.Dataset.concat(train_set, test_set)
pl_weight = test_weights * len(train_set) / len(test_set)
w_train = (
np.ones(len(train_set)) if train_set.weights is None else train_set.weights
)
w_test = (
np.ones(len(test_set)) if test_set.weights is None else test_set.weights
) * pl_weight
    # w_test already includes pl_weight (applied above), so concatenate as-is
    dataset.weights = np.concatenate([w_train, w_test])
    # train_indices is train and test concatenated; val_indices uses train only.
folds = [
(np.concatenate([f1_t, np.asarray(f2_t) + len(train_set)]), f1_v)
for (f1_t, f1_v), (f2_t, _) in zip(folds1, folds2)
]
return dataset, folds
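# Example usage (a sketch, combining split() above):
#     folds1 = split(train_set, nfold=5)
#     folds2 = split(test_set, nfold=5)
#     pl_set, pl_folds = pseudo_labeling(train_set, folds1, test_set, folds2)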
def concat_folds(folds1: FoldsType, folds2: FoldsType, fold2_offset: int) -> FoldsType:
"""folds同士をくっつける。"""
assert len(folds1) == len(folds2)
return [
(
np.concatenate([f1_t, np.asarray(f2_t) + fold2_offset]),
np.concatenate([f1_v, np.asarray(f2_v) + fold2_offset]),
)
for (f1_t, f1_v), (f2_t, f2_v) in zip(folds1, folds2)
]
|
{
"content_hash": "70823e5ecc8a387f7cc05f46ab0ffaa8",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 87,
"avg_line_length": 29.28965517241379,
"alnum_prop": 0.590534494937603,
"repo_name": "ak110/pytoolkit",
"id": "43b3df741f47c26384deb5833d16df2dd97c8d02",
"size": "4775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytoolkit/validation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "175"
},
{
"name": "Python",
"bytes": "562006"
},
{
"name": "Shell",
"bytes": "595"
}
],
"symlink_target": ""
}
|
import argparse
import sqlite3
from hashlib import sha1
from os import walk
from os.path import abspath, basename, exists, expanduser, isdir, join
from time import asctime, localtime, time
DATABASE_FILE_PATH = expanduser('~/.prot.sql')
DATA_TABLE_NAME = 'files'
METADATA_TABLE_NAME = 'metadata'
STATUS_OK = 0
STATUS_MISMATCH = 1
STATUS_REMEDIATE_NEW = 2
STATUS_REMEDIATE_OLD = 3
STATUS_NEW = 4
CHUNK_SIZE = 4096
VERSION = '1.4.1'
HASH = 'SHA-1'
class DB_Manager(object):
def __init__(self):
self.conn = None
self.curs = None
def __enter__(self):
self.connect_to_db()
self.create_tables()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.disconnect_from_db()
def connect_to_db(self, db=DATABASE_FILE_PATH):
try:
self.conn = sqlite3.connect(db)
except:
print u'Could not connect to database {}'.format(db)
print 'Exiting'
exit(1)
self.curs = self.conn.cursor()
def disconnect_from_db(self):
self.conn.commit()
self.conn.close()
def create_tables(self):
command = '''CREATE TABLE IF NOT EXISTS {}
(filename TEXT, path TEXT, hash TEXT, old_hash TEXT,
time TEXT, old_time TEXT, status INTEGER)'''.format(DATA_TABLE_NAME)
self.curs.execute(command)
self.curs.execute('''CREATE TABLE IF NOT EXISTS {}
(start TEXT, end TEXT, files_scanned INTEGER, files_added INTEGER,
files_updated INTEGER, files_mismatched INTEGER)'''.format(METADATA_TABLE_NAME))
self.conn.commit()
def upsert_file(self, filename, path, hash):
command = u'SELECT * FROM {} WHERE path = ?'.format(DATA_TABLE_NAME)
self.curs.execute(command, (path,))
row = self.curs.fetchone()
if row is None:
self.curs.execute(u'''INSERT INTO {}(filename, path, hash, time, status)
VALUES (?, ?, ?, ?, ?)'''.format(DATA_TABLE_NAME),
(filename, path, hash, time(), STATUS_NEW))
elif row[6] != STATUS_MISMATCH: # note: only update if status is not mismatch
if row[6] == STATUS_REMEDIATE_OLD:
old_hash = row[3]
status = STATUS_OK if old_hash == hash else STATUS_MISMATCH
                self.curs.execute(u'''UPDATE {} SET hash = ?, time = ?, status = ?
                                   WHERE path = ?'''.format(DATA_TABLE_NAME), (hash, time(), status, path))
else:
old_hash = row[2]
status = STATUS_OK if old_hash == hash else STATUS_MISMATCH
self.curs.execute(u'''UPDATE {} SET hash = ?, time = ?, old_hash = hash, old_time = time, status = ?
WHERE path = ?'''.format(DATA_TABLE_NAME), (hash, time(), status, path))
self.conn.commit()
def remediate(self, path, status):
if exists(path):
if isdir(path):
for root, dirlist, filelist in walk(path, followlinks=True):
for f in filelist:
path = join(root, f)
self.curs.execute(u'''SELECT status FROM {} WHERE path = ?'''.format(DATA_TABLE_NAME), (path,))
row = self.curs.fetchone()
if row is not None and row[0] == STATUS_MISMATCH:
self.curs.execute(u'''UPDATE {} SET status = ? WHERE path = ?'''.format(DATA_TABLE_NAME),
(status, path))
self.conn.commit()
else:
print u'{} has no mismatch; skipping'.format(path)
else:
self.curs.execute(u'''SELECT status FROM {} WHERE path = ?'''.format(DATA_TABLE_NAME), (path,))
row = self.curs.fetchone()
if row is not None and row[0] == STATUS_MISMATCH:
self.curs.execute(u'''UPDATE {} SET status = ? WHERE path = ?'''.format(DATA_TABLE_NAME),
(status, path))
self.conn.commit()
else:
print u'{} has no mismatch; skipping'.format(path)
def get_mismatches(self, show_hashes=False, clean=False):
self.curs.execute('SELECT * FROM {} WHERE status = {:d}'.format(DATA_TABLE_NAME, STATUS_MISMATCH))
errors = 0
if not clean:
print 'MISMATCHES'
print_sep()
for row in self.curs:
errors += 1
if show_hashes:
print u'{} ({} != {})'.format(row[1], row[2], row[3])
else:
print row[1]
if not clean:
print_sep()
print '{:d} {} found'.format(errors, 'mismatch' if errors == 1 else 'mismatches')
return errors
def get_info(self, clean=False):
self.curs.execute('SELECT * FROM {} ORDER BY start DESC'.format(METADATA_TABLE_NAME))
(start, end, scanned, added, updated, mismatched) = self.curs.fetchone()
if not clean:
print 'LAST SCAN {} {}'.format(asctime(localtime(float(start))), asctime(localtime(float(end))))
print_sep()
print '{:d} file{} added'.format(added, 's' if added != 1 else '')
print '{:d} file{} updated'.format(updated, 's' if updated != 1 else '')
print '{:d} file{} with hash mismatches'.format(mismatched, 's' if mismatched != 1 else '')
if not clean:
print_sep()
print '{:d} total file{} scanned'.format(scanned, 's' if scanned != 1 else '')
def show_duplicates(self, filename, clean=False):
self.curs.execute(u'''SELECT path, hash FROM {} where filename = ?'''.format(DATA_TABLE_NAME), (filename,))
if not clean:
print filename
print_sep()
for row in self.curs:
print row[0], row[1]
if not clean:
print_sep()
def check_presence(self, path):
self.curs.execute(u'''SELECT * FROM {} WHERE path = ?'''.format(DATA_TABLE_NAME), (path,))
return self.curs.fetchone() is not None
def add_record(self, start_time, end_time, scanned):
self.curs.execute('SELECT COUNT(*) FROM {} WHERE status = {:d} AND time > {:f}'.format(DATA_TABLE_NAME,
STATUS_NEW, start_time))
added = self.curs.fetchone()[0]
self.curs.execute('SELECT COUNT(*) FROM {} WHERE NOT status = {:d} AND time > {:f}'.format(DATA_TABLE_NAME,
STATUS_NEW, start_time))
updated = self.curs.fetchone()[0]
self.curs.execute('SELECT COUNT(*) FROM {} WHERE status = {:d}'.format(DATA_TABLE_NAME, STATUS_MISMATCH))
mismatched = self.curs.fetchone()[0]
self.curs.execute('''INSERT INTO {} VALUES (?, ?, ?, ?, ?, ?)'''.format(METADATA_TABLE_NAME),
(start_time, end_time, scanned, added, updated, mismatched))
self.conn.commit()
def get_num_mismatches(self):
self.curs.execute('''SELECT COUNT(*) FROM {} WHERE status = {:d}'''.format(DATA_TABLE_NAME, STATUS_MISMATCH))
return self.curs.fetchone()[0]
def dump_database(self):
self.curs.execute('SELECT * FROM {}'.format(DATA_TABLE_NAME))
for row in self.curs:
print row
def protect_directory(directory, db, add_only=False):
files_scanned = 0
for root, dirlist, filelist in walk(directory, followlinks=True): # follow symlinks
for f in filelist:
files_scanned += 1
path = join(root, f)
if add_only and db.check_presence(path):
print u'skipping {}'.format(path)
continue
print u'hashing {}'.format(path)
with open(path, 'r') as source:
s = sha1()
chunk = source.read(CHUNK_SIZE)
while chunk != '':
s.update(chunk)
chunk = source.read(CHUNK_SIZE)
digest = s.hexdigest()
print u'storing {}'.format(path)
db.upsert_file(f, path, digest)
return files_scanned
def protect_file(path, db, add_only=False):
f = basename(path)
if add_only and db.check_presence(path):
print u'skipping {}'.format(path)
return
print u'hashing {}'. format(path)
with open(path, 'r') as source:
s = sha1()
chunk = source.read(CHUNK_SIZE)
while chunk != '':
s.update(chunk)
chunk = source.read(CHUNK_SIZE)
digest = s.hexdigest()
print u'storing {}'.format(path)
db.upsert_file(f, path, digest)
def print_sep():
print '-' * 40
if __name__ == '__main__':
# TODO: for mismatches, find duplicates across folders?
parser = argparse.ArgumentParser(description='Record file hashes and check on changes')
parser.add_argument('-v', '--version', version='saprotect {} using {}'.format(VERSION, HASH), action='version',
help='show version information')
parser.add_argument('-a', '--add-only', action='store_true',
help='when given with -p, only record files not already present in the database; '
                             'does nothing otherwise')
group = parser.add_mutually_exclusive_group()
group.add_argument('-p', '--protect', metavar='TARGET', nargs='+',
help='record the hashes of the TARGETs and print any mismatches found')
group.add_argument('-r', '--remediate-old', metavar='TARGET', nargs='+',
help='resolve hash mismatches on the TARGETs in favor of the old hash')
group.add_argument('-R', '--remediate-new', metavar='TARGET', nargs='+',
help='resolve hash mismatches on the TARGETs in favor of the new hash')
group.add_argument('-d', '--show-duplicates', metavar='FILE',
help='show hashes for all files with name FILE in the database')
group.add_argument('-m', '--list-mismatches', action='store_true', help='show files with mismatched hashes')
arguments = parser.parse_args()
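    # Example invocations (the paths are illustrative placeholders):
    #     python saprotect.py -p ~/documents        # hash and record files
    #     python saprotect.py -m                    # list hash mismatches
    #     python saprotect.py -r ~/documents/a.txt  # keep the old hash for a file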
with DB_Manager() as dbm:
if arguments.protect is not None:
if dbm.get_num_mismatches() > 0:
print 'ERROR: Remediate mismatches below first!'
dbm.get_mismatches()
exit(0)
files_scanned = 0
start_time = time()
for target in arguments.protect:
target = abspath(unicode(target))
if exists(target):
if isdir(target):
files_scanned += protect_directory(target, dbm, arguments.add_only)
else:
protect_file(target, dbm, arguments.add_only)
files_scanned += 1
end_time = time()
dbm.add_record(start_time, end_time, files_scanned)
elif arguments.remediate_old is not None:
for target in arguments.remediate_old:
target = abspath(unicode(target))
if exists(target):
dbm.remediate(target, STATUS_REMEDIATE_OLD)
elif arguments.remediate_new is not None:
for target in arguments.remediate_new:
target = abspath(unicode(target))
if exists(target):
dbm.remediate(target, STATUS_REMEDIATE_NEW)
elif arguments.list_mismatches:
dbm.get_mismatches(True)
elif arguments.show_duplicates is not None:
dbm.show_duplicates(unicode(arguments.show_duplicates))
else:
dbm.get_info()
|
{
"content_hash": "eaf420c4ea23472638d17bbd3eca4114",
"timestamp": "",
"source": "github",
"line_count": 283,
"max_line_length": 123,
"avg_line_length": 42.30035335689046,
"alnum_prop": 0.538300893826748,
"repo_name": "voynix/saprotect",
"id": "bf2cc04bcd0386deeb923559c146a83a933a4d3e",
"size": "11971",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saprotect.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11971"
}
],
"symlink_target": ""
}
|
__Author__ = "Yoshihiro Tanaka"
__date__ = "2014-11-21"
import json, sys, os, commands
dirs = commands.getoutput("ls " + sys.argv[1]).split("\n")
readDirs = []
for i in range(len(dirs)):
if "tag_" in dirs[i]:
dirname = sys.argv[1].rstrip("/") + "/" + dirs[i]
if os.path.isdir(dirname):
readDirs.append(dirname)
dirs = readDirs
tarDict = {}
tagnames = []
for dirname in dirs:
files = [dirname + "/" + r for r in commands.getoutput("ls " + dirname + "/").split("\n") if len(r) != 0]
header = True
tagDict = {}
for filename in files:
with open(filename) as f:
lines = f.readlines()
for line in lines:
try:
data = json.loads(line)
except Exception as e:
sys.stderr.write(e + "\n")
continue
if "values" in data:
if "tags" in data["values"][0]:
for values in data["values"]:
tags = values["tags"].split()
for tag in tags:
tagnames.append(tag)
try:
tagDict[tag] += 1
except:
tagDict[tag] = 1
tarDict[dirname.split("tag_")[1]] = tagDict
tagnames = [r.encode('utf-8') for r in list(set(tagnames))]
output = ["word"] + tagnames
print("\t".join(output))
for k, tagDict in tarDict.items():
output = [k]
for tag in tagnames:
if tag in tagDict:
output.append(str(tagDict[tag]))
else:
output.append(str(0))
print("\t".join(output))
|
{
"content_hash": "198c6370279a9f81f128f18bb6a466d3",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 109,
"avg_line_length": 32.83018867924528,
"alnum_prop": 0.46206896551724136,
"repo_name": "CORDEA/niconico-visualization",
"id": "f960c6fa26d79ea75a678df70f01edf598f002d5",
"size": "1782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "act1/tagParseJSON.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "867"
},
{
"name": "Python",
"bytes": "38655"
},
{
"name": "R",
"bytes": "790"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
setup(
name='numtiff',
version='0.1',
description='Load and save NumPy arrays from and to TIFF images',
author='Mark A. Tsuchida',
author_email='mark@tsuchida.org',
packages=['numtiff'],
)
|
{
"content_hash": "9d622b9e13c42cbf0beaafd9f03eaebf",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 71,
"avg_line_length": 26.4,
"alnum_prop": 0.625,
"repo_name": "marktsuchida/numtiff",
"id": "0c83e60fd65cb0dc18f31b5773dce7fe57cf5424",
"size": "1365",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34994"
}
],
"symlink_target": ""
}
|
import logging
import numpy as np
from copy import copy
__author__ = 'azu'
class Competitive:
logging.basicConfig(filename='logger.log', level=logging.DEBUG)
    def compet(self, p):
        # winner-take-all: return the index of the neuron with the largest activation
        wp = self.w * p
        return int(np.argmax(wp))
def __init__(self, patterns_len, neurons=2, alpha=0.5, epochs=1, w=None):
        if w is not None:
self.w = w
else:
self.w = np.matrix(np.zeros([neurons, patterns_len]))
self.alpha = alpha
self.epochs = epochs
def learn(self, patterns):
epoch = 0
        diff = np.ones_like(self.w)
        while epoch < self.epochs or diff.all():
for p in patterns:
old = copy(self.w)
logging.info("----------------------")
logging.info("Epoca %s",epoch+1)
logging.info("w = %s",self.w)
logging.info("p = %s",p)
a = self.compet(p)
logging.info("Neurona ganadora: %s", a + 1)
self.w[a] += self.alpha * (p.transpose() - self.w[a])
logging.info("Actualizando %sW",a+1)
diff = abs(self.w - old)
epoch += 1
logging.info("--------------------------")
logging.info("Red entrenada %s epocas",self.epochs)
logging.info("--------------------------")
|
{
"content_hash": "01cdb951cab9d4f793be4180eea2d183",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 31.47826086956522,
"alnum_prop": 0.47513812154696133,
"repo_name": "blankazucenalg/NeuralNetworks",
"id": "7fabd27772e7e31764c70808fcf0531af01cf956",
"size": "1448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Competitive/CompetitiveNN.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "27787"
},
{
"name": "Python",
"bytes": "18948"
}
],
"symlink_target": ""
}
|
"""
Get all users
"""
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443" # optional
handler_args['trusted_certs'] = "certs"
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["objtype"] = u'user'
print "...CALLING: handler.get_all with args: {}".format(kwargs)
response = handler.get_all(**kwargs)
print "...OUTPUT: Type of response: ", type(response)
print "...OUTPUT: print of response:"
print response
# call the export_obj() method to convert response to JSON and store it in out
export_kwargs = {}
export_kwargs['obj'] = response
export_kwargs['export_format'] = 'json'
print "...CALLING: handler.export_obj() with args {}".format(export_kwargs)
out = handler.export_obj(**export_kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
out = out.splitlines()[0:15]
out.append('..trimmed for brevity..')
out = '\n'.join(out)
print "...OUTPUT: print the objects returned in JSON format:"
print out
'''STDOUT from running this:
...CALLING: pytan.handler() with args: {'username': 'Administrator', 'record_all_requests': True, 'loglevel': 1, 'debugformat': False, 'host': '10.0.1.240', 'password': 'Tanium2015!', 'port': '443'}
...OUTPUT: handler string: PyTan v2.1.4 Handler for Session to 10.0.1.240:443, Authenticated: True, Platform Version: 6.5.314.4301
...CALLING: handler.get_all with args: {'objtype': u'user'}
...OUTPUT: Type of response: <class 'taniumpy.object_types.user_list.UserList'>
...OUTPUT: print of response:
UserList, len: 7
...CALLING: handler.export_obj() with args {'export_format': 'json', 'obj': <taniumpy.object_types.user_list.UserList object at 0x109c4b0d0>}
...OUTPUT: print the objects returned in JSON format:
{
"_type": "users",
"user": [
{
"_type": "user",
"deleted_flag": 0,
"group_id": 0,
"id": 1,
"last_login": "2015-09-14T20:10:15",
"local_admin_flag": 1,
"name": "Administrator",
"permissions": {
"_type": "permissions",
"permission": [
"admin",
..trimmed for brevity..
'''
'''STDERR from running this:
'''
|
{
"content_hash": "7a7aa954743122424fe013fdb827cfea",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 198,
"avg_line_length": 32.88429752066116,
"alnum_prop": 0.6956521739130435,
"repo_name": "tanium/pytan",
"id": "25bc9a412c52f6f4e41f8ccbb0c0c87081c054f6",
"size": "4001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EXAMPLES/PYTAN_API/get_all_users.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "13251"
},
{
"name": "CSS",
"bytes": "32442"
},
{
"name": "HTML",
"bytes": "1232764"
},
{
"name": "JavaScript",
"bytes": "375167"
},
{
"name": "Makefile",
"bytes": "4287"
},
{
"name": "Python",
"bytes": "2541262"
},
{
"name": "Shell",
"bytes": "3194"
}
],
"symlink_target": ""
}
|
from . import layer
from .. import opr as O
import functools
__all__ = ['make_resnet', 'make_resnet_18', 'make_resnet_34', 'make_resnet_50',
'make_resnet_101', 'make_resnet_152']
def residual_first(name, src, increase_dim=False, is_bottleneck=False):
_ = src
shape = _.static_shape
if not is_bottleneck:
in_channel = shape[3]
out_channel = in_channel
else:
in_channel = shape[3]
out_channel = in_channel * 4
if not is_bottleneck:
_ = layer.conv_bn_relu('{}_branch2a'.format(name), _, out_channel, 3, stride=1)
_ = layer.conv2d_kaiming('{}_branch2b'.format(name), _, out_channel, 3, stride=1, use_bias=False)
else:
_ = layer.conv_bn_relu('{}_branch2a'.format(name), _, in_channel, 1, stride=1)
_ = layer.conv_bn_relu('{}_branch2b'.format(name), _, in_channel, 3, stride=1)
_ = layer.conv2d_kaiming('{}_branch2c'.format(name), _, out_channel, 1, stride=1, use_bias=False)
src = layer.conv2d_kaiming('{}_branch1'.format(name), src, out_channel, 1, stride=1, use_bias=False)
_ = O.add(_, src, name='{}_addition'.format(name))
return _
def residual_block(name, src, increase_dim=False, is_bottleneck=False):
_ = src
shape = _.static_shape
if increase_dim:
if not is_bottleneck:
in_channel = shape[3]
out_channel = in_channel * 2
else:
in_channel = shape[3] // 2
out_channel = in_channel * 4
stride = 2
else:
if not is_bottleneck:
in_channel = shape[3]
out_channel = in_channel
else:
in_channel = shape[3] // 4
out_channel = in_channel * 4
stride = 1
if not is_bottleneck:
_ = layer.bn_relu_conv('{}_branch2a'.format(name), _, out_channel, 3, stride=stride)
_ = layer.bn_relu_conv('{}_branch2b'.format(name), _, out_channel, 3, stride=1)
else:
_ = layer.bn_relu_conv('{}_branch2a'.format(name), _, in_channel, 1, stride=stride)
_ = layer.bn_relu_conv('{}_branch2b'.format(name), _, in_channel, 3, stride=1)
_ = layer.bn_relu_conv('{}_branch2c'.format(name), _, out_channel, 1, stride=1)
if increase_dim:
src = layer.conv2d_kaiming('{}_branch1'.format(name), src, out_channel, 1, stride=2, use_bias=False)
_ = O.add(_, src, name='{}_addition'.format(name))
return _
def make_resnet(src, blocks, is_bottleneck, output_imm=False, imm_act=False):
"""
Build resnet (preact version).
:param src: input tensor, of data type NHWC.
:param blocks: number of residual blocks for each residual module. Length should be 4.
:param is_bottleneck: use the bottleneck module or not (1x1 -> 3x3 -> 1x1).
:param output_imm: whether to output immediate results (conv1 ~ conv5) or not. If true, return value would be `gap, [conv1, conv2, conv3, conv4, conv5]`.
:param imm_act: whether to add `bn_relu` as activation function for each immediate convs.
"""
_ = src
_ = layer.conv2d_kaiming('conv1', _, 64, 7, stride=2, use_bias=False)
convs_imm = [_]
_ = O.batch_norm('conv1_bn', _)
_ = O.relu(_, name='conv1_relu')
convs_imm_act = [_]
_ = layer.max_pooling2d_comp('pool1', _, 3, stride=2, padding='SAME')
residual = functools.partial(residual_block, is_bottleneck=is_bottleneck)
assert len(blocks) == 4
stages = [2, 3, 4, 5]
for s, b in zip(stages, blocks):
if s == 2:
_ = residual_first('conv2_0', _, is_bottleneck=is_bottleneck)
else:
_ = residual('conv{}_0'.format(s), _, increase_dim=True)
print(_.name, _.static_shape)
for i in range(1, b):
_ = residual('conv{}_{}'.format(s, i), _)
print(_.name, _.static_shape)
convs_imm.append(_)
if imm_act:
_act = O.bn_relu(_, name='conv{}_act'.format(s))
convs_imm_act.append(_act)
_ = O.batch_norm('gap_bn', _)
_ = O.relu(_, name='gap_relu')
_ = layer.global_avg_pooling2d('gap', _)
if output_imm:
if imm_act:
return _, convs_imm_act
else:
return _, convs_imm
return _
make_resnet_18 = functools.partial(make_resnet, blocks=[2, 2, 2, 2], is_bottleneck=False)
make_resnet_34 = functools.partial(make_resnet, blocks=[3, 4, 6, 3], is_bottleneck=False)
make_resnet_50 = functools.partial(make_resnet, blocks=[3, 4, 6, 3], is_bottleneck=True)
make_resnet_101 = functools.partial(make_resnet, blocks=[3, 4, 23, 3], is_bottleneck=True)
make_resnet_152 = functools.partial(make_resnet, blocks=[3, 8, 36, 3], is_bottleneck=True)
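# The block counts above follow the standard ResNet depths from He et al. (2015):
# resnet-18/34 use the basic two-conv block, while resnet-50/101/152 use the
# bottleneck block (1x1 -> 3x3 -> 1x1).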
if __name__ == '__main__':
img = O.placeholder('img', shape=(512, 224, 224, 3))
# out = make_resnet(img, [2, 2, 2, 2], False)
out = make_resnet(img, [3, 4, 6, 3], is_bottleneck=True)
from tartist.nn import get_default_env
for i in get_default_env().graph.get_operations():
print(i.name, i.type, sep='@', end=': ')
print(*[x.name for x in i.inputs], sep=', ')
|
{
"content_hash": "bc4be2e13d3903eceb2ee77d3bd90976",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 157,
"avg_line_length": 36.2,
"alnum_prop": 0.5832675611681136,
"repo_name": "vacancy/TensorArtist",
"id": "133e5927d2ca48a5d79b4b5455c129a7e87941b7",
"size": "5227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tartist/nn/cblk/resnet_preact.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "497134"
},
{
"name": "Shell",
"bytes": "630"
}
],
"symlink_target": ""
}
|
import mock
from oslo_policy import policy as oslo_policy
import six
import webob
from nova.api.openstack.compute import extension_info
from nova.api.openstack.compute import servers \
as server_v21
from nova.compute import api as compute_api
from nova import db
from nova import exception
from nova import policy
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests import uuidsentinel as uuids
class ServerStartStopTestV21(test.TestCase):
def setUp(self):
super(ServerStartStopTestV21, self).setUp()
self._setup_controller()
self.req = fakes.HTTPRequest.blank('')
self.useFixture(nova_fixtures.SingleCellSimple())
self.stub_out('nova.db.instance_get_by_uuid',
fakes.fake_instance_get())
def _setup_controller(self):
ext_info = extension_info.LoadedExtensionInfo()
self.controller = server_v21.ServersController(
extension_info=ext_info)
@mock.patch.object(compute_api.API, 'start')
def test_start(self, start_mock):
body = dict(start="")
self.controller._start_server(self.req, uuids.instance, body)
start_mock.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'start',
side_effect=exception.InstanceNotReady(
instance_id=uuids.instance))
def test_start_not_ready(self, start_mock):
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, self.req, uuids.instance, body)
@mock.patch.object(compute_api.API, 'start',
side_effect=exception.InstanceIsLocked(
instance_uuid=uuids.instance))
def test_start_locked_server(self, start_mock):
body = dict(start="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, self.req, uuids.instance, body)
@mock.patch.object(compute_api.API, 'start',
side_effect=exception.InstanceIsLocked(
instance_uuid=uuids.instance))
def test_start_invalid_state(self, start_mock):
body = dict(start="")
ex = self.assertRaises(webob.exc.HTTPConflict,
self.controller._start_server, self.req, uuids.instance, body)
self.assertIn('is locked', six.text_type(ex))
@mock.patch.object(compute_api.API, 'stop')
def test_stop(self, stop_mock):
body = dict(stop="")
self.controller._stop_server(self.req, uuids.instance, body)
stop_mock.assert_called_once_with(mock.ANY, mock.ANY)
@mock.patch.object(compute_api.API, 'stop',
side_effect=exception.InstanceNotReady(
instance_id=uuids.instance))
def test_stop_not_ready(self, stop_mock):
body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, self.req, uuids.instance, body)
@mock.patch.object(compute_api.API, 'stop',
side_effect=exception.InstanceIsLocked(
instance_uuid=uuids.instance))
def test_stop_locked_server(self, stop_mock):
body = dict(stop="")
ex = self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, self.req, uuids.instance, body)
self.assertIn('is locked', six.text_type(ex))
@mock.patch.object(compute_api.API, 'stop',
side_effect=exception.InstanceIsLocked(
instance_uuid=uuids.instance))
def test_stop_invalid_state(self, stop_mock):
        body = dict(stop="")
self.assertRaises(webob.exc.HTTPConflict,
self.controller._stop_server, self.req, uuids.instance, body)
@mock.patch.object(db, 'instance_get_by_uuid',
side_effect=exception.InstanceNotFound(
instance_id=uuids.instance))
def test_start_with_bogus_id(self, get_mock):
body = dict(start="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._start_server, self.req, uuids.instance, body)
@mock.patch.object(db, 'instance_get_by_uuid',
side_effect=exception.InstanceNotFound(
instance_id=uuids.instance))
def test_stop_with_bogus_id(self, get_mock):
body = dict(stop="")
self.assertRaises(webob.exc.HTTPNotFound,
self.controller._stop_server, self.req, uuids.instance, body)
class ServerStartStopPolicyEnforcementV21(test.TestCase):
start_policy = "os_compute_api:servers:start"
stop_policy = "os_compute_api:servers:stop"
def setUp(self):
super(ServerStartStopPolicyEnforcementV21, self).setUp()
ext_info = extension_info.LoadedExtensionInfo()
self.controller = server_v21.ServersController(
extension_info=ext_info)
self.req = fakes.HTTPRequest.blank('')
self.useFixture(nova_fixtures.SingleCellSimple())
self.stub_out(
'nova.db.instance_get_by_uuid',
fakes.fake_instance_get(
project_id=self.req.environ['nova.context'].project_id))
def test_start_policy_failed(self):
rules = {
self.start_policy: "project_id:non_fake"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
body = dict(start="")
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._start_server,
self.req, uuids.instance, body)
self.assertIn(self.start_policy, exc.format_message())
def test_start_overridden_policy_failed_with_other_user_in_same_project(
self):
rules = {
self.start_policy: "user_id:%(user_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
body = dict(start="")
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._start_server,
self.req, uuids.instance, body)
self.assertIn(self.start_policy, exc.format_message())
@mock.patch('nova.compute.api.API.start')
def test_start_overridden_policy_pass_with_same_user(self, start_mock):
rules = {
self.start_policy: "user_id:%(user_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
body = dict(start="")
self.controller._start_server(self.req, uuids.instance, body)
start_mock.assert_called_once_with(mock.ANY, mock.ANY)
def test_stop_policy_failed_with_other_project(self):
rules = {
self.stop_policy: "project_id:%(project_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
body = dict(stop="")
# Change the project_id in request context.
self.req.environ['nova.context'].project_id = 'other-project'
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._stop_server,
self.req, uuids.instance, body)
self.assertIn(self.stop_policy, exc.format_message())
@mock.patch('nova.compute.api.API.stop')
def test_stop_overridden_policy_pass_with_same_project(self, stop_mock):
rules = {
self.stop_policy: "project_id:%(project_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
body = dict(stop="")
self.controller._stop_server(self.req, uuids.instance, body)
stop_mock.assert_called_once_with(mock.ANY, mock.ANY)
def test_stop_overridden_policy_failed_with_other_user_in_same_project(
self):
rules = {
self.stop_policy: "user_id:%(user_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
# Change the user_id in request context.
self.req.environ['nova.context'].user_id = 'other-user'
body = dict(stop="")
exc = self.assertRaises(exception.PolicyNotAuthorized,
self.controller._stop_server,
self.req, uuids.instance, body)
self.assertIn(self.stop_policy, exc.format_message())
@mock.patch('nova.compute.api.API.stop')
def test_stop_overridden_policy_pass_with_same_user(self, stop_mock):
rules = {
self.stop_policy: "user_id:%(user_id)s"
}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
body = dict(stop="")
self.controller._stop_server(self.req, uuids.instance, body)
stop_mock.assert_called_once_with(mock.ANY, mock.ANY)
|
{
"content_hash": "2c5024941674433c5e7236850eda303a",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 76,
"avg_line_length": 42.84688995215311,
"alnum_prop": 0.6141820212171971,
"repo_name": "vmturbo/nova",
"id": "c7624e7656b155b92d544d304f2ffe4ce147158f",
"size": "9571",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/unit/api/openstack/compute/test_server_start_stop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "601"
},
{
"name": "PHP",
"bytes": "4503"
},
{
"name": "Python",
"bytes": "18983608"
},
{
"name": "Shell",
"bytes": "31813"
},
{
"name": "Smarty",
"bytes": "307089"
}
],
"symlink_target": ""
}
|
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: f5bigip_ltm_profile_ocsp_stapling_params
short_description: BIG-IP ltm profile ocsp stapling params module
description:
- Configures an OCSP Stapling Params profile.
version_added: "2.4"
author:
- "Gabriel Fortin (@GabrielFortin)"
options:
cache_error_timeout:
description:
- Specifies the lifetime of an error response in the cache, in seconds.
default: 3600
cache_timeout:
description:
- Specifies the lifetime of the OCSP response in the cache, in seconds.
default: indefinite
clock_skew:
description:
- Specifies the tolerable absolute difference in the clocks of the responder and the BIG-IP, in seconds.
default: 300
dns_resolver:
description:
- Specifies the DNS resolver object used for fetching the OCSP response.
required: false
proxy_server_pool:
description:
- Specifies the proxy server pool used for fetching the OCSP response.
responder_url:
description:
- Specifies the absolute URL that overrides the OCSP responder URL obtained from the certificate's AIA
extension(s).
sign_hash:
description:
- Specifies the hash algorithm used for signing the OCSP request.
default: sha256
choices: ['sha256', 'sha1']
signer_cert:
description:
- Specifies the certificate corresponding to the key used for signing the OCSP request.
signer_key:
description:
            - Specifies the key used for signing the OCSP request.
signer_key_passphrase:
description:
- Specifies the passphrase of the key used for signing the OCSP request.
state:
description:
- Specifies the state of the component on the BIG-IP system.
default: present
choices: ['absent', 'present']
status_age:
description:
- Specifies the allowed age of the OCSP response when nextUpdate time is omitted from the response.
default: 300
strict_resp_cert_check:
description:
- If enabled, the responder's certificate is checked for OCSP signing extension.
default: disabled
choices: ['disabled', 'enabled']
timeout:
description:
- Specifies the time interval (in seconds) that the BIG-IP waits for before aborting the connection to the
OCSP responder.
default: 8
trusted_ca:
description:
- Specifies the certificate-authority that signs the responder's certificate.
trusted_responders:
description:
- Specifies the certificate(s) used for validating the OCSP response when the responder's certificate has
been omitted from the response.
use_proxy_server:
description:
- Specifies whether the proxy server pool or the DNS resolver should be used for the connection to the OCSP
responder.
choices: ['disabled', 'enabled']
requirements:
- BIG-IP >= 12.0
- ansible-common-f5
- f5-sdk
'''
EXAMPLES = '''
- name: Create LTM Profile OCSP Stapling Params
f5bigip_ltm_profile_ocsp_stapling_params:
f5_hostname: 172.16.227.35
f5_username: admin
f5_password: admin
f5_port: 443
name: my_ocsp_stapling_params_profile
partition: Common
dns_resolver: /Common/my_dns_resolver
trusted_ca: /Common/ca-bundle.crt
use_proxy_server: disabled
state: present
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible_common_f5.base import F5_ACTIVATION_CHOICES
from ansible_common_f5.base import F5_NAMED_OBJ_ARGS
from ansible_common_f5.base import F5_PROVIDER_ARGS
from ansible_common_f5.bigip import F5BigIpNamedObject
class ModuleParams(object):
@property
def argument_spec(self):
argument_spec = dict(
cache_error_timeout=dict(type='int'),
cache_timeout=dict(type='int'),
clock_skew=dict(type='int'),
dns_resolver=dict(type='str'),
proxy_server_pool=dict(type='str'),
responder_url=dict(type='str'),
sign_hash=dict(type='str', choices=['sha1', 'sha256']),
signer_cert=dict(type='str'),
signer_key=dict(type='str'),
signer_key_passphrase=dict(type='str', no_log=True),
status_age=dict(type='int'),
strict_resp_cert_check=dict(type='str', choices=F5_ACTIVATION_CHOICES),
timeout=dict(type='int'),
trusted_ca=dict(type='str'),
trusted_responders=dict(type='str'),
use_proxy_server=dict(type='str', choices=F5_ACTIVATION_CHOICES)
)
argument_spec.update(F5_PROVIDER_ARGS)
argument_spec.update(F5_NAMED_OBJ_ARGS)
return argument_spec
@property
def supports_check_mode(self):
return True
class F5BigIpLtmProfileOcspStaplingParams(F5BigIpNamedObject):
def _set_crud_methods(self):
self._methods = {
'create': self._api.tm.ltm.profile.ocsp_stapling_params_s.ocsp_stapling_params.create,
'read': self._api.tm.ltm.profile.ocsp_stapling_params_s.ocsp_stapling_params.load,
'update': self._api.tm.ltm.profile.ocsp_stapling_params_s.ocsp_stapling_params.update,
'delete': self._api.tm.ltm.profile.ocsp_stapling_params_s.ocsp_stapling_params.delete,
'exists': self._api.tm.ltm.profile.ocsp_stapling_params_s.ocsp_stapling_params.exists
}
def main():
params = ModuleParams()
module = AnsibleModule(argument_spec=params.argument_spec, supports_check_mode=params.supports_check_mode)
try:
obj = F5BigIpLtmProfileOcspStaplingParams(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except Exception as exc:
module.fail_json(msg=str(exc))
if __name__ == '__main__':
main()
|
{
"content_hash": "3368a9b909678d7290597d8960a20497",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 119,
"avg_line_length": 36.37647058823529,
"alnum_prop": 0.6447283311772316,
"repo_name": "GabrielFortin/ansible-module-f5bigip",
"id": "4bc51f3941ec1beeda989aa24c1d74746a30b3ea",
"size": "6829",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "library/f5bigip_ltm_profile_ocsp_stapling_params.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1265283"
}
],
"symlink_target": ""
}
|
import shippo
shippo.auth = ('wissam+test@mit.edu', 'wissamtest')
print "Creating address..."
resp = shippo.Address.create(
object_purpose='QUOTE',
name='Laura Behrens Wu',
street1='Clayton St.',
company='Shippo',
street_no=215,
phone='+1 555 341 9393',
city='San Francisco',
state='CA',
zipcode='94117',
country='US',
email='laura@goshippo.com',
metadata= 'Customer ID 123456'
)
print 'Success: %r' % (resp, )
|
{
"content_hash": "067c0d165afd05ed1b78c669d8fd45f7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 51,
"avg_line_length": 21.045454545454547,
"alnum_prop": 0.6241900647948164,
"repo_name": "bosswissam/shippo-python",
"id": "d27140261fd369729f7c53d01c0cd7eb1655ea16",
"size": "463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23207"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
class PosConfig(AppConfig):
name = 'pos'
|
{
"content_hash": "7d9dc8ff0ff36a259a6df3cf49733605",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 33,
"avg_line_length": 16.2,
"alnum_prop": 0.7283950617283951,
"repo_name": "Epse/EpPos",
"id": "6ec62a9a1105fd6b1772fdca93adb20adc5e40bb",
"size": "81",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "web/pos/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "213738"
},
{
"name": "Dockerfile",
"bytes": "290"
},
{
"name": "HTML",
"bytes": "34806"
},
{
"name": "JavaScript",
"bytes": "234230"
},
{
"name": "Makefile",
"bytes": "256"
},
{
"name": "Python",
"bytes": "45346"
},
{
"name": "Shell",
"bytes": "578"
}
],
"symlink_target": ""
}
|
import rospy
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
from motion_editor_core.msg import ExecuteMotionAction, ExecuteMotionGoal
"""
Created on 30.05.2013
@author: Philipp Schillinger, Martin Oehler
"""
class MotionServiceState(EventState):
"""Implements a state where a certain motion is performed.
This state can be used to execute motions created by the motion editor.
-- motion_key string Reference to the motion to be executed.
-- time_factor float Factor to multiply with the default execution time of the specified motion.
For example, 2 will result in a motion twice as long, thus executed at half speed.
<= done Indicates that the expected execution time of the motion has passed.
The motion service provides no way to check if a motion has actually finished.
<= failed Indicates that the requested motion doesn't exist and therefore wasn't executed
"""
def __init__(self, motion_key, time_factor=1):
"""
Constructor
"""
super(MotionServiceState, self).__init__(outcomes=['done', 'failed'])
self.motion_key = motion_key
self.time_factor = time_factor
self._finish_time = None
self._motion_goal_ns = '/motion_service/motion_goal'
self._client = ProxyActionClient({self._motion_goal_ns: ExecuteMotionAction})
self._failed = False
self._done = False
def execute(self, userdata):
"""
Execute this state
"""
if self._failed:
return 'failed'
if self._done:
return 'done'
if self._client.has_result(self._motion_goal_ns):
result = self._client.get_result(self._motion_goal_ns)
if result is None: # Bug in actionlib, sometimes returns None instead of result
# Operator decision needed
Logger.logwarn("Failed to execute the motion '%s':\nAction result is None" % self.motion_key)
self._failed = True
return 'failed'
            if result.error_code is None or len(result.error_code) == 0:
                Logger.logwarn("Failed to execute the motion '%s':\nAction result contains no error codes" % self.motion_key)
self._failed = True
return 'failed'
            success = all(x == 0 for x in result.error_code)
if success:
rospy.loginfo('Trajectory finished successfully.') # dont need to send this to the operator
self._done = True
return 'done'
else:
Logger.logwarn("Failed to execute the motion '%s':\n%s" % (self.motion_key, str(result.error_code)))
self._failed = True
return 'failed'
def on_enter(self, userdata):
self._failed = False
self._done = False
# build goal
goal = ExecuteMotionGoal()
goal.motion_name = self.motion_key
goal.time_factor = self.time_factor
try:
self._client.send_goal(self._motion_goal_ns, goal)
except Exception as e:
Logger.logwarn("Failed sending motion goal for '%s':\n%s" % (self.motion_key, str(e)))
def on_exit(self, userdata):
if not self._client.has_result(self._motion_goal_ns):
self._client.cancel(self._motion_goal_ns)
Logger.loginfo("Cancelled active motion goal.")
|
{
"content_hash": "a03f195dcd56a9a5c518c523c5ea6d1f",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 116,
"avg_line_length": 38.177083333333336,
"alnum_prop": 0.6163710777626193,
"repo_name": "team-vigir/vigir_behaviors",
"id": "c7ee590e48d681d8548ed807069f4e3ca9eff9dd",
"size": "3688",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vigir_flexbe_states/src/vigir_flexbe_states/motion_service_state.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "36163"
},
{
"name": "Python",
"bytes": "914028"
}
],
"symlink_target": ""
}
|
import io
import logging
import mimetypes
import os
import stat
import magic
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.http import HttpResponse
from django.http import StreamingHttpResponse
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from igdectk.rest.handler import *
from igdectk.rest.response import HttpResponseRest
from medialibrary.models import Media
from permission.utils import get_permissions_for
from . import localsettings
from .base import RestMediaLibrary
logger = logging.getLogger('collgate')
class RestMedia(RestMediaLibrary):
regex = r'^media/$'
suffix = 'media'
class RestMediaUpload(RestMedia):
regex = r'^upload/$'
suffix = 'upload'
class RestMediaUUID(RestMedia):
regex = r'^(?P<uuid>[a-zA-Z0-9]{8}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{4}-[a-zA-Z0-9]{12})/$'
suffix = 'uuid'
class RestMediaUUIDUpload(RestMediaUUID):
regex = r'^upload/$'
suffix = 'upload'
class RestMediaUUIDDownload(RestMediaUUID):
regex = r'^download/$'
suffix = 'download'
@RestMediaUUID.def_auth_request(Method.GET, Format.JSON)
def get_media(request, uuid):
"""
Returns the media details (mime-type, file name, size...) but not the file content.
"""
media = get_object_or_404(Media, uuid=uuid)
# check user permission on the media
if media.owner_content_type == "auth.user":
if media.owner_object_id != request.user.pk:
raise PermissionDenied(_('Your are not the owner of the media'))
else:
perms = get_permissions_for(request.user,
media.owner_content_type.app_label,
media.owner_content_type.model,
media.owner_object_id)
if len(perms) == 0:
raise PermissionDenied(_('No permissions to the owner entity'))
result = {
'id': media.pk,
'uuid': media.uuid,
'name': media.name,
'created_date': media.created_date,
'modified_date': media.modified_date,
'file_name': media.file_name,
'file_size': media.file_size,
'mime_type': media.mime_type
}
return HttpResponseRest(request, result)
@RestMediaUUID.def_auth_request(Method.DELETE, Format.JSON)
def delete_media(request, uuid):
"""
Delete an existing media if the actual owner is the user of the upload.
"""
media = get_object_or_404(Media, uuid=uuid)
# check user permission on the media
if ".".join(media.owner_content_type.natural_key()) != "auth.user" or media.owner_object_id != request.user.pk:
raise PermissionDenied(_("Your are not the owner of the media"))
try:
# delete the related file
abs_filename = os.path.join(localsettings.storage_path, media.name)
if os.path.exists(abs_filename):
os.remove(abs_filename)
# and the model
media.delete()
    except Exception:
raise SuspiciousOperation(_("Unable to delete the media"))
return HttpResponseRest(request, {})
@RestMediaUUIDDownload.def_auth_request(Method.GET, Format.ANY)
def download_media_content(request, uuid):
"""
Download the content of a file using its UUID.
@see https://www.nginx.com/resources/wiki/start/topics/examples/x-accel/
@see https://bitbucket.org/renlinx007/django-fileprovider project for example
"""
media = get_object_or_404(Media, uuid=uuid)
# check user permission on the media
    if ".".join(media.owner_content_type.natural_key()) == "auth.user":
        if media.owner_object_id != request.user.pk:
            raise PermissionDenied(_('You are not the owner of the media'))
else:
perms = get_permissions_for(request.user,
media.owner_content_type.app_label,
media.owner_content_type.model,
media.owner_object_id)
if len(perms) == 0:
raise PermissionDenied(_('No permissions to the owner entity'))
if settings.DEBUG:
abs_filename = os.path.join(localsettings.storage_path, media.name)
local_file = open(abs_filename, "rb")
# response = HttpResponse(content_type=media.mime_type)
# response['Content-Disposition'] = 'attachment; filename="' + media.file_name + '"'
# response.content = local_file
response = StreamingHttpResponse(local_file, content_type=media.mime_type)
response['Content-Disposition'] = 'attachment; filename="' + media.file_name + '"'
response['Content-Length'] = media.file_size
else:
response = HttpResponse(content_type=media.mime_type)
response['Content-Disposition'] = 'attachment; filename="' + media.file_name + '"'
response['X-Accel-Redirect'] = "{0}/{1}".format(localsettings.storage_location, media.name)
return response
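# Illustrative nginx internal location to pair with the X-Accel-Redirect header
# above (the location prefix and alias path are assumptions; they must match
# localsettings.storage_location and localsettings.storage_path):
#
#   location /media-storage/ {
#       internal;
#       alias /var/local/collgate/media/;
#   }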
@RestMediaUpload.def_auth_request(Method.POST, Format.JSON)
def upload_media(request):
"""
    Upload a media file from a multi-part HTTP file request.
@see https://docs.djangoproject.com/fr/1.10/ref/files/uploads/#custom-upload-handlers
"""
if not request.FILES:
raise SuspiciousOperation(_("No file specified"))
up = request.FILES['file']
# check file size
if up.size > localsettings.max_file_size:
SuspiciousOperation(_("Upload file size limit is set to %i bytes") % localsettings.max_file_size)
# simple check mime-types using the file extension (can process a test using libmagic)
guessed_mime_type = mimetypes.guess_type(up.name)[0]
if guessed_mime_type is None:
SuspiciousOperation(_("Undetermined uploaded file type"))
# validate the file name and update it in way to be multi OS compliant
# remove any '.' before and after
name = up.name.strip('.')
valid_name = io.StringIO()
# replace forbidden characters by '_'
for c in name:
if ord(c) < 32 or c in ('<', '>', '"', '|', '\\', '`', '*', '?', ':', '/'):
c = '_'
valid_name.write(c)
media = Media()
# generate two levels of path from the uuid node
l1_path = '%02x' % (((media.uuid.node & 0xffffff000000) >> 24) % 256)
l2_path = '%02x' % ((media.uuid.node & 0x000000ffffff) % 256)
local_path = os.path.join(l1_path, l2_path)
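    # e.g. (illustrative) a uuid node of 0x0a1b2c3d4e5f gives l1_path='2c' and
    # l2_path='5f', i.e. local_path='2c/5f'; the two hashed levels spread the
    # media files over at most 256 x 256 sub-directories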
local_file_name = str(media.uuid)
media.name = os.path.join(local_path, local_file_name)
media.version = 1
media.file_name = valid_name.getvalue()
media.file_size = up.size
# default owner is the user of the upload
media.owner_content_type = ContentType.objects.get_by_natural_key("auth", "user")
media.owner_object_id = request.user.pk
# create the path if necessary
abs_path = os.path.join(localsettings.storage_path, local_path)
if not os.path.exists(abs_path):
os.makedirs(abs_path, 0o770)
abs_file_name = os.path.join(abs_path, local_file_name)
dst_file = open(abs_file_name, "wb")
# test mime-type with a buffer of a least 1024 bytes
test_mime_buffer = io.BytesIO()
# copy file content
for chunk in up.chunks():
dst_file.write(chunk)
if test_mime_buffer.tell() < 1024:
test_mime_buffer.write(chunk)
dst_file.close()
guessed_mime_type = magic.from_buffer(test_mime_buffer.getvalue(), mime=True)
# 0660 on file
os.chmod(abs_file_name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP)
media.mime_type = guessed_mime_type # up.content_type
# save the model once file is correctly saved
media.save()
result = {
'id': media.id,
'uuid': media.uuid,
'name': media.name,
'created_date': media.created_date,
'modified_date': media.modified_date,
'file_name': media.file_name,
'file_size': media.file_size
}
return HttpResponseRest(request, result)
@RestMediaUUID.def_auth_request(Method.PUT, Format.JSON)
def update_upload_media(request, uuid):
"""
    Update the file content of an existing media from a multi-part HTTP file request.
"""
if not request.FILES:
raise SuspiciousOperation(_("No file specified"))
up = request.FILES['file']
# check file size
if up.size > localsettings.max_file_size:
SuspiciousOperation(_("Upload file size limit is set to %i bytes") % localsettings.max_file_size)
# simple check mime-types using the file extension (can process a test using libmagic)
guessed_mime_type = mimetypes.guess_type(up.name)[0]
if guessed_mime_type is None:
SuspiciousOperation(_("Undetermined uploaded file type"))
media = get_object_or_404(Media, uuid=uuid)
# check user permission on the media
    if ".".join(media.owner_content_type.natural_key()) == "auth.user":
        if media.owner_object_id != request.user.pk:
            raise PermissionDenied(_('You are not the owner of the media'))
else:
perms = get_permissions_for(request.user,
media.owner_content_type.app_label,
media.owner_content_type.model,
media.owner_object_id)
if '%s.change_%s' % (media.owner_content_type.app_label, media.owner_content_type.model) not in perms:
raise PermissionDenied(_('No change permission to the owner entity'))
version = media.version + 1
abs_file_name = os.path.join(localsettings.storage_path, media.name)
if not os.path.isfile(abs_file_name):
SuspiciousOperation(_("Trying to update a non-existing file"))
dst_file = open(abs_file_name, "wb")
# test mime-type with a buffer of a least 1024 bytes
test_mime_buffer = io.BytesIO()
# copy file content
for chunk in up.chunks():
dst_file.write(chunk)
if test_mime_buffer.tell() < 1024:
test_mime_buffer.write(chunk)
dst_file.close()
guessed_mime_type = magic.from_buffer(test_mime_buffer.getvalue(), mime=True)
# 0660 on file
os.chmod(abs_file_name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP)
# upgrade the version number and file size
media.version = version
media.file_size = up.size
media.mime_type = guessed_mime_type # up.content_type
# update the model once file is correctly saved
media.save()
result = {
'id': media.id,
'uuid': media.uuid,
'version': media.version,
        'mime_type': media.mime_type,
'file_size': media.file_size,
'modified_date': media.modified_date
}
return HttpResponseRest(request, result)
|
{
"content_hash": "e7ec8c7599a6a1f6bf79d00bf8cf7621",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 115,
"avg_line_length": 33.33956386292835,
"alnum_prop": 0.639319753317137,
"repo_name": "coll-gate/collgate",
"id": "d923218e95cd858ebf7ba12e386504fcec59efd5",
"size": "10909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/medialibrary/media.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20334"
},
{
"name": "HTML",
"bytes": "245334"
},
{
"name": "JavaScript",
"bytes": "5131841"
},
{
"name": "Python",
"bytes": "1291968"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
}
|
"""
Classes for working with fragment distribution
@author: Alicia Schep, Greenleaf Lab, Stanford University
"""
import numpy as np
import pyximport
pyximport.install(setup_args={"include_dirs":np.get_include()})
from pyatac.fragments import getAllFragmentSizes, getFragmentSizesFromChunkList
class FragmentSizes:
"""Class for storing fragment size distribution"""
def __init__(self, lower, upper, atac = True, vals = None):
self.lower = lower
self.upper = upper
self.vals = vals
self.atac = atac
def calculateSizes(self, bamfile, chunks = None):
if chunks is None:
sizes = getAllFragmentSizes(bamfile, self.lower, self.upper, atac = self.atac)
else:
sizes = getFragmentSizesFromChunkList(chunks, bamfile, self.lower, self.upper, atac = self.atac)
self.vals = sizes / (np.sum(sizes) + (np.sum(sizes)==0))
def get(self, lower = None, upper = None, size = None):
if size:
try:
return self.vals[size - self.lower]
            except (IndexError, TypeError):
                raise Exception("Looks like size doesn't match FragmentSizes")
else:
if lower is None:
lower = self.lower
if upper is None:
upper = self.upper
y1 = lower - self.lower
y2 = upper - self.lower
try:
return self.vals[y1:y2]
            except (IndexError, TypeError):
                raise Exception("Looks like dimensions from get probably don't match FragmentSizes")
def save(self, filename):
"""Save Fragment Distribution information"""
f = open(filename,"w")
f.write("#lower\n")
f.write(str(self.lower)+"\n")
f.write("#upper\n")
f.write(str(self.upper)+"\n")
f.write("#sizes\n")
f.write("\t".join(map(str,self.get()))+"\n")
f.close()
@staticmethod
def open(filename):
"""Create FragmentDistribution object from text descriptor file"""
infile = open(filename,'r')
state = ''
for line in infile:
if '#lower' in line:
state = 'lower'
elif '#upper' in line:
state = 'upper'
elif '#sizes' in line:
state = 'sizes'
elif '#' in line:
state = 'other'
elif state == 'lower':
lower = int(line.strip('\n'))
elif state == 'upper':
upper = int(line.strip('\n'))
elif state == 'sizes':
                fragmentsizes = np.array([float(x) for x in line.rstrip("\n").split("\t")])
try:
new = FragmentSizes(lower, upper, vals = fragmentsizes)
except NameError:
raise Exception("FragmentDistribution decriptor file appeas to be missing some\
needed components")
infile.close()
return new
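# --- Minimal usage sketch (not part of the original module) ---
# Builds a FragmentSizes object from an assumed distribution, saves it to a
# temporary file and reads it back; the size range and values are illustrative.
if __name__ == '__main__':
    import os
    import tempfile
    vals = np.arange(1.0, 6.0)
    vals = vals / vals.sum()
    dist = FragmentSizes(100, 105, vals=vals)
    path = os.path.join(tempfile.gettempdir(), 'fragment_sizes_demo.txt')
    dist.save(path)
    reloaded = FragmentSizes.open(path)
    print("%d-%d: %s" % (reloaded.lower, reloaded.upper, reloaded.get()))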
|
{
"content_hash": "2d57043ee9908939554504f393283a12",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 108,
"avg_line_length": 34.416666666666664,
"alnum_prop": 0.5541335178139052,
"repo_name": "GreenleafLab/NucleoATAC",
"id": "4fd6ba00d56c6acfec46a847cd4db5ec67c89927",
"size": "2891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyatac/fragmentsizes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "194662"
}
],
"symlink_target": ""
}
|
import numpy as np
from .abstract import _AbstractMicrostructureBasis
class PrimitiveBasis(_AbstractMicrostructureBasis):
r"""
Discretize the microstructure function into `n_states` local states such
that:
.. math::
\frac{1}{\Delta x} \int_{H} \int_{s} \Lambda(h - l)
m(h, x) dx dh = m[l, s]
where :math:`\Lambda` is the primitive basis (also called hat basis)
function
.. math::
\Lambda (h - l) = max \Bigg (1-\Bigg |\frac{h(L - 1)}{H} -
\frac{Hl}{L-1} \Bigg|, 0\Bigg)
A microstructure function discretized with this basis is subject to the
following constraint
    .. math::
\sum_{l=0}^L m[l, s] = 1
which is equivalent of saying that every location is filled with some
configuration of local states.
Here is an example with 3 discrete local states in a microstructure.
>>> X = np.array([[[1, 1, 0],
... [1, 0 ,2],
... [0, 1, 0]]])
>>> assert(X.shape == (1, 3, 3))
    When a microstructure is discretized, the different local states are
mapped into local state space, which results in an array of shape
`(n_samples, n_x, n_y, n_states)`, where `n_states=3` in this case.
For example, if a cell has a label of 2, its local state will be
`[0, 0, 1]`. The local state can only have values of 0 or 1.
>>> prim_basis = PrimitiveBasis(n_states=3)
>>> X_prim = np.array([[[[0, 1, 0],
... [0, 1, 0],
... [1, 0, 0]],
... [[0, 1, 0],
... [1, 0, 0],
... [0, 0, 1]],
... [[1, 0, 0],
... [0, 1, 0],
... [1, 0, 0]]]])
>>> assert(np.allclose(X_prim, prim_basis.discretize(X)))
Check that the basis works when all the states are present in the
microstructure.
>>> prim_basis = PrimitiveBasis(n_states=3)
>>> X = np.array([1, 1, 0])
>>> X_prim = np.array([[0, 1, 0],
... [0, 1, 0],
... [1, 0, 0]])
>>> assert(np.allclose(X_prim, prim_basis.discretize(X)))
    In the previous two microstructures, the values fell on the peaks of the
    primitive (or hat) basis functions. If a local state value falls between
    two peaks of the primitive basis functions, the value will be shared by both
basis functions. To ensure that all local states fall between the peaks
of two basis functions, we need to specify the local state domain. For
example, if a cell has a value of 0.4, and the basis has n_states=2 and
the domain=[0, 1] then the local state is (0.6, 0.4) (the local state
must sum to 1).
    Here are a few examples where the local states fall between the peaks of
    the basis functions. The first specifies the local state space domain between
`[0, 1]`.
>>> n_states = 10
>>> np.random.seed(4)
>>> X = np.random.random((2, 5, 3, 2))
>>> X_ = PrimitiveBasis(n_states, [0, 1]).discretize(X)
>>> H = np.linspace(0, 1, n_states)
>>> Xtest = np.sum(X_ * H[None,None,None,:], axis=-1)
>>> assert np.allclose(X, Xtest)
Here is an example where the local state space domain is between `[-1, 1]`.
>>> n_states = 3
>>> X = np.array([-1, 0, 1, 0.5])
>>> X_test = [[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0.5, 0.5]]
>>> X_ = PrimitiveBasis(n_states, [-1, 1]).discretize(X)
>>> assert np.allclose(X_, X_test)
If the local state values in the microstructure are outside of the domain
they can no longer be represented by two primitive basis functions and
violates constraint above.
>>> n_states = 2
>>> X = np.array([-1, 1])
>>> prim_basis = PrimitiveBasis(n_states, domain=[0, 1])
>>> prim_basis.discretize(X)
Traceback (most recent call last):
...
RuntimeError: X must be within the specified domain
"""
def _get_basis_slice(self, ijk, s0):
"""
        Helper method used when calibrating influence coefficients in
        mks_localization_model to account for redundancies from linearly
dependent local states.
"""
if np.all(np.array(ijk) == 0):
s1 = s0
else:
s1 = (slice(-1),)
return s1
def discretize(self, X):
"""
Discretize `X`.
Args:
X (ND array): The microstructure, an `(n_samples, n_x, ...)`
shaped array where `n_samples` is the number of samples and
                `n_x` is the spatial discretization.
Returns:
Float valued field of local states between 0 and 1.
"""
self.check(X)
H = np.linspace(self.domain[0], self.domain[1], self.n_states)
return np.maximum(1 - (abs(X[..., None] - H)) / (H[1] - H[0]), 0)
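# --- Minimal usage sketch (not part of the original module) ---
# Discretizes a random microstructure and checks the constraint from the
# docstring: the local states at every spatial location sum to 1. The array
# shape and number of states are illustrative assumptions.
if __name__ == '__main__':
    np.random.seed(0)
    X = np.random.random((2, 4, 4))
    basis = PrimitiveBasis(n_states=5, domain=[0, 1])
    X_ = basis.discretize(X)
    assert X_.shape == (2, 4, 4, 5)
    assert np.allclose(X_.sum(axis=-1), 1)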
|
{
"content_hash": "6a333f4b066f08fa069c370a79336fce",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 79,
"avg_line_length": 35.43065693430657,
"alnum_prop": 0.5558302430984755,
"repo_name": "XinyiGong/pymks",
"id": "b3319026b94596b08f6466b44b527457ba4d90bf",
"size": "4854",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymks/bases/primitive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "153013"
}
],
"symlink_target": ""
}
|
import wx
from cairis.core.armid import *
from TargetDialog import TargetDialog
from cairis.core.Target import Target
__author__ = 'Shamal Faily'
class TargetListCtrl(wx.ListCtrl):
def __init__(self,parent,winId,boxSize=wx.DefaultSize):
wx.ListCtrl.__init__(self,parent,winId,size=boxSize,style=wx.LC_REPORT)
self.theParentWindow = parent
self.InsertColumn(0,'Target')
self.SetColumnWidth(0,150)
self.InsertColumn(1,'Effectiveness')
self.SetColumnWidth(1,100)
self.InsertColumn(2,'Rationale')
    self.SetColumnWidth(2,300)
self.theDimMenu = wx.Menu()
self.theDimMenu.Append(TARGETLISTCTRL_MENUADD_ID,'Add')
self.theDimMenu.Append(TARGETLISTCTRL_MENUDELETE_ID,'Delete')
self.theSelectedValue = ''
self.theSelectedIdx = -1
self.setTargets = {}
self.Bind(wx.EVT_RIGHT_DOWN,self.OnRightDown)
self.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnItemSelected)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnItemDeselected)
self.Bind(wx.EVT_LIST_ITEM_ACTIVATED,self.onItemActivated)
wx.EVT_MENU(self.theDimMenu,TARGETLISTCTRL_MENUADD_ID,self.onAddTarget)
wx.EVT_MENU(self.theDimMenu,TARGETLISTCTRL_MENUDELETE_ID,self.onDeleteTarget)
def setEnvironment(self,environmentName):
self.theCurrentEnvironment = environmentName
if ((self.theCurrentEnvironment in self.setTargets) == False):
self.setTargets[self.theCurrentEnvironment] = set([])
def OnItemSelected(self,evt):
self.theSelectedIdx = evt.GetIndex()
def OnItemDeselected(self,evt):
self.theSelectedIdx = -1
def OnRightDown(self,evt):
self.PopupMenu(self.theDimMenu)
def onItemActivated(self,evt):
x = evt.GetIndex()
targetName = self.GetItemText(x)
targetEffectiveness = self.GetItem(x,1).GetText()
eRationale = self.GetItem(x,2).GetText()
reqCtrl = self.theParentWindow.FindWindowById(COUNTERMEASURE_LISTREQUIREMENTS_ID)
reqList = reqCtrl.dimensions()
dlg = TargetDialog(self,reqList,self.setTargets[self.theCurrentEnvironment],self.theCurrentEnvironment)
dlg.load(targetName,targetEffectiveness,eRationale)
if (dlg.ShowModal() == TARGET_BUTTONCOMMIT_ID):
targetName = dlg.target()
effectivenessValue = dlg.effectiveness()
eRat = dlg.rationale()
self.SetStringItem(x,0,targetName)
self.SetStringItem(x,1,effectivenessValue)
self.SetStringItem(x,2,eRat)
(self.setTargets[self.theCurrentEnvironment]).add(targetName)
def onAddTarget(self,evt):
reqCtrl = self.theParentWindow.FindWindowById(COUNTERMEASURE_LISTREQUIREMENTS_ID)
reqList = reqCtrl.dimensions()
if (len(reqList) == 0):
dlg = wx.MessageDialog(self,'Add target','No requirements selected',wx.OK | wx.ICON_EXCLAMATION)
dlg.ShowModal()
dlg.Destroy()
return
dlg = TargetDialog(self,reqList,self.setTargets[self.theCurrentEnvironment],self.theCurrentEnvironment)
if (dlg.ShowModal() == TARGET_BUTTONCOMMIT_ID):
targetName = dlg.target()
effectivenessValue = dlg.effectiveness()
eRat = dlg.rationale()
idx = self.GetItemCount()
self.InsertStringItem(idx,targetName)
self.SetStringItem(idx,1,effectivenessValue)
self.SetStringItem(idx,2,eRat)
self.theSelectedValue = targetName
(self.setTargets[self.theCurrentEnvironment]).add(targetName)
def onDeleteTarget(self,evt):
if (self.theSelectedIdx == -1):
errorText = 'No target selected'
errorLabel = 'Delete mitigation target'
dlg = wx.MessageDialog(self,errorText,errorLabel,wx.OK)
dlg.ShowModal()
dlg.Destroy()
else:
selectedValue = self.GetItemText(self.theSelectedIdx)
self.DeleteItem(self.theSelectedIdx)
(self.setTargets[self.theCurrentEnvironment]).remove(selectedValue)
def load(self,targets):
for idx,target in enumerate(targets):
targetName = target.name()
self.InsertStringItem(idx,targetName)
self.SetStringItem(idx,1,target.effectiveness())
self.SetStringItem(idx,2,target.rationale())
(self.setTargets[self.theCurrentEnvironment]).add(targetName)
def targets(self):
targetList = []
for x in range(self.GetItemCount()):
targetName = self.GetItemText(x)
targetEffectiveness = self.GetItem(x,1).GetText()
eRationale = self.GetItem(x,2).GetText()
      targetList.append(Target(targetName,targetEffectiveness,eRationale))
return targetList
|
{
"content_hash": "5ab35c3e713377f77d293a5393c9962b",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 107,
"avg_line_length": 40.24545454545454,
"alnum_prop": 0.7239665687824712,
"repo_name": "nathanbjenx/cairis",
"id": "1c92cd752518e6ecd4084c7292d6b2f13ef5bae2",
"size": "5226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cairis/gui/TargetListCtrl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "588306"
},
{
"name": "Dockerfile",
"bytes": "829"
},
{
"name": "Gherkin",
"bytes": "1615"
},
{
"name": "HTML",
"bytes": "1664076"
},
{
"name": "JavaScript",
"bytes": "416319"
},
{
"name": "Mako",
"bytes": "13226"
},
{
"name": "PLpgSQL",
"bytes": "1494775"
},
{
"name": "Python",
"bytes": "4006311"
},
{
"name": "Shell",
"bytes": "7035"
}
],
"symlink_target": ""
}
|
"""Support for BloomSky weather station."""
from datetime import timedelta
import logging
from aiohttp.hdrs import AUTHORIZATION
import requests
import voluptuous as vol
from homeassistant.const import (
CONF_API_KEY,
HTTP_METHOD_NOT_ALLOWED,
HTTP_OK,
HTTP_UNAUTHORIZED,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["camera", "binary_sensor", "sensor"]
DOMAIN = "bloomsky"
# The BloomSky only updates every 5-8 minutes as per the API spec so there's
# no point in polling the API more frequently
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Schema({vol.Required(CONF_API_KEY): cv.string})}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the BloomSky integration."""
api_key = config[DOMAIN][CONF_API_KEY]
try:
bloomsky = BloomSky(api_key, hass.config.units.is_metric)
except RuntimeError:
return False
hass.data[DOMAIN] = bloomsky
for platform in PLATFORMS:
discovery.load_platform(hass, platform, DOMAIN, {}, config)
return True
class BloomSky:
"""Handle all communication with the BloomSky API."""
# API documentation at http://weatherlution.com/bloomsky-api/
API_URL = "http://api.bloomsky.com/api/skydata"
def __init__(self, api_key, is_metric):
"""Initialize the BookSky."""
self._api_key = api_key
self._endpoint_argument = "unit=intl" if is_metric else ""
self.devices = {}
self.is_metric = is_metric
_LOGGER.debug("Initial BloomSky device load...")
self.refresh_devices()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def refresh_devices(self):
"""Use the API to retrieve a list of devices."""
_LOGGER.debug("Fetching BloomSky update")
response = requests.get(
f"{self.API_URL}?{self._endpoint_argument}",
headers={AUTHORIZATION: self._api_key},
timeout=10,
)
if response.status_code == HTTP_UNAUTHORIZED:
raise RuntimeError("Invalid API_KEY")
if response.status_code == HTTP_METHOD_NOT_ALLOWED:
_LOGGER.error("You have no bloomsky devices configured")
return
if response.status_code != HTTP_OK:
_LOGGER.error("Invalid HTTP response: %s", response.status_code)
return
# Create dictionary keyed off of the device unique id
self.devices.update({device["DeviceID"]: device for device in response.json()})
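        # `self.devices` now maps each device's "DeviceID" to the full
        # per-device payload returned by the API, so the camera/sensor
        # platforms can look up a device's latest readings by its unique id.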
|
{
"content_hash": "2026b389e5bd0005924c16e8de3825ef",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 88,
"avg_line_length": 31.452380952380953,
"alnum_prop": 0.6627554882664648,
"repo_name": "partofthething/home-assistant",
"id": "76ed9cdd12a0abac14793e410790b724448c273f",
"size": "2642",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/bloomsky/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
}
|